From 34f8ec6a29af7a499ce6ab1aab4a484380637889 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Sat, 20 Jan 2024 10:41:21 +0100 Subject: [PATCH 001/210] CH-110 jupyterhub update wip --- .../deploy/resources/hub/jupyterhub_config.py | 148 +- .../jupyterhub/deploy/resources/hub/z2jh.py | 21 +- .../jupyterhub/deploy/templates/NOTES.txt | 158 + .../deploy/templates/_helpers-auth-rework.tpl | 18 +- .../deploy/templates/_helpers-names.tpl | 62 +- .../deploy/templates/_helpers-netpol.tpl | 101 + .../jupyterhub/deploy/templates/_helpers.tpl | 63 +- .../deploy/templates/hub/configmap.yaml | 1 + .../deploy/templates/hub/deployment.yaml | 24 +- .../deploy/templates/hub/netpol.yaml | 25 +- .../jupyterhub/deploy/templates/hub/pdb.yaml | 4 - .../jupyterhub/deploy/templates/hub/rbac.yaml | 15 +- .../deploy/templates/hub/serviceaccount.yaml | 12 + .../deploy/templates/image-pull-secret.yaml | 15 + .../image-puller/_helpers-daemonset.tpl | 51 +- .../deploy/templates/image-puller/job.yaml | 13 +- .../templates/image-puller/priorityclass.yaml | 18 + .../deploy/templates/image-puller/rbac.yaml | 27 +- .../image-puller/serviceaccount.yaml | 21 + .../templates/proxy/autohttps/_README.txt | 9 - .../templates/proxy/autohttps/configmap.yaml | 28 - .../templates/proxy/autohttps/deployment.yaml | 141 - .../templates/proxy/autohttps/rbac.yaml | 40 - .../templates/proxy/autohttps/service.yaml | 25 - .../deploy/templates/proxy/deployment.yaml | 14 +- .../deploy/templates/proxy/netpol.yaml | 24 +- .../deploy/templates/proxy/pdb.yaml | 4 - .../deploy/templates/proxy/service.yaml | 9 +- .../templates/scheduling/priorityclass.yaml | 13 - .../scheduling/user-placeholder/pdb.yaml | 4 - .../user-placeholder/priorityclass.yaml | 13 - .../user-placeholder/statefulset.yaml | 15 +- .../scheduling/user-scheduler/configmap.yaml | 20 +- .../scheduling/user-scheduler/deployment.yaml | 31 +- .../scheduling/user-scheduler/pdb.yaml | 4 - .../scheduling/user-scheduler/rbac.yaml | 78 +- 
.../user-scheduler/serviceaccount.yaml | 14 + .../deploy/templates/singleuser/netpol.yaml | 39 +- .../deploy/templates/singleuser/secret.yaml | 17 + .../jupyterhub/deploy/values.schema.yaml | 3014 +++++++++++++++++ applications/jupyterhub/deploy/values.yaml | 276 +- .../jupyterhub/zero-to-jupyterhub-k8s | 1 + 42 files changed, 4013 insertions(+), 617 deletions(-) create mode 100644 applications/jupyterhub/deploy/templates/NOTES.txt create mode 100644 applications/jupyterhub/deploy/templates/_helpers-netpol.tpl create mode 100644 applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml create mode 100644 applications/jupyterhub/deploy/templates/image-pull-secret.yaml create mode 100644 applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml create mode 100644 applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml delete mode 100755 applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt delete mode 100755 applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml delete mode 100755 applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml delete mode 100755 applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml delete mode 100755 applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml create mode 100644 applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml create mode 100644 applications/jupyterhub/deploy/templates/singleuser/secret.yaml create mode 100644 applications/jupyterhub/deploy/values.schema.yaml create mode 160000 applications/jupyterhub/zero-to-jupyterhub-k8s diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py index d4b3cee2..8ec801ee 100755 --- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py +++ b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py @@ -1,9 +1,17 @@ +# load the config 
object (satisfies linters) +c = get_config() # noqa + +import glob import os import re import sys -import logging +from jupyterhub.utils import url_path_join +from kubernetes_asyncio import client from tornado.httpclient import AsyncHTTPClient + +#CLOUDHARNESS: EDIT START +import logging from kubernetes import client from jupyterhub.utils import url_path_join @@ -12,7 +20,7 @@ harness_hub() # activates harness hooks on jupyterhub except Exception as e: logging.error("could not import harness_jupyter", exc_info=True) - +# CLOUDHARNESS: EDIT END # Make sure that modules placed in the same directory as the jupyterhub config are added to the pythonpath configuration_directory = os.path.dirname(os.path.realpath(__file__)) @@ -20,39 +28,13 @@ from z2jh import ( get_config, - set_config_if_not_none, get_name, get_name_env, get_secret_value, + set_config_if_not_none, ) -print('Base url is', c.JupyterHub.get('base_url', '/')) - -# Configure JupyterHub to use the curl backend for making HTTP requests, -# rather than the pure-python implementations. The default one starts -# being too slow to make a large number of requests to the proxy API -# at the rate required. 
-AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") - -c.JupyterHub.spawner_class = 'kubespawner.KubeSpawner' - -# Connect to a proxy running in a different pod -c.ConfigurableHTTPProxy.api_url = 'http://{}:{}'.format(os.environ['PROXY_API_SERVICE_HOST'], int(os.environ['PROXY_API_SERVICE_PORT'])) -c.ConfigurableHTTPProxy.should_start = False - -# Do not shut down user pods when hub is restarted -c.JupyterHub.cleanup_servers = False - -# Check that the proxy has routes appropriately setup -c.JupyterHub.last_activity_interval = 60 - -# Don't wait at all before redirecting a spawning user to the progress page -c.JupyterHub.tornado_settings = { - 'slow_spawn_timeout': 0, -} - - def camelCaseify(s): """convert snake_case to camelCase @@ -173,6 +155,7 @@ def camelCaseify(s): ("events_enabled", "events"), ("extra_labels", None), ("extra_annotations", None), + # ("allow_privilege_escalation", None), # Managed manually below ("uid", None), ("fs_gid", None), ("service_account", "serviceAccountName"), @@ -206,10 +189,19 @@ def camelCaseify(s): if image: tag = get_config("singleuser.image.tag") if tag: - image = "{}:{}".format(image, tag) + image = f"{image}:{tag}" c.KubeSpawner.image = image +# allow_privilege_escalation defaults to False in KubeSpawner 2+. Since its a +# property where None, False, and True all are valid values that users of the +# Helm chart may want to set, we can't use the set_config_if_not_none helper +# function as someone may want to override the default False value to None. +# +c.KubeSpawner.allow_privilege_escalation = get_config( + "singleuser.allowPrivilegeEscalation" +) + # Combine imagePullSecret.create (single), imagePullSecrets (list), and # singleuser.image.pullSecrets (list). 
image_pull_secrets = [] @@ -255,7 +247,7 @@ def camelCaseify(s): pass else: raise ValueError( - "Unrecognized value for matchNodePurpose: %r" % match_node_purpose + f"Unrecognized value for matchNodePurpose: {match_node_purpose}" ) # Combine the common tolerations for user pods with singleuser tolerations @@ -271,7 +263,7 @@ def camelCaseify(s): pvc_name_template = get_config("singleuser.storage.dynamic.pvcNameTemplate") c.KubeSpawner.pvc_name_template = pvc_name_template volume_name_template = get_config("singleuser.storage.dynamic.volumeNameTemplate") - c.KubeSpawner.storage_pvc_ensure = False + c.KubeSpawner.storage_pvc_ensure = True set_config_if_not_none( c.KubeSpawner, "storage_class", "singleuser.storage.dynamic.storageClass" ) @@ -354,41 +346,62 @@ def camelCaseify(s): ) c.JupyterHub.services = [] +c.JupyterHub.load_roles = [] +# jupyterhub-idle-culler's permissions are scoped to what it needs only, see +# https://github.com/jupyterhub/jupyterhub-idle-culler#permissions. +# if get_config("cull.enabled", False): + jupyterhub_idle_culler_role = { + "name": "jupyterhub-idle-culler", + "scopes": [ + "list:users", + "read:users:activity", + "read:servers", + "delete:servers", + # "admin:users", # dynamically added if --cull-users is passed + ], + # assign the role to a jupyterhub service, so it gains these permissions + "services": ["jupyterhub-idle-culler"], + } + cull_cmd = ["python3", "-m", "jupyterhub_idle_culler"] base_url = c.JupyterHub.get("base_url", "/") cull_cmd.append("--url=http://localhost:8081" + url_path_join(base_url, "hub/api")) cull_timeout = get_config("cull.timeout") if cull_timeout: - cull_cmd.append("--timeout=%s" % cull_timeout) + cull_cmd.append(f"--timeout={cull_timeout}") cull_every = get_config("cull.every") if cull_every: - cull_cmd.append("--cull-every=%s" % cull_every) + cull_cmd.append(f"--cull-every={cull_every}") cull_concurrency = get_config("cull.concurrency") if cull_concurrency: - cull_cmd.append("--concurrency=%s" % 
cull_concurrency) + cull_cmd.append(f"--concurrency={cull_concurrency}") if get_config("cull.users"): cull_cmd.append("--cull-users") + jupyterhub_idle_culler_role["scopes"].append("admin:users") + + if not get_config("cull.adminUsers"): + cull_cmd.append("--cull-admin-users=false") if get_config("cull.removeNamedServers"): cull_cmd.append("--remove-named-servers") cull_max_age = get_config("cull.maxAge") if cull_max_age: - cull_cmd.append("--max-age=%s" % cull_max_age) + cull_cmd.append(f"--max-age={cull_max_age}") c.JupyterHub.services.append( { - "name": "cull-idle", - "admin": True, + "name": "jupyterhub-idle-culler", "command": cull_cmd, } ) + c.JupyterHub.load_roles.append(jupyterhub_idle_culler_role) for key, service in get_config("hub.services", {}).items(): # c.JupyterHub.services is a list of dicts, but @@ -402,26 +415,44 @@ def camelCaseify(s): c.JupyterHub.services.append(service) +for key, role in get_config("hub.loadRoles", {}).items(): + # c.JupyterHub.load_roles is a list of dicts, but + # hub.loadRoles is a dict of dicts to make the config mergable + role.setdefault("name", key) + + c.JupyterHub.load_roles.append(role) + +# respect explicit null command (distinct from unspecified) +# this avoids relying on KubeSpawner.cmd's default being None +_unspecified = object() +specified_cmd = get_config("singleuser.cmd", _unspecified) +if specified_cmd is not _unspecified: + c.Spawner.cmd = specified_cmd -set_config_if_not_none(c.Spawner, "cmd", "singleuser.cmd") set_config_if_not_none(c.Spawner, "default_url", "singleuser.defaultUrl") -cloud_metadata = get_config("singleuser.cloudMetadata", {}) +cloud_metadata = get_config("singleuser.cloudMetadata") if cloud_metadata.get("blockWithIptables") == True: # Use iptables to block access to cloud metadata by default network_tools_image_name = get_config("singleuser.networkTools.image.name") network_tools_image_tag = get_config("singleuser.networkTools.image.tag") + network_tools_resources = 
get_config("singleuser.networkTools.resources") + ip = cloud_metadata["ip"] ip_block_container = client.V1Container( name="block-cloud-metadata", image=f"{network_tools_image_name}:{network_tools_image_tag}", command=[ "iptables", - "-A", + "--append", "OUTPUT", - "-d", - cloud_metadata.get("ip", "169.254.169.254"), - "-j", + "--protocol", + "tcp", + "--destination", + ip, + "--destination-port", + "80", + "--jump", "DROP", ], security_context=client.V1SecurityContext( @@ -429,6 +460,7 @@ def camelCaseify(s): run_as_user=0, capabilities=client.V1Capabilities(add=["NET_ADMIN"]), ), + resources=network_tools_resources, ) c.KubeSpawner.init_containers.append(ip_block_container) @@ -438,17 +470,6 @@ def camelCaseify(s): c.JupyterHub.log_level = "DEBUG" c.Spawner.debug = True -# load /usr/local/etc/jupyterhub/jupyterhub_config.d config files -config_dir = "/usr/local/etc/jupyterhub/jupyterhub_config.d" -if os.path.isdir(config_dir): - for file_path in sorted(glob.glob(f"{config_dir}/*.py")): - file_name = os.path.basename(file_path) - print(f"Loading {config_dir} config: {file_name}") - with open(file_path) as f: - file_content = f.read() - # compiling makes debugging easier: https://stackoverflow.com/a/437857 - exec(compile(source=file_content, filename=file_name, mode="exec")) - # load potentially seeded secrets # # NOTE: ConfigurableHTTPProxy.auth_token is set through an environment variable @@ -471,11 +492,23 @@ def camelCaseify(s): cfg.pop("keys", None) c[app].update(cfg) +# load /usr/local/etc/jupyterhub/jupyterhub_config.d config files +config_dir = "/usr/local/etc/jupyterhub/jupyterhub_config.d" +if os.path.isdir(config_dir): + for file_path in sorted(glob.glob(f"{config_dir}/*.py")): + file_name = os.path.basename(file_path) + print(f"Loading {config_dir} config: {file_name}") + with open(file_path) as f: + file_content = f.read() + # compiling makes debugging easier: https://stackoverflow.com/a/437857 + exec(compile(source=file_content, filename=file_name, 
mode="exec")) + # execute hub.extraConfig entries for key, config_py in sorted(get_config("hub.extraConfig", {}).items()): - print("Loading extra config: %s" % key) + print(f"Loading extra config: {key}") exec(config_py) +# CLOUDHARNESS: EDIT START # Allow switching authenticators easily auth_type = get_config('hub.config.JupyterHub.authenticator_class') email_domain = 'local' @@ -525,4 +558,5 @@ def camelCaseify(s): c.apps = get_config('apps') c.registry = get_config('registry') c.domain = get_config('root.domain') -c.namespace = get_config('root.namespace') \ No newline at end of file +c.namespace = get_config('root.namespace') +# CLOUDHARNESS: EDIT END \ No newline at end of file diff --git a/applications/jupyterhub/deploy/resources/hub/z2jh.py b/applications/jupyterhub/deploy/resources/hub/z2jh.py index 834a6b6c..fc368f64 100755 --- a/applications/jupyterhub/deploy/resources/hub/z2jh.py +++ b/applications/jupyterhub/deploy/resources/hub/z2jh.py @@ -3,15 +3,15 @@ Methods here can be imported by extraConfig in values.yaml """ -from collections import Mapping -from functools import lru_cache import os -import re +from collections.abc import Mapping +from functools import lru_cache import yaml + # memoize so we only load config once -@lru_cache() +@lru_cache def _load_config(): """Load the Helm chart configuration used to render the Helm templates of the chart from a mounted k8s Secret, and merge in values from an optionally @@ -27,6 +27,7 @@ def _load_config(): cfg = _merge_dictionaries(cfg, values) else: print(f"No config at {path}") + # EDIT: CLOUDHARNESS START path = f"/opt/cloudharness/resources/allvalues.yaml" if os.path.exists(path): print("Loading global CloudHarness config at", path) @@ -34,11 +35,11 @@ def _load_config(): values = yaml.safe_load(f) cfg = _merge_dictionaries(cfg, values) cfg['root'] = values - + # EDIT: CLOUDHARNESS END return cfg -@lru_cache() +@lru_cache def _get_config_value(key): """Load value from the k8s ConfigMap given a key.""" @@ 
-50,7 +51,7 @@ def _get_config_value(key): raise Exception(f"{path} not found!") -@lru_cache() +@lru_cache def get_secret_value(key, default="never-explicitly-set"): """Load value from the user managed k8s Secret or the default k8s Secret given a key.""" @@ -117,7 +118,7 @@ def get_config(key, default=None): else: value = value[level] - + # EDIT: CLOUDHARNESS START if value and isinstance(value, str): replace_var = re.search("{{.*?}}", value) if replace_var: @@ -128,6 +129,7 @@ def get_config(key, default=None): if repl: print("replace", variable, "in", value, ":", repl) value = re.sub("{{.*?}}", repl, value) + # EDIT: CLOUDHARNESS END return value @@ -137,6 +139,5 @@ def set_config_if_not_none(cparent, name, key): configuration item if not None """ data = get_config(key) - if data is not None: - setattr(cparent, name, data) \ No newline at end of file + setattr(cparent, name, data) diff --git a/applications/jupyterhub/deploy/templates/NOTES.txt b/applications/jupyterhub/deploy/templates/NOTES.txt new file mode 100644 index 00000000..9769a9c7 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/NOTES.txt @@ -0,0 +1,158 @@ +{{- $proxy_service := include "jupyterhub.proxy-public.fullname" . -}} + +{{- /* Generated with https://patorjk.com/software/taag/#p=display&h=0&f=Slant&t=JupyterHub */}} +. __ __ __ __ __ + / / __ __ ____ __ __ / /_ ___ _____ / / / / __ __ / /_ + __ / / / / / / / __ \ / / / / / __/ / _ \ / ___/ / /_/ / / / / / / __ \ +/ /_/ / / /_/ / / /_/ / / /_/ / / /_ / __/ / / / __ / / /_/ / / /_/ / +\____/ \__,_/ / .___/ \__, / \__/ \___/ /_/ /_/ /_/ \__,_/ /_.___/ + /_/ /____/ + + You have successfully installed the official JupyterHub Helm chart! 
+ +### Installation info + + - Kubernetes namespace: {{ .Release.Namespace }} + - Helm release name: {{ .Release.Name }} + - Helm chart version: {{ .Chart.Version }} + - JupyterHub version: {{ .Chart.AppVersion }} + - Hub pod packages: See https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/{{ include "jupyterhub.chart-version-to-git-ref" .Chart.Version }}/images/hub/requirements.txt + +### Followup links + + - Documentation: https://z2jh.jupyter.org + - Help forum: https://discourse.jupyter.org + - Social chat: https://gitter.im/jupyterhub/jupyterhub + - Issue tracking: https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues + +### Post-installation checklist + + - Verify that created Pods enter a Running state: + + kubectl --namespace={{ .Release.Namespace }} get pod + + If a pod is stuck with a Pending or ContainerCreating status, diagnose with: + + kubectl --namespace={{ .Release.Namespace }} describe pod + + If a pod keeps restarting, diagnose with: + + kubectl --namespace={{ .Release.Namespace }} logs --previous + {{- println }} + + {{- if eq .Values.apps.jupyterhub.proxy.service.type "LoadBalancer" }} + - Verify an external IP is provided for the k8s Service {{ $proxy_service }}. 
+ + kubectl --namespace={{ .Release.Namespace }} get service {{ $proxy_service }} + + If the external ip remains , diagnose with: + + kubectl --namespace={{ .Release.Namespace }} describe service {{ $proxy_service }} + {{- end }} + + - Verify web based access: + {{- println }} + {{- if .Values.apps.jupyterhub.ingress.enabled }} + {{- range $host := .Values.apps.jupyterhub.ingress.hosts }} + Try insecure HTTP access: http://{{ $host }}{{ $.Values.apps.jupyterhub.hub.baseUrl | trimSuffix "/" }}/ + {{- end }} + + {{- range $tls := .Values.apps.jupyterhub.ingress.tls }} + {{- range $host := $tls.hosts }} + Try secure HTTPS access: https://{{ $host }}{{ $.Values.apps.jupyterhub.hub.baseUrl | trimSuffix "/" }}/ + {{- end }} + {{- end }} + {{- else }} + You have not configured a k8s Ingress resource so you need to access the k8s + Service {{ $proxy_service }} directly. + {{- println }} + + {{- if eq .Values.apps.jupyterhub.proxy.service.type "NodePort" }} + The k8s Service {{ $proxy_service }} is exposed via NodePorts. That means + that all the k8s cluster's nodes are exposing the k8s Service via those + ports. + + Try insecure HTTP access: http://:{{ .Values.apps.jupyterhub.proxy.service.nodePorts.http | default "no-http-nodeport-set"}} + Try secure HTTPS access: https://:{{ .Values.apps.jupyterhub.proxy.service.nodePorts.https | default "no-https-nodeport-set" }} + + {{- else }} + If your computer is outside the k8s cluster, you can port-forward traffic to + the k8s Service {{ $proxy_service }} with kubectl to access it from your + computer. 
+ + kubectl --namespace={{ .Release.Namespace }} port-forward service/{{ $proxy_service }} 8080:http + + Try insecure HTTP access: http://localhost:8080 + {{- end }} + {{- end }} + {{- println }} + + + + + +{{- /* + Warnings for likely misconfigurations +*/}} + +{{- if and (not .Values.apps.jupyterhub.scheduling.podPriority.enabled) (and .Values.apps.jupyterhub.scheduling.userPlaceholder.enabled .Values.apps.jupyterhub.scheduling.userPlaceholder.replicas) }} +################################################################################# +###### WARNING: You are using user placeholders without pod priority ##### +###### enabled*, either enable pod priority or stop using the ##### +###### user placeholders** to avoid having placeholders that ##### +###### refuse to make room for a real user. ##### +###### ##### +###### *scheduling.podPriority.enabled ##### +###### **scheduling.userPlaceholder.enabled ##### +###### **scheduling.userPlaceholder.replicas ##### +################################################################################# +{{- println }} +{{- end }} + + + + + +{{- /* + Breaking changes and failures for likely misconfigurations. +*/}} + +{{- $breaking := "" }} +{{- $breaking_title := "\n" }} +{{- $breaking_title = print $breaking_title "\n#################################################################################" }} +{{- $breaking_title = print $breaking_title "\n###### BREAKING: The config values passed contained no longer accepted #####" }} +{{- $breaking_title = print $breaking_title "\n###### options. See the messages below for more details. #####" }} +{{- $breaking_title = print $breaking_title "\n###### #####" }} +{{- $breaking_title = print $breaking_title "\n###### To verify your updated config is accepted, you can use #####" }} +{{- $breaking_title = print $breaking_title "\n###### the `helm template` command. 
#####" }} +{{- $breaking_title = print $breaking_title "\n#################################################################################" }} + + +{{- /* + This is an example (in a helm template comment) on how to detect and + communicate with regards to a breaking chart config change. + + {{- if hasKey .Values.apps.jupyterhub.singleuser.cloudMetadata "enabled" }} + {{- $breaking = print $breaking "\n\nCHANGED: singleuser.cloudMetadata.enabled must as of 1.0.0 be configured using singleuser.cloudMetadata.blockWithIptables with the opposite value." }} + {{- end }} +*/}} + + +{{- if hasKey .Values.apps.jupyterhub.rbac "enabled" }} +{{- $breaking = print $breaking "\n\nCHANGED: rbac.enabled must as of version 2.0.0 be configured via rbac.create and .serviceAccount.create." }} +{{- end }} + + +{{- if hasKey .Values.apps.jupyterhub.hub "fsGid" }} +{{- $breaking = print $breaking "\n\nCHANGED: hub.fsGid must as of version 2.0.0 be configured via hub.podSecurityContext.fsGroup." }} +{{- end }} + + +{{- if and .Values.apps.jupyterhub.singleuser.cloudMetadata.blockWithIptables (and .Values.apps.jupyterhub.singleuser.networkPolicy.enabled .Values.apps.jupyterhub.singleuser.networkPolicy.egressAllowRules.cloudMetadataServer) }} +{{- $breaking = print $breaking "\n\nCHANGED: singleuser.cloudMetadata.blockWithIptables must as of version 3.0.0 not be configured together with singleuser.networkPolicy.egressAllowRules.cloudMetadataServer as it leads to an ambiguous configuration." 
}} +{{- end }} + + +{{- if $breaking }} +{{- fail (print $breaking_title $breaking "\n\n") }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl index b742a126..3159d103 100644 --- a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl +++ b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl @@ -168,30 +168,30 @@ ldap.dn.user.useLookupName: LDAPAuthenticator.use_lookup_dn_username {{- $c := dict }} {{- $result := (dict "hub" (dict "config" $c)) }} {{- /* - Flattens the config in .Values.apps.jupyterhub.auth to a format of + Flattens the config in .Values.apps.jupyterhub.apps.jupyterhub.auth to a format of "keyX.keyY...": "value". Writes output to $c. */}} - {{- include "jupyterhub.flattenDict" (list $c (omit .Values.apps.jupyterhub.auth "type" "custom")) }} + {{- include "jupyterhub.flattenDict" (list $c (omit .Values.apps.jupyterhub.apps.jupyterhub.auth "type" "custom")) }} {{- /* Transform the flattened config using a dictionary representing the old z2jh config, output the result in $c. */}} - {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.global.safeToShowValues) }} + {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub. 
}} - {{- $class_old_config_key := .Values.apps.jupyterhub.auth.type | default "" }} {{- /* ldap - github */}} + {{- $class_old_config_key := .Values.apps.jupyterhub.apps.jupyterhub.auth.type | default "" }} {{- /* ldap - github */}} {{- $class_new_entrypoint := "" }} {{- /* ldapauthenticator.LDAPAuthenticator - github */}} {{- $class_new_config_key := "" }} {{- /* LDAPAuthenticator - GitHubOAuthenticator */}} {{- /* SET $class_new_entrypoint, $class_new_config_key */}} {{- if eq $class_old_config_key "custom" }} - {{- $class_new_entrypoint = .Values.apps.jupyterhub.auth.custom.className | default "custom.className wasn't configured!" }} + {{- $class_new_entrypoint = .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.className | default "custom.className wasn't configured!" }} {{- $class_new_config_key = $class_new_entrypoint | splitList "." | last }} {{- /* UPDATE c dict explicitly with auth.custom.config */}} - {{- if .Values.apps.jupyterhub.auth.custom.config }} - {{- $custom_config := merge (dict) .Values.apps.jupyterhub.auth.custom.config }} - {{- if not .Values.apps.jupyterhub.global.safeToShowValues }} + {{- if .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} + {{- $custom_config := merge (dict) .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} + {{- if not .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub.}} {{- range $key, $val := $custom_config }} {{- $_ := set $custom_config $key "***" }} {{- end }} @@ -213,7 +213,7 @@ The JupyterHub Helm chart's auth config has been reworked and requires changes. The new way to configure authentication in chart version 0.11.0+ is printed below for your convenience. The values are not shown by default to ensure no -secrets are exposed, run helm upgrade with --set global.safeToShowValues=true +secrets are exposed, run helm upgrade with --set global.safeToSho.Values.apps.jupyterhub.true to show them. 
{{ $result | toYaml }} diff --git a/applications/jupyterhub/deploy/templates/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/_helpers-names.tpl index e9cf7bb6..401d601a 100644 --- a/applications/jupyterhub/deploy/templates/_helpers-names.tpl +++ b/applications/jupyterhub/deploy/templates/_helpers-names.tpl @@ -3,8 +3,8 @@ parent charts to reference these dynamic resource names. To avoid duplicating documentation, for more information, please see the the - fullnameOverride entry in schema.yaml or the configuration reference that - schema.yaml renders to. + fullnameOverride entry in values.schema.yaml or the configuration reference + that values.schema.yaml renders to. https://z2jh.jupyter.org/en/latest/resources/reference.html#fullnameOverride */}} @@ -38,8 +38,8 @@ {{- $name_override := .Values.apps.jupyterhub.nameOverride }} {{- if ne .Chart.Name "jupyterhub" }} {{- if .Values.apps.jupyterhub.jupyterhub }} - {{- $fullname_override = .Values.apps.jupyterhub.fullnameOverride }} - {{- $name_override = .Values.apps.jupyterhub.nameOverride }} + {{- $fullname_override = .Values.apps.jupyterhub.jupyterhub.fullnameOverride }} + {{- $name_override = .Values.apps.jupyterhub.jupyterhub.nameOverride }} {{- end }} {{- end }} @@ -76,12 +76,23 @@ {{- include "jupyterhub.fullname.dash" . }}hub {{- end }} +{{- /* hub-serviceaccount ServiceAccount */}} +{{- define "jupyterhub.hub-serviceaccount.fullname" -}} + {{- if .Values.apps.jupyterhub.hub.serviceAccount.create }} + {{- .Values.apps.jupyterhub.hub.serviceAccount.name | default (include "jupyterhub.hub.fullname" .) }} + {{- else }} + {{- .Values.apps.jupyterhub.hub.serviceAccount.name | default "default" }} + {{- end }} +{{- end }} + {{- /* hub-existing-secret Secret */}} {{- define "jupyterhub.hub-existing-secret.fullname" -}} {{- /* A hack to avoid issues from invoking this from a parent Helm chart. 
*/}} {{- $existing_secret := .Values.apps.jupyterhub.hub.existingSecret }} {{- if ne .Chart.Name "jupyterhub" }} - {{- $existing_secret = .Values.apps.jupyterhub.hub.existingSecret }} + {{- if .Values.apps.jupyterhub.jupyterhub }} + {{- $existing_secret = .Values.apps.jupyterhub.jupyterhub.hub.existingSecret }} + {{- end }} {{- end }} {{- if $existing_secret }} {{- $existing_secret }} @@ -133,11 +144,29 @@ {{- include "jupyterhub.fullname.dash" . }}autohttps {{- end }} +{{- /* autohttps-serviceaccount ServiceAccount */}} +{{- define "jupyterhub.autohttps-serviceaccount.fullname" -}} + {{- if .Values.apps.jupyterhub.proxy.traefik.serviceAccount.create }} + {{- .Values.apps.jupyterhub.proxy.traefik.serviceAccount.name | default (include "jupyterhub.autohttps.fullname" .) }} + {{- else }} + {{- .Values.apps.jupyterhub.proxy.traefik.serviceAccount.name | default "default" }} + {{- end }} +{{- end }} + {{- /* user-scheduler Deployment */}} {{- define "jupyterhub.user-scheduler-deploy.fullname" -}} {{- include "jupyterhub.fullname.dash" . }}user-scheduler {{- end }} +{{- /* user-scheduler-serviceaccount ServiceAccount */}} +{{- define "jupyterhub.user-scheduler-serviceaccount.fullname" -}} + {{- if .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.create }} + {{- .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.name | default (include "jupyterhub.user-scheduler-deploy.fullname" .) }} + {{- else }} + {{- .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.name | default "default" }} + {{- end }} +{{- end }} + {{- /* user-scheduler leader election lock resource */}} {{- define "jupyterhub.user-scheduler-lock.fullname" -}} {{- include "jupyterhub.user-scheduler-deploy.fullname" . }}-lock @@ -153,6 +182,15 @@ {{- include "jupyterhub.fullname.dash" . 
}}hook-image-awaiter {{- end }} +{{- /* image-awaiter-serviceaccount ServiceAccount */}} +{{- define "jupyterhub.hook-image-awaiter-serviceaccount.fullname" -}} + {{- if .Values.apps.jupyterhub.prePuller.hook.serviceAccount.create }} + {{- .Values.apps.jupyterhub.prePuller.hook.serviceAccount.name | default (include "jupyterhub.hook-image-awaiter.fullname" .) }} + {{- else }} + {{- .Values.apps.jupyterhub.prePuller.hook.serviceAccount.name | default "default" }} + {{- end }} +{{- end }} + {{- /* hook-image-puller DaemonSet */}} {{- define "jupyterhub.hook-image-puller.fullname" -}} {{- include "jupyterhub.fullname.dash" . }}hook-image-puller @@ -210,6 +248,15 @@ {{- end }} {{- end }} +{{- /* image-puller Priority */}} +{{- define "jupyterhub.image-puller-priority.fullname" -}} + {{- if (include "jupyterhub.fullname" .) }} + {{- include "jupyterhub.fullname.dash" . }}image-puller + {{- else }} + {{- .Release.Name }}-image-puller-priority + {{- end }} +{{- end }} + {{- /* user-scheduler's registered name */}} {{- define "jupyterhub.user-scheduler.fullname" -}} {{- if (include "jupyterhub.fullname" .) }} @@ -231,6 +278,7 @@ fullname: {{ include "jupyterhub.fullname" . | quote }} fullname-dash: {{ include "jupyterhub.fullname.dash" . | quote }} hub: {{ include "jupyterhub.hub.fullname" . | quote }} +hub-serviceaccount: {{ include "jupyterhub.hub-serviceaccount.fullname" . | quote }} hub-existing-secret: {{ include "jupyterhub.hub-existing-secret.fullname" . | quote }} hub-existing-secret-or-default: {{ include "jupyterhub.hub-existing-secret-or-default.fullname" . | quote }} hub-pvc: {{ include "jupyterhub.hub-pvc.fullname" . | quote }} @@ -241,10 +289,14 @@ proxy-public: {{ include "jupyterhub.proxy-public.fullname" . | quote }} proxy-public-tls: {{ include "jupyterhub.proxy-public-tls.fullname" . | quote }} proxy-public-manual-tls: {{ include "jupyterhub.proxy-public-manual-tls.fullname" . | quote }} autohttps: {{ include "jupyterhub.autohttps.fullname" . 
| quote }} +autohttps-serviceaccount: {{ include "jupyterhub.autohttps-serviceaccount.fullname" . | quote }} user-scheduler-deploy: {{ include "jupyterhub.user-scheduler-deploy.fullname" . | quote }} +user-scheduler-serviceaccount: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . | quote }} user-scheduler-lock: {{ include "jupyterhub.user-scheduler-lock.fullname" . | quote }} user-placeholder: {{ include "jupyterhub.user-placeholder.fullname" . | quote }} +image-puller-priority: {{ include "jupyterhub.image-puller-priority.fullname" . | quote }} hook-image-awaiter: {{ include "jupyterhub.hook-image-awaiter.fullname" . | quote }} +hook-image-awaiter-serviceaccount: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . | quote }} hook-image-puller: {{ include "jupyterhub.hook-image-puller.fullname" . | quote }} continuous-image-puller: {{ include "jupyterhub.continuous-image-puller.fullname" . | quote }} singleuser: {{ include "jupyterhub.singleuser.fullname" . | quote }} diff --git a/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl b/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl new file mode 100644 index 00000000..4075569e --- /dev/null +++ b/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl @@ -0,0 +1,101 @@ +{{- /* + This named template renders egress rules for NetworkPolicy resources based on + common configuration. + + It is rendering based on the `egressAllowRules` and `egress` keys of the + passed networkPolicy config object. Each flag set to true under + `egressAllowRules` is rendered to a egress rule that next to any custom user + defined rules from the `egress` config. + + This named template needs to render based on a specific networkPolicy + resource, but also needs access to the root context. Due to that, it + accepts a list as its scope, where the first element is supposed to be the + root context and the second element is supposed to be the networkPolicy + configuration object. 
+ + As an example, this is how you would render this named template from a + NetworkPolicy resource under its egress: + + egress: + # other rules here... + + {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.hub.networkPolicy)) }} + {{- . | nindent 4 }} + {{- end }} + + Note that the reference to privateIPs and nonPrivateIPs relate to + https://en.wikipedia.org/wiki/Private_network#Private_IPv4_addresses. +*/}} + +{{- define "jupyterhub.networkPolicy.renderEgressRules" -}} +{{- $root := index . 0 }} +{{- $netpol := index . 1 }} +{{- if or (or $netpol.egressAllowRules.dnsPortsCloudMetadataServer $netpol.egressAllowRules.dnsPortsKubeSystemNamespace) $netpol.egressAllowRules.dnsPortsPrivateIPs }} +- ports: + - port: 53 + protocol: UDP + - port: 53 + protocol: TCP + to: + {{- if $netpol.egressAllowRules.dnsPortsCloudMetadataServer }} + # Allow outbound connections to DNS ports on the cloud metadata server + - ipBlock: + cidr: {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip }}/32 + {{- end }} + {{- if $netpol.egressAllowRules.dnsPortsKubeSystemNamespace }} + # Allow outbound connections to DNS ports on pods in the kube-system + # namespace + - namespaceSelector: + matchLabels: + kubernetes.io/metadata.name: kube-system + {{- end }} + {{- if $netpol.egressAllowRules.dnsPortsPrivateIPs }} + # Allow outbound connections to DNS ports on destinations in the private IP + # ranges + - ipBlock: + cidr: 10.0.0.0/8 + - ipBlock: + cidr: 172.16.0.0/12 + - ipBlock: + cidr: 192.168.0.0/16 + {{- end }} +{{- end }} + +{{- if $netpol.egressAllowRules.nonPrivateIPs }} +# Allow outbound connections to non-private IP ranges +- to: + - ipBlock: + cidr: 0.0.0.0/0 + except: + # As part of this rule: + # - don't allow outbound connections to private IPs + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + # - don't allow outbound connections to the cloud metadata server + - {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip 
}}/32 +{{- end }} + +{{- if $netpol.egressAllowRules.privateIPs }} +# Allow outbound connections to private IP ranges +- to: + - ipBlock: + cidr: 10.0.0.0/8 + - ipBlock: + cidr: 172.16.0.0/12 + - ipBlock: + cidr: 192.168.0.0/16 +{{- end }} + +{{- if $netpol.egressAllowRules.cloudMetadataServer }} +# Allow outbound connections to the cloud metadata server +- to: + - ipBlock: + cidr: {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip }}/32 +{{- end }} + +{{- with $netpol.egress }} +# Allow outbound connections based on user specified rules +{{ . | toYaml }} +{{- end }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/_helpers.tpl b/applications/jupyterhub/deploy/templates/_helpers.tpl index efea86d1..a2023639 100755 --- a/applications/jupyterhub/deploy/templates/_helpers.tpl +++ b/applications/jupyterhub/deploy/templates/_helpers.tpl @@ -12,7 +12,7 @@ When you ask a helper to render its content, one often forward the current scope to the helper in order to allow it to access .Release.Name, - .Values.apps.jupyterhub.rbac.enabled and similar values. + .Values.apps.jupyterhub.rbac.create and similar values. #### Example - Passing the current scope {{ include "jupyterhub.commonLabels" . }} @@ -180,8 +180,51 @@ component: {{ include "jupyterhub.componentLabel" . }} Augments passed .pullSecrets with $.Values.apps.jupyterhub.imagePullSecrets */}} {{- define "jupyterhub.imagePullSecrets" -}} + {{- /* + We have implemented a trick to allow a parent chart depending on this + chart to call this named templates. + + Caveats and notes: + + 1. While parent charts can reference these, grandparent charts can't. + 2. Parent charts must not use an alias for this chart. + 3. There is no failsafe workaround to above due to + https://github.com/helm/helm/issues/9214. + 4. .Chart is of its own type (*chart.Metadata) and needs to be casted + using "toYaml | fromYaml" in order to be able to use normal helm + template functions on it. 
+ */}} + {{- $jupyterhub_values := .root.Values.apps.jupyterhub.}} + {{- if ne .root.Chart.Name "jupyterhub" }} + {{- if .root.Values.apps.jupyterhub.jupyterhub }} + {{- $jupyterhub_values = .root.Values.apps.jupyterhub.jupyterhub }} + {{- end }} + {{- end }} + {{- /* Populate $_.list with all relevant entries */}} + {{- $_ := dict "list" (concat .image.pullSecrets $jupyterhub_values.imagePullSecrets | uniq) }} + {{- if and $jupyterhub_values.imagePullSecret.create $jupyterhub_values.imagePullSecret.automaticReferenceInjection }} + {{- $__ := set $_ "list" (append $_.list (include "jupyterhub.image-pull-secret.fullname" .root) | uniq) }} + {{- end }} + {{- /* Decide if something should be written */}} + {{- if not (eq ($_.list | toJson) "[]") }} + + {{- /* Process the $_.list where strings become dicts with a name key and the + strings become the name keys' values into $_.res */}} + {{- $_ := set $_ "res" list }} + {{- range $_.list }} + {{- if eq (typeOf .) "string" }} + {{- $__ := set $_ "res" (append $_.res (dict "name" .)) }} + {{- else }} + {{- $__ := set $_ "res" (append $_.res .) }} + {{- end }} + {{- end }} + + {{- /* Write the results */}} + {{- $_.res | toJson }} + + {{- end }} {{- end }} {{- /* @@ -339,3 +382,21 @@ limits: {{- print "\n\nextraFiles entries (" $file_key ") must only contain one of the fields: 'data', 'stringData', and 'binaryData'." | fail }} {{- end }} {{- end }} + +{{- /* + jupyterhub.chart-version-to-git-ref: + Renders a valid git reference from a chartpress generated version string. + In practice, either a git tag or a git commit hash will be returned. + + - The version string will follow a chartpress pattern, see + https://github.com/jupyterhub/chartpress#examples-chart-versions-and-image-tags. + + - The regexReplaceAll function is a sprig library function, see + https://masterminds.github.io/sprig/strings.html. + + - The regular expression is in golang syntax, but \d had to become \\d for + example. 
+*/}} +{{- define "jupyterhub.chart-version-to-git-ref" -}} +{{- regexReplaceAll ".*[.-]n\\d+[.]h(.*)" . "${1}" }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/hub/configmap.yaml index c913f678..f52feb6a 100755 --- a/applications/jupyterhub/deploy/templates/hub/configmap.yaml +++ b/applications/jupyterhub/deploy/templates/hub/configmap.yaml @@ -29,5 +29,6 @@ data: */}} checksum_hook-image-puller: {{ include "jupyterhub.imagePuller.daemonset.hook.checksum" . | quote }} + # EDIT: CLOUDHARNESS allvalues.yaml: | {{- .Values | toYaml | nindent 4 }} \ No newline at end of file diff --git a/applications/jupyterhub/deploy/templates/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/hub/deployment.yaml index 82132c62..d105ecca 100755 --- a/applications/jupyterhub/deploy/templates/hub/deployment.yaml +++ b/applications/jupyterhub/deploy/templates/hub/deployment.yaml @@ -5,6 +5,9 @@ metadata: labels: {{- include "jupyterhub.labels" . | nindent 4 }} spec: + {{- if typeIs "int" .Values.apps.jupyterhub.hub.revisionHistoryLimit }} + revisionHistoryLimit: {{ .Values.apps.jupyterhub.hub.revisionHistoryLimit }} + {{- end }} replicas: 1 selector: matchLabels: @@ -30,11 +33,14 @@ spec: {{- . | toYaml | nindent 8 }} {{- end }} spec: -{{ include "deploy_utils.etcHosts" . | indent 6 }} +{{ include "deploy_utils.etcHosts" . | indent 6 }} # EDIT: CLOUDHARNESS {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} priorityClassName: {{ include "jupyterhub.priority.fullname" . }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.hub.nodeSelector }} + {{- with .Values.apps.jupyterhub.hub.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.hub.tolerations }} tolerations: {{- . 
| toYaml | nindent 8 }} @@ -44,7 +50,7 @@ spec: - name: config configMap: name: {{ include "jupyterhub.hub.fullname" . }} - {{- /* This is needed by cloudharness libraries */}} + {{- /* EDIT: CLOUDHARNESS This is needed by cloudharness libraries */}} - name: cloudharness-allvalues configMap: name: cloudharness-allvalues @@ -82,11 +88,13 @@ spec: persistentVolumeClaim: claimName: {{ include "jupyterhub.hub-pvc.fullname" . }} {{- end }} - {{- if .Values.apps.jupyterhub.rbac.enabled }} - serviceAccountName: {{ include "jupyterhub.hub.fullname" . }} + {{- with include "jupyterhub.hub-serviceaccount.fullname" . }} + serviceAccountName: {{ . }} {{- end }} + {{- with .Values.apps.jupyterhub.hub.podSecurityContext }} securityContext: - fsGroup: {{ .Values.apps.jupyterhub.hub.fsGid }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with include "jupyterhub.imagePullSecrets" (dict "root" . "image" .Values.apps.jupyterhub.hub.image) }} imagePullSecrets: {{ . }} {{- end }} @@ -153,14 +161,14 @@ spec: name: config - mountPath: /usr/local/etc/jupyterhub/secret/ name: secret - - name: cloudharness-allvalues + - name: cloudharness-allvalues # EDIT: CLOUDHARNESS START mountPath: /opt/cloudharness/resources/allvalues.yaml subPath: allvalues.yaml {{- if .Values.apps.accounts }} - name: cloudharness-kc-accounts mountPath: /opt/cloudharness/resources/auth readOnly: true - {{- end }} + {{- end }} # EDIT: CLOUDHARNESS END {{- if (include "jupyterhub.hub-existing-secret.fullname" .) 
}} - mountPath: /usr/local/etc/jupyterhub/existing-secret/ name: existing-secret diff --git a/applications/jupyterhub/deploy/templates/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/hub/netpol.yaml index 9a7a6bc1..d9508e20 100755 --- a/applications/jupyterhub/deploy/templates/hub/netpol.yaml +++ b/applications/jupyterhub/deploy/templates/hub/netpol.yaml @@ -61,31 +61,24 @@ spec: egress: # hub --> proxy - - ports: - - port: 8001 - to: + - to: - podSelector: matchLabels: {{- $_ := merge (dict "componentLabel" "proxy") . }} {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8001 + # hub --> singleuser-server - - ports: - - port: 8888 - to: + - to: - podSelector: matchLabels: {{- $_ := merge (dict "componentLabel" "singleuser-server") . }} {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8888 - # hub --> Kubernetes internal DNS - - ports: - - protocol: UDP - port: 53 - - protocol: TCP - port: 53 - - {{- with .Values.apps.jupyterhub.hub.networkPolicy.egress }} - # hub --> depends, but the default is everything - {{- . | toYaml | nindent 4 }} + {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.hub.networkPolicy)) }} + {{- . | nindent 4 }} {{- end }} {{- end }} diff --git a/applications/jupyterhub/deploy/templates/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/hub/pdb.yaml index 855609d4..bb6c7b16 100755 --- a/applications/jupyterhub/deploy/templates/hub/pdb.yaml +++ b/applications/jupyterhub/deploy/templates/hub/pdb.yaml @@ -1,9 +1,5 @@ {{- if .Values.apps.jupyterhub.hub.pdb.enabled -}} -{{- if .Capabilities.APIVersions.Has "policy/v1" }} apiVersion: policy/v1 -{{- else }} -apiVersion: policy/v1beta1 -{{- end }} kind: PodDisruptionBudget metadata: name: {{ include "jupyterhub.hub.fullname" . 
}} diff --git a/applications/jupyterhub/deploy/templates/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/hub/rbac.yaml index 738daab1..1b689af4 100755 --- a/applications/jupyterhub/deploy/templates/hub/rbac.yaml +++ b/applications/jupyterhub/deploy/templates/hub/rbac.yaml @@ -1,15 +1,4 @@ -{{- if .Values.apps.jupyterhub.rbac.enabled -}} -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "jupyterhub.hub.fullname" . }} - {{- with .Values.apps.jupyterhub.hub.serviceAccount.annotations }} - annotations: - {{- . | toYaml | nindent 4 }} - {{- end }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} ---- +{{- if .Values.apps.jupyterhub.rbac.create -}} kind: Role apiVersion: rbac.authorization.k8s.io/v1 metadata: @@ -32,7 +21,7 @@ metadata: {{- include "jupyterhub.labels" . | nindent 4 }} subjects: - kind: ServiceAccount - name: {{ include "jupyterhub.hub.fullname" . }} + name: {{ include "jupyterhub.hub-serviceaccount.fullname" . }} namespace: "{{ .Release.Namespace }}" roleRef: kind: Role diff --git a/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml new file mode 100644 index 00000000..817ed661 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.apps.jupyterhub.hub.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "jupyterhub.hub-serviceaccount.fullname" . }} + {{- with .Values.apps.jupyterhub.hub.serviceAccount.annotations }} + annotations: + {{- . | toYaml | nindent 4 }} + {{- end }} + labels: + {{- include "jupyterhub.labels" . 
| nindent 4 }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/image-pull-secret.yaml b/applications/jupyterhub/deploy/templates/image-pull-secret.yaml new file mode 100644 index 00000000..b7544db7 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/image-pull-secret.yaml @@ -0,0 +1,15 @@ +{{- if .Values.apps.jupyterhub.imagePullSecret.create }} +kind: Secret +apiVersion: v1 +metadata: + name: {{ include "jupyterhub.image-pull-secret.fullname" . }} + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": pre-install,pre-upgrade + "helm.sh/hook-delete-policy": before-hook-creation + "helm.sh/hook-weight": "-20" +type: kubernetes.io/dockerconfigjson +data: + .dockerconfigjson: {{ include "jupyterhub.dockerconfigjson" . }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl index e16fd1a9..528345c0 100644 --- a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl +++ b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl @@ -34,6 +34,9 @@ spec: type: RollingUpdate rollingUpdate: maxUnavailable: 100% + {{- if typeIs "int" .Values.apps.jupyterhub.prePuller.revisionHistoryLimit }} + revisionHistoryLimit: {{ .Values.apps.jupyterhub.prePuller.revisionHistoryLimit }} + {{- end }} template: metadata: labels: @@ -44,13 +47,17 @@ spec: {{- end }} spec: {{- /* - continuous-image-puller pods are made evictable to save on the k8s pods - per node limit all k8s clusters have. + image-puller pods are made evictable to save on the k8s pods + per node limit all k8s clusters have and have a higher priority + than user-placeholder pods that could block an entire node. */}} - {{- if and (not .hook) .Values.apps.jupyterhub.scheduling.podPriority.enabled }} - priorityClassName: {{ include "jupyterhub.user-placeholder-priority.fullname" . 
}} + {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} + priorityClassName: {{ include "jupyterhub.image-puller-priority.fullname" . }} + {{- end }} + {{- with .Values.apps.jupyterhub.singleuser.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.singleuser.nodeSelector }} {{- with concat .Values.apps.jupyterhub.scheduling.userPods.tolerations .Values.apps.jupyterhub.singleuser.extraTolerations .Values.apps.jupyterhub.prePuller.extraTolerations }} tolerations: {{- . | toYaml | nindent 8 }} @@ -127,6 +134,7 @@ spec: {{- /* --- Conditionally pull profileList images --- */}} {{- if .Values.apps.jupyterhub.prePuller.pullProfileListImages }} {{- range $k, $container := .Values.apps.jupyterhub.singleuser.profileList }} + {{- /* profile's kubespawner_override */}} {{- if $container.kubespawner_override }} {{- if $container.kubespawner_override.image }} - name: image-pull-singleuser-profilelist-{{ $k }} @@ -145,13 +153,15 @@ spec: {{- end }} {{- end }} {{- end }} - {{- end }} - {{- end }} - - {{- /* --- Pull extra images --- */}} - {{- range $k, $v := .Values.apps.jupyterhub.prePuller.extraImages }} - - name: image-pull-{{ $k }} - image: {{ $v.name }}:{{ $v.tag }} + {{- /* kubespawner_override in profile's profile_options */}} + {{- if $container.profile_options }} + {{- range $option, $option_spec := $container.profile_options }} + {{- if $option_spec.choices }} + {{- range $choice, $choice_spec := $option_spec.choices }} + {{- if $choice_spec.kubespawner_override }} + {{- if $choice_spec.kubespawner_override.image }} + - name: image-pull-profile-{{ $k }}-option-{{ $option }}-{{ $choice }} + image: {{ $choice_spec.kubespawner_override.image }} command: - /bin/sh - -c @@ -163,13 +173,20 @@ spec: {{- with $.Values.apps.jupyterhub.prePuller.containerSecurityContext }} securityContext: {{- . 
| toYaml | nindent 12 }} - {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} {{- end }} - {{- /* --- Pull CloudHarness tasks images --- */}} - {{- range $k, $v := ( index .Values "task-images" ) }} - - name: image-pull-{{ $k | replace "-" "" }} - image: {{ $v }} + {{- /* --- Pull extra images --- */}} + {{- range $k, $v := .Values.apps.jupyterhub.prePuller.extraImages }} + - name: image-pull-{{ $k }} + image: {{ $v.name }}:{{ $v.tag }} command: - /bin/sh - -c diff --git a/applications/jupyterhub/deploy/templates/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/image-puller/job.yaml index bdd9f63c..cc6db3ec 100755 --- a/applications/jupyterhub/deploy/templates/image-puller/job.yaml +++ b/applications/jupyterhub/deploy/templates/image-puller/job.yaml @@ -28,16 +28,22 @@ spec: labels: {{- /* Changes here will cause the Job to restart the pods. */}} {{- include "jupyterhub.matchLabels" . | nindent 8 }} + {{- with .Values.apps.jupyterhub.prePuller.labels }} + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with .Values.apps.jupyterhub.prePuller.annotations }} annotations: {{- . | toYaml | nindent 8 }} {{- end }} spec: restartPolicy: Never - {{- if .Values.apps.jupyterhub.rbac.enabled }} - serviceAccountName: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} + {{- with include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . }} + serviceAccountName: {{ . }} + {{- end }} + {{- with .Values.apps.jupyterhub.prePuller.hook.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.prePuller.hook.nodeSelector }} {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.prePuller.hook.tolerations }} tolerations: {{- . 
| toYaml | nindent 8 }} @@ -58,6 +64,7 @@ spec: - -api-server-address=https://kubernetes.default.svc:$(KUBERNETES_SERVICE_PORT) - -namespace={{ .Release.Namespace }} - -daemonset={{ include "jupyterhub.hook-image-puller.fullname" . }} + - -pod-scheduling-wait-duration={{ .Values.apps.jupyterhub.prePuller.hook.podSchedulingWaitDuration }} {{- with .Values.apps.jupyterhub.prePuller.hook.containerSecurityContext }} securityContext: {{- . | toYaml | nindent 12 }} diff --git a/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml b/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml new file mode 100644 index 00000000..1a3fca33 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml @@ -0,0 +1,18 @@ +{{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} +{{- if or .Values.apps.jupyterhub.prePuller.hook.enabled .Values.apps.jupyterhub.prePuller.continuous.enabled -}} +apiVersion: scheduling.k8s.io/v1 +kind: PriorityClass +metadata: + name: {{ include "jupyterhub.image-puller-priority.fullname" . }} + annotations: + meta.helm.sh/release-name: "{{ .Release.Name }}" + meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} +value: {{ .Values.apps.jupyterhub.scheduling.podPriority.imagePullerPriority }} +globalDefault: false +description: >- + Enables [hook|continuous]-image-puller pods to fit on nodes even though they + are clogged by user-placeholder pods, while not evicting normal user pods. 
+{{- end }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml index 95c86ddf..5946896b 100755 --- a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml +++ b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml @@ -1,29 +1,8 @@ {{- /* Permissions to be used by the hook-image-awaiter job */}} -{{- if .Values.apps.jupyterhub.rbac.enabled }} -{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) }} -{{- /* -This service account... -*/ -}} -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} - hub.jupyter.org/deletable: "true" - annotations: - "helm.sh/hook": pre-install,pre-upgrade - "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded - "helm.sh/hook-weight": "0" - {{- with .Values.apps.jupyterhub.prePuller.hook.serviceAccount.annotations }} - {{- . | toYaml | nindent 4 }} - {{- end }} ---- -{{- /* -... will be used by this role... -*/}} +{{- if .Values.apps.jupyterhub.rbac.create -}} +{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) -}} kind: Role apiVersion: rbac.authorization.k8s.io/v1 metadata: @@ -56,7 +35,7 @@ metadata: "helm.sh/hook-weight": "0" subjects: - kind: ServiceAccount - name: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} + name: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . 
}} namespace: "{{ .Release.Namespace }}" roleRef: kind: Role diff --git a/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml new file mode 100644 index 00000000..2e5fa728 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml @@ -0,0 +1,21 @@ +{{- /* +ServiceAccount for the pre-puller hook's image-awaiter-job +*/}} +{{- if .Values.apps.jupyterhub.prePuller.hook.serviceAccount.create -}} +{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . }} + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + hub.jupyter.org/deletable: "true" + annotations: + "helm.sh/hook": pre-install,pre-upgrade + "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded + "helm.sh/hook-weight": "0" + {{- with .Values.apps.jupyterhub.prePuller.hook.serviceAccount.annotations }} + {{- . | toYaml | nindent 4 }} + {{- end }} +{{- end }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt deleted file mode 100755 index 08bd7bba..00000000 --- a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt +++ /dev/null @@ -1,9 +0,0 @@ -# Automatic HTTPS Terminator - -This directory has Kubernetes objects for automatic Let's Encrypt Support. -When enabled, we create a new deployment object that has an nginx-ingress -and kube-lego container in it. This is responsible for requesting, -storing and renewing certificates as needed from Let's Encrypt. - -The only change required outside of this directory is in the `proxy-public` -service, which targets different hubs based on automatic HTTPS status. 
\ No newline at end of file diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml deleted file mode 100755 index 8d71a971..00000000 --- a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml +++ /dev/null @@ -1,28 +0,0 @@ -{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} -{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} -{{- if $autoHTTPS -}} -{{- $_ := .Values.apps.jupyterhub.proxy.https.letsencrypt.contactEmail | required "proxy.https.letsencrypt.contactEmail is a required field" -}} - -# This configmap contains Traefik configuration files to be mounted. -# - traefik.yaml will only be read during startup (static configuration) -# - dynamic.yaml will be read on change (dynamic configuration) -# -# ref: https://docs.traefik.io/getting-started/configuration-overview/ -# -# The configuration files are first rendered with Helm templating to large YAML -# strings. Then we use the fromYAML function on these strings to get an object, -# that we in turn merge with user provided extra configuration. -# -kind: ConfigMap -apiVersion: v1 -metadata: - name: {{ include "jupyterhub.autohttps.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} -data: - traefik.yaml: | - {{- include "jupyterhub.traefik.yaml" . | fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraStaticConfig | toYaml | nindent 4 }} - dynamic.yaml: | - {{- include "jupyterhub.dynamic.yaml" . 
| fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraDynamicConfig | toYaml | nindent 4 }} - -{{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml deleted file mode 100755 index fcb062fd..00000000 --- a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml +++ /dev/null @@ -1,141 +0,0 @@ -{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} -{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} -{{- if $autoHTTPS -}} -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "jupyterhub.autohttps.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} -spec: - replicas: 1 - selector: - matchLabels: - {{- include "jupyterhub.matchLabels" . | nindent 6 }} - template: - metadata: - labels: - {{- include "jupyterhub.matchLabels" . | nindent 8 }} - hub.jupyter.org/network-access-proxy-http: "true" - {{- with .Values.apps.jupyterhub.proxy.traefik.labels }} - {{- . | toYaml | nindent 8 }} - {{- end }} - annotations: - # Only force a restart through a change to this checksum when the static - # configuration is changed, as the dynamic can be updated after start. - # Any disruptions to this deployment impacts everything, it is the - # entrypoint of all network traffic. - checksum/static-config: {{ include "jupyterhub.traefik.yaml" . | fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraStaticConfig | toYaml | sha256sum }} - spec: - {{- if .Values.apps.jupyterhub.rbac.enabled }} - serviceAccountName: {{ include "jupyterhub.autohttps.fullname" . }} - {{- end }} - {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} - priorityClassName: {{ include "jupyterhub.priority.fullname" . 
}} - {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.proxy.traefik.nodeSelector }} - {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.proxy.traefik.tolerations }} - tolerations: - {{- . | toYaml | nindent 8 }} - {{- end }} - {{- include "jupyterhub.coreAffinity" . | nindent 6 }} - volumes: - - name: certificates - emptyDir: {} - - name: traefik-config - configMap: - name: {{ include "jupyterhub.autohttps.fullname" . }} - {{- with .Values.apps.jupyterhub.proxy.traefik.extraVolumes }} - {{- . | toYaml | nindent 8 }} - {{- end }} - {{- with include "jupyterhub.imagePullSecrets" (dict "root" . "image" .Values.apps.jupyterhub.proxy.traefik.image) }} - imagePullSecrets: {{ . }} - {{- end }} - initContainers: - - name: load-acme - image: "{{ .Values.apps.jupyterhub.proxy.secretSync.image.name }}:{{ .Values.apps.jupyterhub.proxy.secretSync.image.tag }}" - {{- with .Values.apps.jupyterhub.proxy.secretSync.image.pullPolicy }} - imagePullPolicy: {{ . }} - {{- end }} - args: - - load - - {{ include "jupyterhub.proxy-public-tls.fullname" . }} - - acme.json - - /etc/acme/acme.json - env: - # We need this to get logs immediately - - name: PYTHONUNBUFFERED - value: "True" - {{- with .Values.apps.jupyterhub.proxy.traefik.extraEnv }} - {{- include "jupyterhub.extraEnv" . | nindent 12 }} - {{- end }} - volumeMounts: - - name: certificates - mountPath: /etc/acme - {{- with .Values.apps.jupyterhub.proxy.secretSync.containerSecurityContext }} - securityContext: - {{- . | toYaml | nindent 12 }} - {{- end }} - containers: - - name: traefik - image: "{{ .Values.apps.jupyterhub.proxy.traefik.image.name }}:{{ .Values.apps.jupyterhub.proxy.traefik.image.tag }}" - {{- with .Values.apps.jupyterhub.proxy.traefik.image.pullPolicy }} - imagePullPolicy: {{ . }} - {{- end }} - {{- with .Values.apps.jupyterhub.proxy.traefik.resources }} - resources: - {{- . 
| toYaml | nindent 12 }} - {{- end }} - ports: - - name: http - containerPort: 8080 - - name: https - containerPort: 8443 - {{- with .Values.apps.jupyterhub.proxy.traefik.extraPorts }} - {{- . | toYaml | nindent 12 }} - {{- end }} - volumeMounts: - - name: traefik-config - mountPath: /etc/traefik - - name: certificates - mountPath: /etc/acme - {{- with .Values.apps.jupyterhub.proxy.traefik.extraVolumeMounts }} - {{- . | toYaml | nindent 12 }} - {{- end }} - {{- with .Values.apps.jupyterhub.proxy.traefik.extraEnv }} - env: - {{- include "jupyterhub.extraEnv" . | nindent 12 }} - {{- end }} - {{- with .Values.apps.jupyterhub.proxy.traefik.containerSecurityContext }} - securityContext: - {{- . | toYaml | nindent 12 }} - {{- end }} - - name: secret-sync - image: "{{ .Values.apps.jupyterhub.proxy.secretSync.image.name }}:{{ .Values.apps.jupyterhub.proxy.secretSync.image.tag }}" - {{- with .Values.apps.jupyterhub.proxy.secretSync.image.pullPolicy }} - imagePullPolicy: {{ . }} - {{- end }} - args: - - watch-save - - --label=app={{ include "jupyterhub.appLabel" . }} - - --label=release={{ .Release.Name }} - - --label=chart={{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} - - --label=heritage=secret-sync - - {{ include "jupyterhub.proxy-public-tls.fullname" . }} - - acme.json - - /etc/acme/acme.json - env: - # We need this to get logs immediately - - name: PYTHONUNBUFFERED - value: "True" - volumeMounts: - - name: certificates - mountPath: /etc/acme - {{- with .Values.apps.jupyterhub.proxy.secretSync.containerSecurityContext }} - securityContext: - {{- . | toYaml | nindent 12 }} - {{- end }} - {{- with .Values.apps.jupyterhub.proxy.traefik.extraPodSpec }} - {{- . 
| toYaml | nindent 6 }} - {{- end }} -{{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml deleted file mode 100755 index ea43b672..00000000 --- a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml +++ /dev/null @@ -1,40 +0,0 @@ -{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} -{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} -{{- if (and $autoHTTPS .Values.apps.jupyterhub.rbac.enabled) -}} -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: {{ include "jupyterhub.autohttps.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} - {{- with .Values.apps.jupyterhub.proxy.traefik.serviceAccount.annotations }} - annotations: - {{- . | toYaml | nindent 4 }} - {{- end }} -rules: -- apiGroups: [""] - resources: ["secrets"] - verbs: ["get", "patch", "list", "create"] ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: {{ include "jupyterhub.autohttps.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} -subjects: -- kind: ServiceAccount - name: {{ include "jupyterhub.autohttps.fullname" . }} - apiGroup: -roleRef: - kind: Role - name: {{ include "jupyterhub.autohttps.fullname" . }} - apiGroup: rbac.authorization.k8s.io ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "jupyterhub.autohttps.fullname" . }} - labels: - {{- include "jupyterhub.labels" . 
| nindent 4 }} -{{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml deleted file mode 100755 index d57c135d..00000000 --- a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml +++ /dev/null @@ -1,25 +0,0 @@ -{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} -{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} -{{- if $autoHTTPS -}} -apiVersion: v1 -kind: Service -metadata: - name: {{ include "jupyterhub.proxy-http.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} - {{- with .Values.apps.jupyterhub.proxy.service.labels }} - {{- . | toYaml | nindent 4 }} - {{- end }} - {{- with .Values.apps.jupyterhub.proxy.service.annotations }} - annotations: - {{- . | toYaml | nindent 4 }} - {{- end }} -spec: - type: ClusterIP - selector: - {{- $_ := merge (dict "componentLabel" "proxy") . }} - {{- include "jupyterhub.matchLabels" $_ | nindent 4 }} - ports: - - port: 8000 - targetPort: http -{{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml index 6d63ba88..bb37b8f0 100755 --- a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml +++ b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml @@ -7,6 +7,9 @@ metadata: labels: {{- include "jupyterhub.labels" . | nindent 4 }} spec: + {{- if typeIs "int" .Values.apps.jupyterhub.proxy.chp.revisionHistoryLimit }} + revisionHistoryLimit: {{ .Values.apps.jupyterhub.proxy.chp.revisionHistoryLimit }} + {{- end }} replicas: 1 selector: matchLabels: @@ -35,7 +38,7 @@ spec: # match the k8s Secret during the first upgrade following an auth_token # was generated. checksum/auth-token: {{ include "jupyterhub.hub.config.ConfigurableHTTPProxy.auth_token" . 
| sha256sum | trunc 4 | quote }} - checksum/proxy-secret: {{ include (print $.Template.BasePath "/jupyterhub/hub/secret.yaml") . | sha256sum }} + checksum/proxy-secret: {{ include (print $.Template.BasePath "/jupyterhub/proxy/secret.yaml") . | sha256sum | quote }} {{- with .Values.apps.jupyterhub.proxy.annotations }} {{- . | toYaml | nindent 8 }} {{- end }} @@ -44,7 +47,10 @@ spec: {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} priorityClassName: {{ include "jupyterhub.priority.fullname" . }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.proxy.chp.nodeSelector }} + {{- with .Values.apps.jupyterhub.proxy.chp.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.proxy.chp.tolerations }} tolerations: {{- . | toYaml | nindent 8 }} @@ -135,6 +141,8 @@ spec: livenessProbe: initialDelaySeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.initialDelaySeconds }} periodSeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.timeoutSeconds }} + failureThreshold: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.failureThreshold }} httpGet: path: /_chp_healthz {{- if or $manualHTTPS $manualHTTPSwithsecret }} @@ -149,6 +157,8 @@ spec: readinessProbe: initialDelaySeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.initialDelaySeconds }} periodSeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.timeoutSeconds }} + failureThreshold: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.failureThreshold }} httpGet: path: /_chp_healthz {{- if or $manualHTTPS $manualHTTPSwithsecret }} diff --git a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml index 
adc82773..88a00be6 100755 --- a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml +++ b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml @@ -85,32 +85,24 @@ spec: egress: # proxy --> hub - - ports: - - port: 8081 - to: + - to: - podSelector: matchLabels: {{- $_ := merge (dict "componentLabel" "hub") . }} {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8081 # proxy --> singleuser-server - - ports: - - port: 8888 - to: + - to: - podSelector: matchLabels: {{- $_ := merge (dict "componentLabel" "singleuser-server") . }} {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8888 - # proxy --> Kubernetes internal DNS - - ports: - - protocol: UDP - port: 53 - - protocol: TCP - port: 53 - - {{- with .Values.apps.jupyterhub.proxy.chp.networkPolicy.egress }} - # proxy --> depends, but the default is everything - {{- . | toYaml | nindent 4 }} + {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.proxy.chp.networkPolicy)) }} + {{- . | nindent 4 }} {{- end }} {{- end }} diff --git a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml index 1846a3b0..155895b0 100755 --- a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml +++ b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml @@ -1,9 +1,5 @@ {{- if .Values.apps.jupyterhub.proxy.chp.pdb.enabled -}} -{{- if .Capabilities.APIVersions.Has "policy/v1" }} apiVersion: policy/v1 -{{- else }} -apiVersion: policy/v1beta1 -{{- end }} kind: PodDisruptionBudget metadata: name: {{ include "jupyterhub.proxy.fullname" . 
}} diff --git a/applications/jupyterhub/deploy/templates/proxy/service.yaml b/applications/jupyterhub/deploy/templates/proxy/service.yaml index 0d9ca5b2..f634ba9e 100755 --- a/applications/jupyterhub/deploy/templates/proxy/service.yaml +++ b/applications/jupyterhub/deploy/templates/proxy/service.yaml @@ -35,12 +35,15 @@ metadata: {{- end }} spec: selector: + # This service will target the autohttps pod if autohttps is configured, and + # the proxy pod if not. When autohttps is configured, the service proxy-http + # will be around to target the proxy pod directly. {{- if $autoHTTPS }} - component: autohttps + {{- $_ := merge (dict "componentLabel" "autohttps") . -}} + {{- include "jupyterhub.matchLabels" $_ | nindent 4 }} {{- else }} - component: proxy + {{- include "jupyterhub.matchLabels" . | nindent 4 }} {{- end }} - release: {{ .Release.Name }} ports: {{- if $HTTPS }} - name: https diff --git a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml index 588cf196..1bed905e 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml @@ -4,22 +4,9 @@ kind: PriorityClass metadata: name: {{ include "jupyterhub.priority.fullname" . }} annotations: - # FIXME: PriorityClasses must be added before the other resources reference - # them, and in the past a workaround was needed to accomplish this: - # to make the resource a Helm hook. - # - # To transition this resource to no longer be a Helm hook resource, - # we explicitly add ownership annotations/labels (in 1.0.0) which - # will allow a future upgrade (in 2.0.0) to remove all hook and - # ownership annotations/labels. 
- # - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-delete-policy: before-hook-creation - helm.sh/hook-weight: "-100" meta.helm.sh/release-name: "{{ .Release.Name }}" meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" labels: - app.kubernetes.io/managed-by: Helm {{- $_ := merge (dict "componentLabel" "default-priority") . }} {{- include "jupyterhub.labels" $_ | nindent 4 }} value: {{ .Values.apps.jupyterhub.scheduling.podPriority.defaultPriority }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml index b1dc6c5d..800ac208 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml @@ -3,11 +3,7 @@ The cluster autoscaler should be allowed to evict and reschedule these pods if it would help in order to scale down a node. */}} {{- if .Values.apps.jupyterhub.scheduling.userPlaceholder.enabled -}} -{{- if .Capabilities.APIVersions.Has "policy/v1" }} apiVersion: policy/v1 -{{- else }} -apiVersion: policy/v1beta1 -{{- end }} kind: PodDisruptionBudget metadata: name: {{ include "jupyterhub.user-placeholder.fullname" . }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml index e03497db..688e217c 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml @@ -5,22 +5,9 @@ kind: PriorityClass metadata: name: {{ include "jupyterhub.user-placeholder-priority.fullname" . 
}} annotations: - # FIXME: PriorityClasses must be added before the other resources reference - # them, and in the past a workaround was needed to accomplish this: - # to make the resource a Helm hook. - # - # To transition this resource to no longer be a Helm hook resource, - # we explicitly add ownership annotations/labels (in 1.0.0) which - # will allow a future upgrade (in 2.0.0) to remove all hook and - # ownership annotations/labels. - # - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-delete-policy: before-hook-creation - helm.sh/hook-weight: "-100" meta.helm.sh/release-name: "{{ .Release.Name }}" meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" labels: - app.kubernetes.io/managed-by: Helm {{- include "jupyterhub.labels" . | nindent 4 }} value: {{ .Values.apps.jupyterhub.scheduling.podPriority.userPlaceholderPriority }} globalDefault: false diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml index 114f6262..c243beee 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml @@ -16,6 +16,9 @@ metadata: {{- include "jupyterhub.labels" . | nindent 4 }} spec: podManagementPolicy: Parallel + {{- if typeIs "int" .Values.apps.jupyterhub.scheduling.userPlaceholder.revisionHistoryLimit }} + revisionHistoryLimit: {{ .Values.apps.jupyterhub.scheduling.userPlaceholder.revisionHistoryLimit }} + {{- end }} replicas: {{ .Values.apps.jupyterhub.scheduling.userPlaceholder.replicas }} selector: matchLabels: @@ -23,9 +26,16 @@ spec: serviceName: {{ include "jupyterhub.user-placeholder.fullname" . }} template: metadata: + {{- with .Values.apps.jupyterhub.scheduling.userPlaceholder.annotations }} + annotations: + {{- . 
| toYaml | nindent 8 }} + {{- end }} labels: {{- /* Changes here will cause the Deployment to restart the pods. */}} {{- include "jupyterhub.matchLabels" . | nindent 8 }} + {{- with .Values.apps.jupyterhub.scheduling.userPlaceholder.labels }} + {{- . | toYaml | nindent 8 }} + {{- end }} spec: {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} priorityClassName: {{ include "jupyterhub.user-placeholder-priority.fullname" . }} @@ -33,7 +43,10 @@ spec: {{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled }} schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.singleuser.nodeSelector }} + {{- with .Values.apps.jupyterhub.singleuser.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with concat .Values.apps.jupyterhub.scheduling.userPods.tolerations .Values.apps.jupyterhub.singleuser.extraTolerations }} tolerations: {{- . | toYaml | nindent 8 }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml index ef8a37f6..3e83b444 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml @@ -6,16 +6,28 @@ metadata: labels: {{- include "jupyterhub.labels" . | nindent 4 }} data: - # ref: https://kubernetes.io/docs/reference/scheduling/config/ + {{- /* + This is configuration of a k8s official kube-scheduler binary running in the + user-scheduler. 
+ + ref: https://kubernetes.io/docs/reference/scheduling/config/ + ref: https://kubernetes.io/docs/reference/config-api/kube-scheduler-config.v1/ + */}} config.yaml: | - apiVersion: kubescheduler.config.k8s.io/v1beta1 + apiVersion: kubescheduler.config.k8s.io/v1 kind: KubeSchedulerConfiguration leaderElection: - resourceLock: endpoints + resourceLock: leases resourceName: {{ include "jupyterhub.user-scheduler-lock.fullname" . }} resourceNamespace: "{{ .Release.Namespace }}" profiles: - schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.plugins }} plugins: - {{- .Values.apps.jupyterhub.scheduling.userScheduler.plugins | toYaml | nindent 10 }} + {{- . | toYaml | nindent 10 }} + {{- end }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.pluginConfig }} + pluginConfig: + {{- . | toYaml | nindent 10 }} + {{- end }} {{- end }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml index 1bcaf317..f22d0de8 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml @@ -6,6 +6,9 @@ metadata: labels: {{- include "jupyterhub.labels" . | nindent 4 }} spec: + {{- if typeIs "int" .Values.apps.jupyterhub.scheduling.userScheduler.revisionHistoryLimit }} + revisionHistoryLimit: {{ .Values.apps.jupyterhub.scheduling.userScheduler.revisionHistoryLimit }} + {{- end }} replicas: {{ .Values.apps.jupyterhub.scheduling.userScheduler.replicas }} selector: matchLabels: @@ -14,16 +17,25 @@ spec: metadata: labels: {{- include "jupyterhub.matchLabels" . | nindent 8 }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.labels }} + {{- . 
| toYaml | nindent 8 }} + {{- end }} annotations: checksum/config-map: {{ include (print $.Template.BasePath "/jupyterhub/scheduling/user-scheduler/configmap.yaml") . | sha256sum }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.annotations }} + {{- . | toYaml | nindent 8 }} + {{- end }} spec: - {{- if .Values.apps.jupyterhub.rbac.enabled }} - serviceAccountName: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} + {{ with include "jupyterhub.user-scheduler-serviceaccount.fullname" . }} + serviceAccountName: {{ . }} {{- end }} {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} priorityClassName: {{ include "jupyterhub.priority.fullname" . }} {{- end }} - nodeSelector: {{ toJson .Values.apps.jupyterhub.scheduling.userScheduler.nodeSelector }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.nodeSelector }} + nodeSelector: + {{- . | toYaml | nindent 8 }} + {{- end }} {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.scheduling.userScheduler.tolerations }} tolerations: {{- . | toYaml | nindent 8 }} @@ -44,13 +56,6 @@ spec: {{- end }} command: - /usr/local/bin/kube-scheduler - # NOTE: --leader-elect-... (new) and --lock-object-... (deprecated) - # flags are silently ignored in favor of whats defined in the - # passed KubeSchedulerConfiguration whenever --config is - # passed. - # - # ref: https://kubernetes.io/docs/reference/command-line-tools-reference/kube-scheduler/ - # # NOTE: --authentication-skip-lookup=true is used to avoid a # seemingly harmless error, if we need to not skip # "authentication lookup" in the future, see the linked issue. @@ -65,12 +70,14 @@ spec: livenessProbe: httpGet: path: /healthz - port: 10251 + scheme: HTTPS + port: 10259 initialDelaySeconds: 15 readinessProbe: httpGet: path: /healthz - port: 10251 + scheme: HTTPS + port: 10259 {{- with .Values.apps.jupyterhub.scheduling.userScheduler.resources }} resources: {{- . 
| toYaml | nindent 12 }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml index 04f2af8c..2c9c6de8 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml @@ -1,9 +1,5 @@ {{- if and .Values.apps.jupyterhub.scheduling.userScheduler.enabled .Values.apps.jupyterhub.scheduling.userScheduler.pdb.enabled -}} -{{- if .Capabilities.APIVersions.Has "policy/v1" }} apiVersion: policy/v1 -{{- else }} -apiVersion: policy/v1beta1 -{{- end }} kind: PodDisruptionBudget metadata: name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml index 083e0654..9c7fab73 100755 --- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml +++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml @@ -1,16 +1,5 @@ {{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled -}} -{{- if .Values.apps.jupyterhub.rbac.enabled }} -apiVersion: v1 -kind: ServiceAccount -metadata: - name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} - labels: - {{- include "jupyterhub.labels" . | nindent 4 }} - {{- with .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.annotations }} - annotations: - {{- . | toYaml | nindent 4 }} - {{- end }} ---- +{{- if .Values.apps.jupyterhub.rbac.create -}} kind: ClusterRole apiVersion: rbac.authorization.k8s.io/v1 metadata: @@ -19,13 +8,23 @@ metadata: {{- include "jupyterhub.labels" . | nindent 4 }} rules: # Copied from the system:kube-scheduler ClusterRole of the k8s version - # matching the kube-scheduler binary we use. 
A modification of two resource - # name references from kube-scheduler to user-scheduler-lock was made. + # matching the kube-scheduler binary we use. A modification has been made to + # resourceName fields to remain relevant for how we have named our resources + # in this Helm chart. # - # NOTE: These rules have been unchanged between 1.12 and 1.15, then changed in - # 1.16 and in 1.17, but unchanged in 1.18 and 1.19. + # NOTE: These rules have been: + # - unchanged between 1.12 and 1.15 + # - changed in 1.16 + # - changed in 1.17 + # - unchanged between 1.18 and 1.20 + # - changed in 1.21: get/list/watch permission for namespace, + # csidrivers, csistoragecapacities was added. + # - unchanged between 1.22 and 1.27 + # - changed in 1.28: permissions to get/update lock endpoint resource + # removed + # - unchanged between 1.28 and 1.29 # - # ref: https://github.com/kubernetes/kubernetes/blob/v1.19.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L696-L829 + # ref: https://github.com/kubernetes/kubernetes/blob/v1.29.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L721-L862 - apiGroups: - "" - events.k8s.io @@ -50,21 +49,6 @@ rules: verbs: - get - update - - apiGroups: - - "" - resources: - - endpoints - verbs: - - create - - apiGroups: - - "" - resourceNames: - - {{ include "jupyterhub.user-scheduler-lock.fullname" . }} - resources: - - endpoints - verbs: - - get - - update - apiGroups: - "" resources: @@ -159,13 +143,37 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - namespaces + verbs: + - get + - list + - watch + - apiGroups: + - storage.k8s.io + resources: + - csidrivers + verbs: + - get + - list + - watch + - apiGroups: + - storage.k8s.io + resources: + - csistoragecapacities + verbs: + - get + - list + - watch # Copied from the system:volume-scheduler ClusterRole of the k8s version # matching the kube-scheduler binary we use. 
# - # NOTE: These rules have not changed between 1.12 and 1.19. + # NOTE: These rules have not changed between 1.12 and 1.29. # - # ref: https://github.com/kubernetes/kubernetes/blob/v1.19.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L1213-L1240 + # ref: https://github.com/kubernetes/kubernetes/blob/v1.29.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L1283-L1310 - apiGroups: - "" resources: @@ -203,7 +211,7 @@ metadata: {{- include "jupyterhub.labels" . | nindent 4 }} subjects: - kind: ServiceAccount - name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} + name: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . }} namespace: "{{ .Release.Namespace }}" roleRef: kind: ClusterRole diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml new file mode 100644 index 00000000..67618b03 --- /dev/null +++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml @@ -0,0 +1,14 @@ +{{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled -}} +{{- if .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . }} + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.annotations }} + annotations: + {{- . 
| toYaml | nindent 4 }} + {{- end }} +{{- end }} +{{- end }} diff --git a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml index 3dfb1378..931a150f 100755 --- a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml +++ b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml @@ -62,23 +62,38 @@ spec: egress: # singleuser-server --> hub - - ports: - - port: 8081 - to: + - to: - podSelector: matchLabels: {{- $_ := merge (dict "componentLabel" "hub") . }} {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8081 - # singleuser-server --> Kubernetes internal DNS - - ports: - - protocol: UDP - port: 53 - - protocol: TCP - port: 53 + # singleuser-server --> proxy + # singleuser-server --> autohttps + # + # While not critical for core functionality, a user or library code may rely + # on communicating with the proxy or autohttps pods via a k8s Service it can + # detected from well known environment variables. + # + - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "proxy") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8000 + - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "autohttps") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} + ports: + - port: 8080 + - port: 8443 - {{- with .Values.apps.jupyterhub.singleuser.networkPolicy.egress }} - # singleuser-server --> depends, but the default is everything - {{- . | toYaml | nindent 4 }} + {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.singleuser.networkPolicy)) }} + {{- . 
| nindent 4 }} {{- end }} {{- end }} diff --git a/applications/jupyterhub/deploy/templates/singleuser/secret.yaml b/applications/jupyterhub/deploy/templates/singleuser/secret.yaml new file mode 100644 index 00000000..e6eab9bd --- /dev/null +++ b/applications/jupyterhub/deploy/templates/singleuser/secret.yaml @@ -0,0 +1,17 @@ +{{- if .Values.apps.jupyterhub.singleuser.extraFiles }} +kind: Secret +apiVersion: v1 +metadata: + name: {{ include "jupyterhub.singleuser.fullname" . }} + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} +type: Opaque +{{- with include "jupyterhub.extraFiles.data" .Values.apps.jupyterhub.singleuser.extraFiles }} +data: + {{- . | nindent 2 }} +{{- end }} +{{- with include "jupyterhub.extraFiles.stringData" .Values.apps.jupyterhub.singleuser.extraFiles }} +stringData: + {{- . | nindent 2 }} +{{- end }} +{{- end }} diff --git a/applications/jupyterhub/deploy/values.schema.yaml b/applications/jupyterhub/deploy/values.schema.yaml new file mode 100644 index 00000000..69c13a83 --- /dev/null +++ b/applications/jupyterhub/deploy/values.schema.yaml @@ -0,0 +1,3014 @@ +# This schema (a jsonschema in YAML format) is used to generate +# values.schema.json which is packaged with the Helm chart for client side +# validation by helm of values before template rendering. +# +# This schema is also used by our documentation system to build the +# configuration reference section based on the description fields. See +# docs/source/conf.py for that logic! +# +# We look to document everything we have default values for in values.yaml, but +# we don't look to enforce the perfect validation logic within this file. 
+# +# ref: https://json-schema.org/learn/getting-started-step-by-step.html +# +$schema: http://json-schema.org/draft-07/schema# +type: object +additionalProperties: false +required: + - imagePullSecrets + - hub + - proxy + - singleuser + - ingress + - prePuller + - custom + - cull + - debug + - rbac + - global +properties: + enabled: + type: [boolean, "null"] + description: | + `enabled` is ignored by the jupyterhub chart itself, but a chart depending + on the jupyterhub chart conditionally can make use this config option as + the condition. + fullnameOverride: + type: [string, "null"] + description: | + fullnameOverride and nameOverride allow you to adjust how the resources + part of the Helm chart are named. + + Name format | Resource types | fullnameOverride | nameOverride | Note + ------------------------- | -------------- | ---------------- | ------------ | - + component | namespaced | `""` | * | Default + release-component | cluster wide | `""` | * | Default + fullname-component | * | str | * | - + release-component | * | null | `""` | - + release-(name-)component | * | null | str | omitted if contained in release + release-(chart-)component | * | null | null | omitted if contained in release + + ```{admonition} Warning! + :class: warning + Changing fullnameOverride or nameOverride after the initial installation + of the chart isn't supported. Changing their values likely leads to a + reset of non-external JupyterHub databases, abandonment of users' storage, + and severed couplings to currently running user pods. + ``` + + If you are a developer of a chart depending on this chart, you should + avoid hardcoding names. If you want to reference the name of a resource in + this chart from a parent helm chart's template, you can make use of the + global named templates instead. + + ```yaml + # some pod definition of a parent chart helm template + schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . 
}} + ``` + + To access them from a container, you can also rely on the hub ConfigMap + that contains entries of all the resource names. + + ```yaml + # some container definition in a parent chart helm template + env: + - name: SCHEDULER_NAME + valueFrom: + configMapKeyRef: + name: {{ include "jupyterhub.user-scheduler.fullname" . }} + key: user-scheduler + ``` + + nameOverride: + type: [string, "null"] + description: | + See the documentation under [`fullnameOverride`](schema_fullnameOverride). + + imagePullSecret: + type: object + required: [create] + if: + properties: + create: + const: true + then: + additionalProperties: false + required: [registry, username, password] + description: | + This is configuration to create a k8s Secret resource of `type: + kubernetes.io/dockerconfigjson`, with credentials to pull images from a + private image registry. If you opt to do so, it will be available for use + by all pods in their respective `spec.imagePullSecrets` alongside other + k8s Secrets defined in `imagePullSecrets` or the pod respective + `...image.pullSecrets` configuration. + + In other words, using this configuration option can automate both the + otherwise manual creation of a k8s Secret and the otherwise manual + configuration to reference this k8s Secret in all the pods of the Helm + chart. + + ```sh + # you won't need to create a k8s Secret manually... + kubectl create secret docker-registry image-pull-secret \ + --docker-server= \ + --docker-username= \ + --docker-email= \ + --docker-password= + ``` + + If you just want to let all Pods reference an existing secret, use the + [`imagePullSecrets`](schema_imagePullSecrets) configuration instead. + properties: + create: + type: boolean + description: | + Toggle the creation of the k8s Secret with provided credentials to + access a private image registry. 
+ automaticReferenceInjection: + type: boolean + description: | + Toggle the automatic reference injection of the created Secret to all + pods' `spec.imagePullSecrets` configuration. + registry: + type: string + description: | + Name of the private registry you want to create a credential set for. + It will default to Docker Hub's image registry. + + Examples: + - https://index.docker.io/v1/ + - quay.io + - eu.gcr.io + - alexmorreale.privatereg.net + username: + type: string + description: | + Name of the user you want to use to connect to your private registry. + + For external gcr.io, you will use the `_json_key`. + + Examples: + - alexmorreale + - alex@pfc.com + - _json_key + password: + type: string + description: | + Password for the private image registry's user. + + Examples: + - plaintextpassword + - abc123SECRETzyx098 + + For gcr.io registries the password will be a big JSON blob for a + Google cloud service account, it should look something like below. + + ```yaml + password: |- + { + "type": "service_account", + "project_id": "jupyter-se", + "private_key_id": "f2ba09118a8d3123b3321bd9a7d6d0d9dc6fdb85", + ... + } + ``` + email: + type: [string, "null"] + description: | + Specification of an email is most often not required, but it is + supported. + + imagePullSecrets: + type: array + description: | + Chart wide configuration to _append_ k8s Secret references to all its + pod's `spec.imagePullSecrets` configuration. + + This will not override or get overridden by pod specific configuration, + but instead augment the pod specific configuration. + + You can use both the k8s native syntax, where each list element is like + `{"name": "my-secret-name"}`, or you can let list elements be strings + naming the secrets directly. 
+ + hub: + type: object + additionalProperties: false + required: [baseUrl] + properties: + revisionHistoryLimit: &revisionHistoryLimit + type: [integer, "null"] + minimum: 0 + description: | + Configures the resource's `spec.revisionHistoryLimit`. This is + available for Deployment, StatefulSet, and DaemonSet resources. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#revision-history-limit) + for more info. + config: + type: object + additionalProperties: true + description: | + JupyterHub and its components (authenticators, spawners, etc), are + Python classes that expose its configuration through + [_traitlets_](https://traitlets.readthedocs.io/en/stable/). With this + Helm chart configuration (`hub.config`), you can directly configure + the Python classes through _static_ YAML values. To _dynamically_ set + values, you need to use [`hub.extraConfig`](schema_hub.extraConfig) + instead. + + ```{admonition} Currently intended only for auth config + :class: warning + This config _currently_ (0.11.0) only influence the software in the + `hub` Pod, but some Helm chart config options such as + [`hub.baseUrl`](schema_hub.baseUrl) is used to set + `JupyterHub.base_url` in the `hub` Pod _and_ influence how other Helm + templates are rendered. + + As we have not yet mapped out all the potential configuration + conflicts except for the authentication related configuration options, + please accept that using it for something else at this point can lead + to issues. 
+ ``` + + __Example__ + + If you inspect documentation or some `jupyterhub_config.py` to contain + the following section: + + ```python + c.JupyterHub.admin_access = true + c.JupyterHub.admin_users = ["jovyan1", "jovyan2"] + c.KubeSpawner.k8s_api_request_timeout = 10 + c.GitHubOAuthenticator.allowed_organizations = ["jupyterhub"] + ``` + + Then, you would be able to represent it with this configuration like: + + ```yaml + hub: + config: + JupyterHub: + admin_access: true + admin_users: + - jovyan1 + - jovyan2 + KubeSpawner: + k8s_api_request_timeout: 10 + GitHubOAuthenticator: + allowed_organizations: + - jupyterhub + ``` + + ```{admonition} YAML limitations + :class: tip + You can't represent Python `Bytes` or `Set` objects in YAML directly. + ``` + + ```{admonition} Helm value merging + :class: tip + `helm` merges a Helm chart's default values with values passed with + the `--values` or `-f` flag. During merging, lists are replaced while + dictionaries are updated. + ``` + extraFiles: &extraFiles + type: object + additionalProperties: false + description: | + A dictionary with extra files to be injected into the pod's container + on startup. This can for example be used to inject: configuration + files, custom user interface templates, images, and more. + + ```yaml + # NOTE: "hub" is used in this example, but the configuration is the + # same for "singleuser". + hub: + extraFiles: + # The file key is just a reference that doesn't influence the + # actual file name. + : + # mountPath is required and must be the absolute file path. + mountPath: + + # Choose one out of the three ways to represent the actual file + # content: data, stringData, or binaryData. + # + # data should be set to a mapping (dictionary). It will in the + # end be rendered to either YAML, JSON, or TOML based on the + # filename extension that are required to be either .yaml, .yml, + # .json, or .toml. 
+ # + # If your content is YAML, JSON, or TOML, it can make sense to + # use data to represent it over stringData as data can be merged + # instead of replaced if set partially from separate Helm + # configuration files. + # + # Both stringData and binaryData should be set to a string + # representing the content, where binaryData should be the + # base64 encoding of the actual file content. + # + data: + myConfig: + myMap: + number: 123 + string: "hi" + myList: + - 1 + - 2 + stringData: | + hello world! + binaryData: aGVsbG8gd29ybGQhCg== + + # mode is by default 0644 and you can optionally override it + # either by octal notation (example: 0400) or decimal notation + # (example: 256). + mode: + ``` + + **Using --set-file** + + To avoid embedding entire files in the Helm chart configuration, you + can use the `--set-file` flag during `helm upgrade` to set the + stringData or binaryData field. + + ```yaml + hub: + extraFiles: + my_image: + mountPath: /usr/local/share/jupyterhub/static/my_image.png + + # Files in /usr/local/etc/jupyterhub/jupyterhub_config.d are + # automatically loaded in alphabetical order of the final file + # name when JupyterHub starts. + my_config: + mountPath: /usr/local/etc/jupyterhub/jupyterhub_config.d/my_jupyterhub_config.py + ``` + + ```bash + # --set-file expects a text based file, so you need to base64 encode + # it manually first. + base64 my_image.png > my_image.png.b64 + + helm upgrade <...> \ + --set-file hub.extraFiles.my_image.binaryData=./my_image.png.b64 \ + --set-file hub.extraFiles.my_config.stringData=./my_jupyterhub_config.py + ``` + + **Common uses** + + 1. **JupyterHub template customization** + + You can replace the default JupyterHub user interface templates in + the hub pod by injecting new ones to + `/usr/local/share/jupyterhub/templates`. These can in turn + reference custom images injected to + `/usr/local/share/jupyterhub/static`. + + 1. 
**JupyterHub standalone file config** + + Instead of embedding JupyterHub python configuration as a string + within a YAML file through + [`hub.extraConfig`](schema_hub.extraConfig), you can inject a + standalone .py file into + `/usr/local/etc/jupyterhub/jupyterhub_config.d` that is + automatically loaded. + + 1. **Flexible configuration** + + By injecting files, you don't have to embed them in a docker image + that you have to rebuild. + + If your configuration file is a YAML/JSON/TOML file, you can also + use `data` instead of `stringData` which allow you to set various + configuration in separate Helm config files. This can be useful to + help dependent charts override only some configuration part of the + file, or to allow for the configuration be set through multiple + Helm configuration files. + + **Limitations** + + 1. File size + + The files in `hub.extraFiles` and `singleuser.extraFiles` are + respectively stored in their own k8s Secret resource. As k8s + Secret's are limited, typically to 1MB, you will be limited to a + total file size of less than 1MB as there is also base64 encoding + that takes place reducing available capacity to 75%. + + 2. File updates + + The files that are mounted are only set during container startup. + This is [because we use + `subPath`](https://kubernetes.io/docs/concepts/storage/volumes/#secret) + as is required to avoid replacing the content of the entire + directory we mount in. + patternProperties: + ".*": + type: object + additionalProperties: false + required: [mountPath] + oneOf: + - required: [data] + - required: [stringData] + - required: [binaryData] + properties: + mountPath: + type: string + data: + type: object + additionalProperties: true + stringData: + type: string + binaryData: + type: string + mode: + type: number + baseUrl: + type: string + description: | + This is the equivalent of c.JupyterHub.base_url, but it is also needed + by the Helm chart in general. 
So, instead of setting
+          c.JupyterHub.base_url, use this configuration.
+      command:
+        type: array
+        description: |
+          A list of strings to be used to replace the JupyterHub image's
+          `ENTRYPOINT` entry. Note that in k8s lingo, the Dockerfile's
+          `ENTRYPOINT` is called `command`. The list of strings will be expanded
+          with Helm's template function `tpl` which can render Helm template
+          logic inside curly braces (`{{... }}`).
+
+          This could be useful to wrap the invocation of JupyterHub itself in
+          some custom way.
+
+          For more details, see the [Kubernetes
+          documentation](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/).
+      args:
+        type: array
+        description: |
+          A list of strings to be used to replace the JupyterHub image's `CMD`
+          entry as well as the Helm chart's default way to start JupyterHub.
+          Note that in k8s lingo, the Dockerfile's `CMD` is called `args`. The
+          list of strings will be expanded with Helm's template function `tpl`
+          which can render Helm template logic inside curly braces (`{{... }}`).
+
+          ```{warning}
+          By replacing the entire configuration file, which is mounted to
+          `/usr/local/etc/jupyterhub/jupyterhub_config.py` by the Helm chart,
+          instead of appending to it with `hub.extraConfig`, you expose your
+          deployment for issues stemming from getting out of sync with the Helm
+          chart's config file.
+
+          These kinds of issues will be significantly harder to debug and
+          diagnose, and could, due to this, cause a lot of time expenditure
+          for both the community maintaining the Helm chart as well as yourself,
+          even if this wasn't the reason for the issue.
+
+          Due to this, we ask that you do your _absolute best_ to avoid replacing
+          the default provided `jupyterhub_config.py` file. It can often be
+          possible.
For example, if your goal is to have a dedicated .py file
+          for more extensive additions that you can syntax highlight and such
+          and feel limited by passing code in `hub.extraConfig` which is part of
+          a YAML file, you can use [this
+          trick](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues/1580#issuecomment-707776237)
+          instead.
+          ```
+
+          ```yaml
+          hub:
+            args:
+              - "jupyterhub"
+              - "--config"
+              - "/usr/local/etc/jupyterhub/jupyterhub_config.py"
+              - "--debug"
+              - "--upgrade-db"
+          ```
+
+          For more details, see the [Kubernetes
+          documentation](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/).
+      cookieSecret:
+        type: [string, "null"]
+        description: |
+          ```{note}
+          As of version 1.0.0 this will automatically be generated and there is
+          no need to set it manually.
+
+          If you wish to reset a generated key, you can use `kubectl edit` on
+          the k8s Secret typically named `hub` and remove the
+          `hub.config.JupyterHub.cookie_secret` entry in the k8s Secret, then
+          perform a new `helm upgrade`.
+          ```
+
+          A 32-byte cryptographically secure randomly generated string used to sign values of
+          secure cookies set by the hub. If unset, jupyterhub will generate one on startup and
+          save it in the file `jupyterhub_cookie_secret` in the `/srv/jupyterhub` directory of
+          the hub container. A value set here will make JupyterHub overwrite any previous file.
+
+          You do not need to set this at all if you are using the default configuration for
+          storing databases - sqlite on a persistent volume (with `hub.db.type` set to the
+          default `sqlite-pvc`). If you are using an external database, then you must set this
+          value explicitly - or your users will keep getting logged out each time the hub pod
+          restarts.
+
+          Changing this value will cause all user logins to be invalidated.
If this secret leaks, + *immediately* change it to something else, or user data can be compromised + + ```sh + # to generate a value, run + openssl rand -hex 32 + ``` + image: &image-spec + type: object + additionalProperties: false + required: [name, tag] + description: | + Set custom image name, tag, pullPolicy, or pullSecrets for the pod. + properties: + name: + type: string + description: | + The name of the image, without the tag. + + ``` + # example name + gcr.io/my-project/my-image + ``` + tag: + type: string + description: | + The tag of the image to pull. This is the value following `:` in + complete image specifications. + + ``` + # example tags + v1.11.1 + zhy270a + ``` + pullPolicy: + enum: [null, "", IfNotPresent, Always, Never] + description: | + Configures the Pod's `spec.imagePullPolicy`. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/containers/images/#updating-images) + for more info. + pullSecrets: + type: array + description: | + A list of references to existing Kubernetes Secrets with + credentials to pull the image. + + This Pod's final `imagePullSecrets` k8s specification will be a + combination of: + + 1. This list of k8s Secrets, specific for this pod. + 2. The list of k8s Secrets, for use by all pods in the Helm chart, + declared in this Helm charts configuration called + `imagePullSecrets`. + 3. A k8s Secret, for use by all pods in the Helm chart, if + conditionally created from image registry credentials provided + under `imagePullSecret` if `imagePullSecret.create` is set to + true. + + ```yaml + # example - k8s native syntax + pullSecrets: + - name: my-k8s-secret-with-image-registry-credentials + + # example - simplified syntax + pullSecrets: + - my-k8s-secret-with-image-registry-credentials + ``` + networkPolicy: &networkPolicy-spec + type: object + additionalProperties: false + description: | + This configuration regards the creation and configuration of a k8s + _NetworkPolicy resource_. 
+ properties: + enabled: + type: boolean + description: | + Toggle the creation of the NetworkPolicy resource targeting this + pod, and by doing so, restricting its communication to only what + is explicitly allowed in the NetworkPolicy. + ingress: + type: array + description: | + Additional ingress rules to add besides those that are required + for core functionality. + egress: + type: array + description: | + Additional egress rules to add besides those that are required for + core functionality and those added via + [`.egressAllowRules`](schema_hub.networkPolicy.egressAllowRules). + + ```{versionchanged} 2.0.0 + The default value changed from providing one very permissive rule + allowing all egress to providing no rule. The permissive rule is + still provided via + [`.egressAllowRules`](schema_hub.networkPolicy.egressAllowRules) + set to true though. + ``` + + As an example, below is a configuration that disables the more + broadly permissive `.privateIPs` egress allow rule for the hub + pod, and instead provides tightly scoped permissions to access a + specific k8s local service as identified by pod labels. + + ```yaml + hub: + networkPolicy: + egressAllowRules: + privateIPs: false + egress: + - to: + - podSelector: + matchLabels: + app: my-k8s-local-service + ports: + - protocol: TCP + port: 5978 + ``` + egressAllowRules: + type: object + additionalProperties: false + description: | + This is a set of predefined rules that when enabled will be added + to the NetworkPolicy list of egress rules. + + The resulting egress rules will be a composition of: + - rules specific for the respective pod(s) function within the + Helm chart + - rules based on enabled `egressAllowRules` flags + - rules explicitly specified by the user + + ```{note} + Each flag under this configuration will not render into a + dedicated rule in the NetworkPolicy resource, but instead combine + with the other flags to a reduced set of rules to avoid a + performance penalty. 
+ ``` + + ```{versionadded} 2.0.0 + ``` + properties: + cloudMetadataServer: + type: boolean + description: | + Defaults to `false` for singleuser servers, but to `true` for + all other network policies. + + When enabled this rule allows the respective pod(s) to + establish outbound connections to the cloud metadata server. + + Note that the `nonPrivateIPs` rule is allowing all non Private + IP ranges but makes an exception for the cloud metadata + server, leaving this as the definitive configuration to allow + access to the cloud metadata server. + + ```{versionchanged} 3.0.0 + This configuration is not allowed to be configured true at the + same time as + [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) + to avoid an ambiguous configuration. + ``` + dnsPortsCloudMetadataServer: + type: boolean + description: | + Defaults to `true` for all network policies. + + When enabled this rule allows the respective pod(s) to + establish outbound connections to the cloud metadata server + via port 53. + + Relying on this rule for the singleuser config should go hand + in hand with disabling + [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) + to avoid an ambiguous configuration. + + Known situations when this rule can be relevant: + + - In GKE clusters with Cloud DNS that is reached at the + cloud metadata server's non-private IP. + + ```{note} + This chart doesn't know how to identify the DNS server that + pods will rely on due to variations between how k8s clusters + have been setup. Due to that, multiple rules are enabled by + default to ensure DNS connectivity. + ``` + + ```{versionadded} 3.0.0 + ``` + dnsPortsKubeSystemNamespace: + type: boolean + description: | + Defaults to `true` for all network policies. + + When enabled this rule allows the respective pod(s) to + establish outbound connections to pods in the kube-system + namespace via port 53. 
+
+              Known situations when this rule can be relevant:
+
+              - GKE, EKS, AKS, and other clusters relying directly on
+                `kube-dns` or `coredns` pods in the `kube-system` namespace.
+
+              ```{note}
+              This chart doesn't know how to identify the DNS server that
+              pods will rely on due to variations between how k8s clusters
+              have been setup. Due to that, multiple rules are enabled by
+              default to ensure DNS connectivity.
+              ```
+
+              ```{versionadded} 3.0.0
+              ```
+          dnsPortsPrivateIPs:
+            type: boolean
+            description: |
+              Defaults to `true` for all network policies.
+
+              When enabled this rule allows the respective pod(s) to
+              establish outbound connections to private IPs via port 53.
+
+              Known situations when this rule can be relevant:
+
+              - GKE clusters relying on a DNS server indirectly via a node
+                local DNS cache at an unknown private IP.
+
+              ```{note}
+              This chart doesn't know how to identify the DNS server that
+              pods will rely on due to variations between how k8s clusters
+              have been setup. Due to that, multiple rules are enabled by
+              default to ensure DNS connectivity.
+              ```
+
+              ```{warning}
+              This rule is not expected to work in clusters relying on
+              Cilium to enforce the NetworkPolicy rules (includes GKE
+              clusters with Dataplane v2), this is due to a [known
+              limitation](https://github.com/cilium/cilium/issues/9209).
+              ```
+          nonPrivateIPs:
+            type: boolean
+            description: |
+              Defaults to `true` for all network policies.
+
+              When enabled this rule allows the respective pod(s) to
+              establish outbound connections to the non-private IP ranges
+              with the exception of the cloud metadata server. This means
+              respective pod(s) can establish connections to the internet
+              but not (say) an unsecured prometheus server running in the
+              same cluster.
+          privateIPs:
+            type: boolean
+            description: |
+              Defaults to `false` for singleuser servers, but to `true` for
+              all other network policies.
+
+              Private IPs refer to the IP ranges `10.0.0.0/8`,
+              `172.16.0.0/12`, `192.168.0.0/16`.
+ + When enabled this rule allows the respective pod(s) to + establish outbound connections to the internal k8s cluster. + This means users can access the internet but not (say) an + unsecured prometheus server running in the same cluster. + + Since not all workloads in the k8s cluster may have + NetworkPolicies setup to restrict their incoming connections, + having this set to false can be a good defense against + malicious intent from someone in control of software in these + pods. + + If possible, try to avoid setting this to true as it gives + broad permissions that could be specified more directly via + the [`.egress`](schema_singleuser.networkPolicy.egress). + + ```{warning} + This rule is not expected to work in clusters relying on + Cilium to enforce the NetworkPolicy rules (includes GKE + clusters with Dataplane v2), this is due to a [known + limitation](https://github.com/cilium/cilium/issues/9209). + ``` + interNamespaceAccessLabels: + enum: [accept, ignore] + description: | + This configuration option determines if both namespaces and pods + in other namespaces, that have specific access labels, should be + accepted to allow ingress (set to `accept`), or, if the labels are + to be ignored when applied outside the local namespace (set to + `ignore`). + + The available access labels for respective NetworkPolicy resources + are: + + - `hub.jupyter.org/network-access-hub: "true"` (hub) + - `hub.jupyter.org/network-access-proxy-http: "true"` (proxy.chp, proxy.traefik) + - `hub.jupyter.org/network-access-proxy-api: "true"` (proxy.chp) + - `hub.jupyter.org/network-access-singleuser: "true"` (singleuser) + allowedIngressPorts: + type: array + description: | + A rule to allow ingress on these ports will be added no matter + what the origin of the request is. The default setting for + `proxy.chp` and `proxy.traefik`'s networkPolicy configuration is + `[http, https]`, while it is `[]` for other networkPolicies. 
+ + Note that these port names or numbers target a Pod's port name or + number, not a k8s Service's port name or number. + db: + type: object + additionalProperties: false + properties: + type: + enum: [sqlite-pvc, sqlite-memory, mysql, postgres, other] + description: | + Type of database backend to use for the hub database. + + The Hub requires a persistent database to function, and this lets you specify + where it should be stored. + + The various options are: + + 1. **sqlite-pvc** + + Use an `sqlite` database kept on a persistent volume attached to the hub. + + By default, this disk is created by the cloud provider using + *dynamic provisioning* configured by a [storage + class](https://kubernetes.io/docs/concepts/storage/storage-classes/). + You can customize how this disk is created / attached by + setting various properties under `hub.db.pvc`. + + This is the default setting, and should work well for most cloud provider + deployments. + + 2. **sqlite-memory** + + Use an in-memory `sqlite` database. This should only be used for testing, + since the database is erased whenever the hub pod restarts - causing the hub + to lose all memory of users who had logged in before. + + When using this for testing, make sure you delete all other objects that the + hub has created (such as user pods, user PVCs, etc) every time the hub restarts. + Otherwise you might run into errors about duplicate resources. + + 3. **mysql** + + Use an externally hosted mysql database. + + You have to specify an sqlalchemy connection string for the mysql database you + want to connect to in `hub.db.url` if using this option. + + The general format of the connection string is: + ``` + mysql+pymysql://:@:/ + ``` + + The user specified in the connection string must have the rights to create + tables in the database specified. + + 4. **postgres** + + Use an externally hosted postgres database. 
+ + You have to specify an sqlalchemy connection string for the postgres database you + want to connect to in `hub.db.url` if using this option. + + The general format of the connection string is: + ``` + postgresql+psycopg2://:@:/ + ``` + + The user specified in the connection string must have the rights to create + tables in the database specified. + + 5. **other** + + Use an externally hosted database of some kind other than mysql + or postgres. + + When using _other_, the database password must be passed as + part of [hub.db.url](schema_hub.db.url) as + [hub.db.password](schema_hub.db.password) will be ignored. + pvc: + type: object + additionalProperties: false + required: [storage] + description: | + Customize the Persistent Volume Claim used when `hub.db.type` is `sqlite-pvc`. + properties: + annotations: + type: object + additionalProperties: false + patternProperties: &labels-and-annotations-patternProperties + ".*": + type: string + description: | + Annotations to apply to the PVC containing the sqlite database. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) + for more details about annotations. + selector: + type: object + additionalProperties: true + description: | + Label selectors to set for the PVC containing the sqlite database. + + Useful when you are using a specific PV, and want to bind to + that and only that. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims) + for more details about using a label selector for what PV to + bind to. + storage: + type: string + description: | + Size of disk to request for the database disk. + accessModes: + type: array + items: + type: [string, "null"] + description: | + AccessModes contains the desired access modes the volume + should have. 
See [the k8s + documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1) + for more information. + storageClassName: + type: [string, "null"] + description: | + Name of the StorageClass required by the claim. + + If this is a blank string it will be set to a blank string, + while if it is null, it will not be set at all. + subPath: + type: [string, "null"] + description: | + Path within the volume from which the container's volume + should be mounted. Defaults to "" (volume's root). + upgrade: + type: [boolean, "null"] + description: | + Users with external databases need to opt-in for upgrades of the + JupyterHub specific database schema if needed as part of a + JupyterHub version upgrade. + url: + type: [string, "null"] + description: | + Connection string when `hub.db.type` is mysql or postgres. + + See documentation for `hub.db.type` for more details on the format of this property. + password: + type: [string, "null"] + description: | + Password for the database when `hub.db.type` is mysql or postgres. + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the hub pod. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + initContainers: + type: array + description: | + list of initContainers to be run with hub pod. See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) + + ```yaml + hub: + initContainers: + - name: init-myservice + image: busybox:1.28 + command: ['sh', '-c', 'command1'] + - name: init-mydb + image: busybox:1.28 + command: ['sh', '-c', 'command2'] + ``` + extraEnv: + type: [object, array] + additionalProperties: true + description: | + Extra environment variables that should be set for the hub pod. 
+ + Environment variables are usually used to: + - Pass parameters to some custom code in `hub.extraConfig`. + - Configure code running in the hub pod, such as an authenticator or + spawner. + + String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which + is a part of Kubernetes. + + ```yaml + hub: + extraEnv: + # basic notation (for literal values only) + MY_ENV_VARS_NAME1: "my env var value 1" + + # explicit notation (the "name" field takes precedence) + HUB_NAMESPACE: + name: HUB_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + + # implicit notation (the "name" field is implied) + PREFIXED_HUB_NAMESPACE: + value: "my-prefix-$(HUB_NAMESPACE)" + SECRET_VALUE: + valueFrom: + secretKeyRef: + name: my-k8s-secret + key: password + ``` + + For more information, see the [Kubernetes EnvVar + specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). + extraConfig: + type: object + additionalProperties: true + description: | + Arbitrary extra python based configuration that should be in `jupyterhub_config.py`. + + This is the *escape hatch* - if you want to configure JupyterHub to do something specific + that is not present here as an option, you can write the raw Python to do it here. + + extraConfig is a *dict*, so there can be multiple configuration + snippets under different names. The configuration sections are run in + alphabetical order based on the keys. + + Non-exhaustive examples of things you can do here: + - Subclass authenticator / spawner to do a custom thing + - Dynamically launch different images for different sets of images + - Inject an auth token from GitHub authenticator into user pod + - Anything else you can think of! + + Since this is usually a multi-line string, you want to format it using YAML's + [| operator](https://yaml.org/spec/1.2.2/#23-scalars). 
+ + For example: + + ```yaml + hub: + extraConfig: + myConfig.py: | + c.JupyterHub.something = 'something' + c.Spawner.something_else = 'something else' + ``` + + ```{note} + No code validation is performed until JupyterHub loads it! If you make + a typo here, it will probably manifest itself as the hub pod failing + to start up and instead entering an `Error` state or the subsequent + `CrashLoopBackoff` state. + + To make use of your own programs linters etc, it would be useful to + not embed Python code inside a YAML file. To do that, consider using + [`hub.extraFiles`](schema_hub.extraFiles) and mounting a file to + `/usr/local/etc/jupyterhub/jupyterhub_config.d` in order to load your + extra configuration logic. + ``` + + fsGid: + type: [integer, "null"] + minimum: 0 + # This schema entry is needed to help us print a more helpful error + # message in NOTES.txt if hub.fsGid is set. + # + description: | + ```{note} + Removed in version 2.0.0. Use + [`hub.podSecurityContext`](schema_hub.podSecurityContext) and specify + `fsGroup` instead. + ``` + service: + type: object + additionalProperties: false + description: | + Object to configure the service the JupyterHub will be exposed on by the Kubernetes server. + properties: + type: + enum: [ClusterIP, NodePort, LoadBalancer, ExternalName] + description: | + The Kubernetes ServiceType to be used. + + The default type is `ClusterIP`. + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types) + to learn more about service types. + ports: + type: object + additionalProperties: false + description: | + Object to configure the ports the hub service will be deployed on. + properties: + nodePort: + type: [integer, "null"] + minimum: 0 + description: | + The nodePort to deploy the hub service on. 
+ annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Kubernetes annotations to apply to the hub service. + extraPorts: + type: array + description: | + Extra ports to add to the Hub Service object besides `hub` / `8081`. + This should be an array that includes `name`, `port`, and `targetPort`. + See [Multi-port Services](https://kubernetes.io/docs/concepts/services-networking/service/#multi-port-services) for more details. + loadBalancerIP: + type: [string, "null"] + description: | + A public IP address the hub Kubernetes service should be exposed + on. To expose the hub directly is not recommended. Instead route + traffic through the proxy-public service towards the hub. + + pdb: &pdb-spec + type: object + additionalProperties: false + description: | + Configure a PodDisruptionBudget for this Deployment. + + These are disabled by default for our deployments that don't support + being run in parallel with multiple replicas. Only the user-scheduler + currently supports being run in parallel with multiple replicas. If + they are enabled for a Deployment with only one replica, they will + block `kubectl drain` of a node for example. + + Note that if you aim to block scaling down a node with the + hub/proxy/autohttps pod that would cause disruptions of the + deployment, then you should instead annotate the pods of the + Deployment [as described + here](https://github.com/kubernetes/autoscaler/blob/HEAD/cluster-autoscaler/FAQ.md#what-types-of-pods-can-prevent-ca-from-removing-a-node). + + "cluster-autoscaler.kubernetes.io/safe-to-evict": "false" + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/workloads/pods/disruptions/) + for more details about disruptions. + properties: + enabled: + type: boolean + description: | + Decides if a PodDisruptionBudget is created targeting the + Deployment's pods. 
+          maxUnavailable:
+            type: [integer, "null"]
+            description: |
+              The maximum number of pods that can be unavailable during
+              voluntary disruptions.
+          minAvailable:
+            type: [integer, "null"]
+            description: |
+              The minimum number of pods required to be available during
+              voluntary disruptions.
+      existingSecret:
+        type: [string, "null"]
+        description: |
+          This option allows you to provide the name of an existing k8s Secret to
+          use alongside of the chart managed k8s Secret. The content of this k8s
+          Secret will be merged with the chart managed k8s Secret, giving
+          priority to the self-managed k8s Secret.
+
+          ```{warning}
+          1. The self managed k8s Secret must mirror the structure in the chart
+             managed secret.
+          2. [`proxy.secretToken`](schema_proxy.secretToken) (aka.
+             `hub.config.ConfigurableHTTPProxy.auth_token`) is only read from
+             the chart managed k8s Secret.
+          ```
+      nodeSelector: &nodeSelector-spec
+        type: object
+        additionalProperties: true
+        description: |
+          An object with key value pairs representing labels. K8s Nodes are
+          required to match all these labels for this Pod to be scheduled on
+          them.
+
+          ```yaml
+          disktype: ssd
+          nodetype: awesome
+          ```
+
+          See [the Kubernetes
+          documentation](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#nodeselector)
+          for more details.
+      tolerations: &tolerations-spec
+        type: array
+        description: |
+          Tolerations allow a pod to be scheduled on nodes with taints. These
+          tolerations are additional tolerations to the tolerations common to
+          all pods of their respective kind
+          ([scheduling.corePods.tolerations](schema_scheduling.corePods.tolerations),
+          [scheduling.userPods.tolerations](schema_scheduling.userPods.tolerations)).
+
+          Pass this field an array of
+          [`Toleration`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#toleration-v1-core)
+          objects.
+
+          See the [Kubernetes
+          docs](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/)
+          for more info.
+ activeServerLimit: + type: [integer, "null"] + description: &jupyterhub-native-config-description | + JupyterHub native configuration, see the [JupyterHub + documentation](https://jupyterhub.readthedocs.io/en/stable/reference/api/app.html) + for more information. + allowNamedServers: + type: [boolean, "null"] + description: *jupyterhub-native-config-description + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + K8s annotations for the hub pod. + authenticatePrometheus: + type: [boolean, "null"] + description: *jupyterhub-native-config-description + concurrentSpawnLimit: + type: [integer, "null"] + description: *jupyterhub-native-config-description + consecutiveFailureLimit: + type: [integer, "null"] + description: *jupyterhub-native-config-description + podSecurityContext: &podSecurityContext-spec + additionalProperties: true + description: | + A k8s native specification of the pod's security context, see [the + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podsecuritycontext-v1-core) + for details. + containerSecurityContext: &containerSecurityContext-spec + type: object + additionalProperties: true + description: | + A k8s native specification of the container's security context, see [the + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#securitycontext-v1-core) + for details. + deploymentStrategy: + type: object + additionalProperties: false + properties: + rollingUpdate: + type: [string, "null"] + type: + type: [string, "null"] + description: | + JupyterHub does not support running in parallel, due to this we + default to using a deployment strategy of Recreate. + extraContainers: &extraContainers-spec + type: array + description: | + Additional containers for the Pod. Use a k8s native syntax. 
+ extraVolumeMounts: &extraVolumeMounts-spec + type: array + description: | + Additional volume mounts for the Container. Use a k8s native syntax. + extraVolumes: &extraVolumes-spec + type: array + description: | + Additional volumes for the Pod. Use a k8s native syntax. + livenessProbe: &probe-spec + type: object + additionalProperties: true + required: [enabled] + if: + properties: + enabled: + const: true + then: + description: | + This config option is like the k8s native specification of a + container probe, except that it also supports an `enabled` boolean + flag. + + See [the k8s + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#probe-v1-core) + for more details. + readinessProbe: *probe-spec + namedServerLimitPerUser: + type: [integer, "null"] + description: *jupyterhub-native-config-description + redirectToServer: + type: [boolean, "null"] + description: *jupyterhub-native-config-description + resources: &resources-spec + type: object + additionalProperties: true + description: | + A k8s native specification of resources, see [the + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core). + lifecycle: &lifecycle-spec + type: object + additionalProperties: false + description: | + A k8s native specification of lifecycle hooks on the container, see [the + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#lifecycle-v1-core). + properties: + postStart: + type: object + additionalProperties: true + preStop: + type: object + additionalProperties: true + services: + type: object + additionalProperties: true + description: | + This is where you register JupyterHub services. For details on how to + configure these services in this Helm chart just keep reading but for + details on services themselves instead read [JupyterHub's + documentation](https://jupyterhub.readthedocs.io/en/stable/reference/api/service.html). 
+ + ```{note} + Only a selection of JupyterHub's configuration options that can be + configured for a service are documented below. All configuration set + here will be applied even if this Helm chart doesn't recognize it. + ``` + + JupyterHub's native configuration accepts a list of service objects, + this Helm chart only accept a dictionary where each key represents the + name of a service and the value is the actual service objects. + + When configuring JupyterHub services via this Helm chart, the `name` + field can be omitted as it can be implied by the dictionary key. + Further, the `api_token` field can be omitted as it will be + automatically generated as of version 1.1.0 of this Helm chart. + + If you have an external service that needs to access the automatically + generated api_token for the service, you can access it from the `hub` + k8s Secret part of this Helm chart under the key + `hub.services.my-service-config-key.apiToken`. + + Here is an example configuration of two services where the first + explicitly sets a name and api_token, while the second omits those and + lets the name be implied from the key name and the api_token be + automatically generated. + + ```yaml + hub: + services: + my-service-1: + admin: true + name: my-explicitly-set-service-name + api_token: my-explicitly-set-api_token + + # the name of the following service will be my-service-2 + # the api_token of the following service will be generated + my-service-2: {} + ``` + + If you develop a Helm chart depending on the JupyterHub Helm chart and + want to let some Pod's environment variable be populated with the + api_token of a service registered like above, then do something along + these lines. + + ```yaml + # ... container specification of a pod ... + env: + - name: MY_SERVICE_1_API_TOKEN + valueFrom: + secretKeyRef: + # Don't hardcode the name, use the globally accessible + # named templates part of the JupyterHub Helm chart. + name: {{ include "jupyterhub.hub.fullname" . 
}} + # Note below the use of the configuration key my-service-1 + # rather than the explicitly set service name. + key: hub.services.my-service-1.apiToken + ``` + properties: + name: + type: string + description: | + The name can be implied via the key name under which this + service is configured, and is due to that allowed to be + omitted in this Helm chart configuration of JupyterHub. + admin: + type: boolean + command: + type: [string, array] + url: + type: string + api_token: + type: [string, "null"] + description: | + The api_token will be automatically generated if not + explicitly set. It will also be exposed in via a k8s Secret + part of this Helm chart under a specific key. + + See the documentation under + [`hub.services`](schema_hub.services) for details about this. + apiToken: + type: [string, "null"] + description: | + An alias for api_token provided for backward compatibility by + the JupyterHub Helm chart that will be transformed to + api_token. + loadRoles: + type: object + additionalProperties: true + description: | + This is where you should define JupyterHub roles and apply them to + JupyterHub users, groups, and services to grant them additional + permissions as defined in JupyterHub's RBAC system. + + Complement this documentation with [JupyterHub's + documentation](https://jupyterhub.readthedocs.io/en/stable/rbac/roles.html#defining-roles) + about `load_roles`. + + Note that while JupyterHub's native configuration `load_roles` accepts + a list of role objects, this Helm chart only accepts a dictionary where + each key represents the name of a role and the value is the actual + role object. + + ```yaml + hub: + loadRoles: + teacher: + description: Access to users' information and group membership + + # this role provides permissions to... + scopes: [users, groups] + + # this role will be assigned to... 
+ users: [erik] + services: [grading-service] + groups: [teachers] + ``` + + When configuring JupyterHub roles via this Helm chart, the `name` + field can be omitted as it can be implied by the dictionary key. + shutdownOnLogout: + type: [boolean, "null"] + description: *jupyterhub-native-config-description + templatePaths: + type: array + description: *jupyterhub-native-config-description + templateVars: + type: object + additionalProperties: true + description: *jupyterhub-native-config-description + serviceAccount: &serviceAccount + type: object + required: [create] + additionalProperties: false + description: | + Configuration for a k8s ServiceAccount dedicated for use by the + specific pod which this configuration is nested under. + properties: + create: + type: boolean + description: | + Whether or not to create the `ServiceAccount` resource. + name: + type: ["string", "null"] + description: | + This configuration serves multiple purposes: + + - It will be the `serviceAccountName` referenced by related Pods. + - If `create` is set, the created ServiceAccount resource will be named like this. + - If [`rbac.create`](schema_rbac.create) is set, the associated (Cluster)RoleBindings will bind to this name. + + If not explicitly provided, a default name will be used. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Kubernetes annotations to apply to the k8s ServiceAccount. + extraPodSpec: &extraPodSpec-spec + type: object + additionalProperties: true + description: | + Arbitrary extra k8s pod specification as a YAML object. The default + value of this setting is an empty object, i.e. no extra configuration. + The value of this property is augmented to the pod specification as-is. + + This is a powerful tool for expert k8s administrators with advanced + configuration requirements. 
This setting should only be used for + configuration that cannot be accomplished through the other settings. + Misusing this setting can break your deployment and/or compromise + your system security. + + This is one of four related settings for inserting arbitrary pod + specification: + + 1. hub.extraPodSpec + 2. proxy.chp.extraPodSpec + 3. proxy.traefik.extraPodSpec + 4. scheduling.userScheduler.extraPodSpec + + One real-world use of these settings is to enable host networking. For + example, to configure host networking for the hub pod, add the + following to your helm configuration values: + + ```yaml + hub: + extraPodSpec: + hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet + ``` + + Likewise, to configure host networking for the proxy pod, add the + following: + + ```yaml + proxy: + chp: + extraPodSpec: + hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet + ``` + + N.B. Host networking has special security implications and can easily + break your deployment. This is an example—not an endorsement. + + See [PodSpec](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#PodSpec) + for the latest pod resource specification. + + proxy: + type: object + additionalProperties: false + properties: + chp: + type: object + additionalProperties: false + description: | + Configure the configurable-http-proxy (chp) pod managed by jupyterhub to route traffic + both to itself and to user pods. + properties: + revisionHistoryLimit: *revisionHistoryLimit + networkPolicy: *networkPolicy-spec + extraCommandLineFlags: + type: array + description: | + A list of strings to be added as command line options when + starting + [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy#command-line-options) + that will be expanded with Helm's template function `tpl` which + can render Helm template logic inside curly braces (`{{ ... }}`). 
+ + ```yaml + proxy: + chp: + extraCommandLineFlags: + - "--auto-rewrite" + - "--custom-header {{ .Values.myCustomStuff }}" + ``` + + Note that these will be appended last, and if you provide the same + flag twice, the last flag will be used, which mean you can + override the default flag values as well. + extraEnv: + type: [object, array] + additionalProperties: true + description: | + Extra environment variables that should be set for the chp pod. + + Environment variables are usually used here to: + - override HUB_SERVICE_PORT or HUB_SERVICE_HOST default values + - set CONFIGPROXY_SSL_KEY_PASSPHRASE for setting passphrase of SSL keys + + String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which + is a part of Kubernetes. + + ```yaml + proxy: + chp: + extraEnv: + # basic notation (for literal values only) + MY_ENV_VARS_NAME1: "my env var value 1" + + # explicit notation (the "name" field takes precedence) + CHP_NAMESPACE: + name: CHP_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + + # implicit notation (the "name" field is implied) + PREFIXED_CHP_NAMESPACE: + value: "my-prefix-$(CHP_NAMESPACE)" + SECRET_VALUE: + valueFrom: + secretKeyRef: + name: my-k8s-secret + key: password + ``` + + For more information, see the [Kubernetes EnvVar + specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). + pdb: *pdb-spec + nodeSelector: *nodeSelector-spec + tolerations: *tolerations-spec + containerSecurityContext: *containerSecurityContext-spec + image: *image-spec + livenessProbe: *probe-spec + readinessProbe: *probe-spec + resources: *resources-spec + defaultTarget: + type: [string, "null"] + description: | + Override the URL for the default routing target for the proxy. + Defaults to JupyterHub itself. + This will generally only have an effect while JupyterHub is not running, + as JupyterHub adds itself as the default target after it starts. 
+ errorTarget: + type: [string, "null"] + description: | + Override the URL for the error target for the proxy. + Defaults to JupyterHub itself. + Useful to reduce load on the Hub + or produce more informative error messages than the Hub's default, + e.g. in highly customized deployments such as BinderHub. + See Configurable HTTP Proxy for details on implementing an error target. + extraPodSpec: *extraPodSpec-spec + secretToken: + type: [string, "null"] + description: | + ```{note} + As of version 1.0.0 this will automatically be generated and there is + no need to set it manually. + + If you wish to reset a generated key, you can use `kubectl edit` on + the k8s Secret typically named `hub` and remove the + `hub.config.ConfigurableHTTPProxy.auth_token` entry in the k8s Secret, + then perform a new `helm upgrade`. + ``` + + A 32-byte cryptographically secure randomly generated string used to + secure communications between the hub pod and the proxy pod running a + [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) + instance. + + ```sh + # to generate a value, run + openssl rand -hex 32 + ``` + + Changing this value will cause the proxy and hub pods to restart. It is good security + practice to rotate these values over time. If this secret leaks, *immediately* change + it to something else, or user data can be compromised. + service: + type: object + additionalProperties: false + description: | + Configuration of the k8s Service `proxy-public` which either will + point to the `autohttps` pod running Traefik for TLS termination, or + the `proxy` pod running ConfigurableHTTPProxy. Incoming traffic from + users on the internet should always go through this k8s Service. + + When this service targets the `autohttps` pod which then routes to the + `proxy` pod, a k8s Service named `proxy-http` will be added targeting + the `proxy` pod and only accepting HTTP traffic on port 80. 
+ properties: + type: + enum: [ClusterIP, NodePort, LoadBalancer, ExternalName] + description: | + Default `LoadBalancer`. + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types) + to learn more about service types. + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the proxy service. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Annotations to apply to the service that is exposing the proxy. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) + for more details about annotations. + nodePorts: + type: object + additionalProperties: false + description: | + Object to set NodePorts to expose the service on for http and https. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport) + for more details about NodePorts. + properties: + http: + type: [integer, "null"] + description: | + The HTTP port the proxy-public service should be exposed on. + https: + type: [integer, "null"] + description: | + The HTTPS port the proxy-public service should be exposed on. + disableHttpPort: + type: boolean + description: | + Default `false`. + + If `true`, port 80 for incoming HTTP traffic will no longer be exposed. This should not be used with `proxy.https.type=letsencrypt` or `proxy.https.enabled=false` as it would remove the only exposed port. + extraPorts: + type: array + description: | + Extra ports the k8s Service should accept incoming traffic on, + which will be redirected to either the `autohttps` pod (treafik) + or the `proxy` pod (chp). 
+ + See [the Kubernetes + documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#serviceport-v1-core) + for the structure of the items in this list. + loadBalancerIP: + type: [string, "null"] + description: | + The public IP address the proxy-public Kubernetes service should + be exposed on. This entry will end up at the configurable proxy + server that JupyterHub manages, which will direct traffic to user + pods at the `/user` path and the hub pod at the `/hub` path. + + Set this if you want to use a fixed external IP address instead of + a dynamically acquired one. This is relevant if you have a domain + name that you want to point to a specific IP and want to ensure it + doesn't change. + loadBalancerSourceRanges: + type: array + description: | + A list of IP CIDR ranges that are allowed to access the load balancer service. + Defaults to allowing everyone to access it. + https: + type: object + additionalProperties: false + description: | + Object for customizing the settings for HTTPS used by the JupyterHub's proxy. + For more information on configuring HTTPS for your JupyterHub, see the [HTTPS section in our security guide](https) + properties: + enabled: + type: [boolean, "null"] + description: | + Indicator to set whether HTTPS should be enabled or not on the proxy. Defaults to `true` if the https object is provided. + type: + enum: [null, "", letsencrypt, manual, offload, secret] + description: | + The type of HTTPS encryption that is used. + Decides on which ports and network policies are used for communication via HTTPS. Setting this to `secret` sets the type to manual HTTPS with a secret that has to be provided in the `https.secret` object. + Defaults to `letsencrypt`. + letsencrypt: + type: object + additionalProperties: false + properties: + contactEmail: + type: [string, "null"] + description: | + The contact email to be used for automatically provisioned HTTPS certificates by Let's Encrypt. 
For more information see [Set up automatic HTTPS](setup-automatic-https). + Required for automatic HTTPS. + acmeServer: + type: [string, "null"] + description: | + Let's Encrypt is one of various ACME servers that can provide + a certificate, and by default their production server is used. + + Let's Encrypt staging: https://acme-staging-v02.api.letsencrypt.org/directory + Let's Encrypt production: acmeServer: https://acme-v02.api.letsencrypt.org/directory + manual: + type: object + additionalProperties: false + description: | + Object for providing own certificates for manual HTTPS configuration. To be provided when setting `https.type` to `manual`. + See [Set up manual HTTPS](setup-manual-https) + properties: + key: + type: [string, "null"] + description: | + The RSA private key to be used for HTTPS. + To be provided in the form of + + ``` + key: | + -----BEGIN RSA PRIVATE KEY----- + ... + -----END RSA PRIVATE KEY----- + ``` + cert: + type: [string, "null"] + description: | + The certificate to be used for HTTPS. + To be provided in the form of + + ``` + cert: | + -----BEGIN CERTIFICATE----- + ... + -----END CERTIFICATE----- + ``` + secret: + type: object + additionalProperties: false + description: | + Secret to be provided when setting `https.type` to `secret`. + properties: + name: + type: [string, "null"] + description: | + Name of the secret + key: + type: [string, "null"] + description: | + Path to the private key to be used for HTTPS. + Example: `'tls.key'` + crt: + type: [string, "null"] + description: | + Path to the certificate to be used for HTTPS. + Example: `'tls.crt'` + hosts: + type: array + description: | + You domain in list form. + Required for automatic HTTPS. See [Set up automatic HTTPS](setup-automatic-https). 
+ To be provided like: + ``` + hosts: + - + ``` + traefik: + type: object + additionalProperties: false + description: | + Configure the traefik proxy used to terminate TLS when 'autohttps' is enabled + properties: + revisionHistoryLimit: *revisionHistoryLimit + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the traefik pod. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + networkPolicy: *networkPolicy-spec + extraInitContainers: + type: array + description: | + list of extraInitContainers to be run with traefik pod, after the containers set in the chart. See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) + + ```yaml + proxy: + traefik: + extraInitContainers: + - name: init-myservice + image: busybox:1.28 + command: ['sh', '-c', 'command1'] + - name: init-mydb + image: busybox:1.28 + command: ['sh', '-c', 'command2'] + ``` + extraEnv: + type: [object, array] + additionalProperties: true + description: | + Extra environment variables that should be set for the traefik pod. + + Environment Variables here may be used to configure traefik. + + String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which + is a part of Kubernetes. 
+ + ```yaml + proxy: + traefik: + extraEnv: + # basic notation (for literal values only) + MY_ENV_VARS_NAME1: "my env var value 1" + + # explicit notation (the "name" field takes precedence) + TRAEFIK_NAMESPACE: + name: TRAEFIK_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + + # implicit notation (the "name" field is implied) + PREFIXED_TRAEFIK_NAMESPACE: + value: "my-prefix-$(TRAEFIK_NAMESPACE)" + SECRET_VALUE: + valueFrom: + secretKeyRef: + name: my-k8s-secret + key: password + ``` + + For more information, see the [Kubernetes EnvVar + specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). + pdb: *pdb-spec + nodeSelector: *nodeSelector-spec + tolerations: *tolerations-spec + containerSecurityContext: *containerSecurityContext-spec + extraDynamicConfig: + type: object + additionalProperties: true + description: | + This refers to traefik's post-startup configuration. + + This Helm chart already provide such configuration, so this is a + place where you can merge in additional configuration. If you are + about to use this configuration, you may want to inspect the + default configuration declared + [here](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/HEAD/jupyterhub/templates/proxy/autohttps/_configmap-dynamic.yaml). + extraPorts: + type: array + description: | + Extra ports for the traefik container within the autohttps pod + that you would like to expose, formatted in a k8s native way. + extraStaticConfig: + type: object + additionalProperties: true + description: | + This refers to traefik's startup configuration. + + This Helm chart already provide such configuration, so this is a + place where you can merge in additional configuration. If you are + about to use this configuration, you may want to inspect the + default configuration declared + [here](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/HEAD/jupyterhub/templates/proxy/autohttps/_configmap-traefik.yaml). 
+ extraVolumes: *extraVolumes-spec + extraVolumeMounts: *extraVolumeMounts-spec + hsts: + type: object + additionalProperties: false + required: [includeSubdomains, maxAge, preload] + description: | + This section regards a HTTP Strict-Transport-Security (HSTS) + response header. It can act as a request for a visiting web + browsers to enforce HTTPS on their end in for a given time into + the future, and optionally also for future requests to subdomains. + + These settings relate to traefik configuration which we use as a + TLS termination proxy. + + See [Mozilla's + documentation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Strict-Transport-Security) + for more information. + properties: + includeSubdomains: + type: boolean + maxAge: + type: integer + preload: + type: boolean + image: *image-spec + resources: *resources-spec + serviceAccount: *serviceAccount + extraPodSpec: *extraPodSpec-spec + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + K8s labels for the proxy pod. + + ```{note} + For consistency, this should really be located under + proxy.chp.labels but isn't for historical reasons. + ``` + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + K8s annotations for the proxy pod. + + ```{note} + For consistency, this should really be located under + proxy.chp.annotations but isn't for historical reasons. + ``` + deploymentStrategy: + type: object + additionalProperties: false + properties: + rollingUpdate: + type: [string, "null"] + type: + type: [string, "null"] + description: | + While the proxy pod running + [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) + could run in parallel, two instances running in parallel wouldn't + both receive updates from JupyterHub regarding how it should route + traffic. 
Due to this we default to using a deployment strategy of + Recreate instead of RollingUpdate. + secretSync: + type: object + additionalProperties: false + description: | + This configuration section refers to configuration of the sidecar + container in the autohttps pod running next to its traefik container + responsible for TLS termination. + + The purpose of this container is to store away and load TLS + certificates from a k8s Secret. The TLS certificates are acquired by + the ACME client (LEGO) that is running within the traefik container, + where traefik is using them for TLS termination. + properties: + containerSecurityContext: *containerSecurityContext-spec + image: *image-spec + resources: *resources-spec + + singleuser: + type: object + additionalProperties: false + description: | + Options for customizing the environment that is provided to the users after they log in. + properties: + networkPolicy: *networkPolicy-spec + podNameTemplate: + type: [string, "null"] + description: | + Passthrough configuration for + [KubeSpawner.pod_name_template](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.pod_name_template). + cpu: + type: object + additionalProperties: false + description: | + Set CPU limits & guarantees that are enforced for each user. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) + for more info. + properties: + limit: + type: [number, "null"] + guarantee: + type: [number, "null"] + memory: + type: object + additionalProperties: false + description: | + Set Memory limits & guarantees that are enforced for each user. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) + for more info. + properties: + limit: + type: [number, string, "null"] + guarantee: + type: [number, string, "null"] + description: | + Note that this field is referred to as *requests* by the Kubernetes API. 
+ image: *image-spec + initContainers: + type: array + description: | + list of initContainers to be run every singleuser pod. See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) + + ```yaml + singleuser: + initContainers: + - name: init-myservice + image: busybox:1.28 + command: ['sh', '-c', 'command1'] + - name: init-mydb + image: busybox:1.28 + command: ['sh', '-c', 'command2'] + ``` + profileList: + type: array + description: | + For more information about the profile list, see [KubeSpawner's + documentation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner) + as this is simply a passthrough to that configuration. + + ```{note} + The image-pullers are aware of the overrides of images in + `singleuser.profileList` but they won't be if you configure it in + JupyterHub's configuration of '`c.KubeSpawner.profile_list`. + ``` + + ```yaml + singleuser: + profileList: + - display_name: "Default: Shared, 8 CPU cores" + description: "Your code will run on a shared machine with CPU only." + default: True + - display_name: "Personal, 4 CPU cores & 26GB RAM, 1 NVIDIA Tesla K80 GPU" + description: "Your code will run a personal machine with a GPU." + kubespawner_override: + extra_resource_limits: + nvidia.com/gpu: "1" + ``` + extraFiles: *extraFiles + extraEnv: + type: [object, array] + additionalProperties: true + description: | + Extra environment variables that should be set for the user pods. + + String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which + is a part of Kubernetes. Note that the user pods will already have + access to a set of environment variables that you can use, like + `JUPYTERHUB_USER` and `JUPYTERHUB_HOST`. For more information about these + inspect [this source + code](https://github.com/jupyterhub/jupyterhub/blob/cc8e7806530466dce8968567d1bbd2b39a7afa26/jupyterhub/spawner.py#L763). 
+ + ```yaml + singleuser: + extraEnv: + # basic notation (for literal values only) + MY_ENV_VARS_NAME1: "my env var value 1" + + # explicit notation (the "name" field takes precedence) + USER_NAMESPACE: + name: USER_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + + # implicit notation (the "name" field is implied) + PREFIXED_USER_NAMESPACE: + value: "my-prefix-$(USER_NAMESPACE)" + SECRET_VALUE: + valueFrom: + secretKeyRef: + name: my-k8s-secret + key: password + ``` + + For more information, see the [Kubernetes EnvVar + specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). + nodeSelector: *nodeSelector-spec + extraTolerations: *tolerations-spec + extraNodeAffinity: + type: object + additionalProperties: false + description: | + Affinities describe where pods prefer or require to be scheduled, they + may prefer or require a node where they are to be scheduled to have a + certain label (node affinity). They may also require to be scheduled + in proximity or with a lack of proximity to another pod (pod affinity + and anti pod affinity). + + See the [Kubernetes + docs](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/) + for more info. + properties: + required: + type: array + description: | + Pass this field an array of + [`NodeSelectorTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#nodeselectorterm-v1-core) + objects. + preferred: + type: array + description: | + Pass this field an array of + [`PreferredSchedulingTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#preferredschedulingterm-v1-core) + objects. + extraPodAffinity: + type: object + additionalProperties: false + description: | + See the description of `singleuser.extraNodeAffinity`. 
+ properties: + required: + type: array + description: | + Pass this field an array of + [`PodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podaffinityterm-v1-core) + objects. + preferred: + type: array + description: | + Pass this field an array of + [`WeightedPodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#weightedpodaffinityterm-v1-core) + objects. + extraPodAntiAffinity: + type: object + additionalProperties: false + description: | + See the description of `singleuser.extraNodeAffinity`. + properties: + required: + type: array + description: | + Pass this field an array of + [`PodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podaffinityterm-v1-core) + objects. + preferred: + type: array + description: | + Pass this field an array of + [`WeightedPodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#weightedpodaffinityterm-v1-core) + objects. + cloudMetadata: + type: object + additionalProperties: false + required: [blockWithIptables, ip] + description: | + Please refer to dedicated section in [the Helm chart + documentation](block-metadata-iptables) for more information about + this. + properties: + blockWithIptables: + type: boolean + ip: + type: string + + cmd: + type: [array, string, "null"] + description: | + Passthrough configuration for + [KubeSpawner.cmd](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.cmd). + The default is "jupyterhub-singleuser". + Use `cmd: null` to launch a custom CMD from the image, + which must launch jupyterhub-singleuser or an equivalent process eventually. + For example: Jupyter's docker-stacks images. + defaultUrl: + type: [string, "null"] + description: | + Passthrough configuration for + [KubeSpawner.default_url](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.default_url). 
+ # FIXME: name mismatch, named events_enabled in kubespawner + events: + type: [boolean, "null"] + description: | + Passthrough configuration for + [KubeSpawner.events_enabled](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.events_enabled). + extraAnnotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Passthrough configuration for + [KubeSpawner.extra_annotations](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_annotations). + extraContainers: + type: array + description: | + Passthrough configuration for + [KubeSpawner.extra_containers](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_containers). + extraLabels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Passthrough configuration for + [KubeSpawner.extra_labels](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_labels). + extraPodConfig: + type: object + additionalProperties: true + description: | + Passthrough configuration for + [KubeSpawner.extra_pod_config](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_pod_config). + extraResource: + type: object + additionalProperties: false + properties: + # FIXME: name mismatch, named extra_resource_guarantees in kubespawner + guarantees: + type: object + additionalProperties: true + description: | + Passthrough configuration for + [KubeSpawner.extra_resource_guarantees](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_resource_guarantees). 
+ # FIXME: name mismatch, named extra_resource_limits in kubespawner + limits: + type: object + additionalProperties: true + description: | + Passthrough configuration for + [KubeSpawner.extra_resource_limits](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_resource_limits). + fsGid: + type: [integer, "null"] + description: | + Passthrough configuration for + [KubeSpawner.fs_gid](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.fs_gid). + lifecycleHooks: + type: object + additionalProperties: false + description: | + Passthrough configuration for + [KubeSpawner.lifecycle_hooks](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.lifecycle_hooks). + properties: + postStart: + type: object + additionalProperties: true + preStop: + type: object + additionalProperties: true + networkTools: + type: object + additionalProperties: false + description: | + This configuration section refers to configuration of a conditionally + created initContainer for the user pods with a purpose to block a + specific IP address. + + This initContainer will be created if + [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) + is set to true. + properties: + image: *image-spec + resources: *resources-spec + # FIXME: name mismatch, named service_account in kubespawner + serviceAccountName: + type: [string, "null"] + description: | + Passthrough configuration for + [KubeSpawner.service_account](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.service_account). + startTimeout: + type: [integer, "null"] + description: | + Passthrough configuration for + [KubeSpawner.start_timeout](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.start_timeout). 
+ storage: + type: object + additionalProperties: false + required: [type, homeMountPath] + description: | + This section configures KubeSpawner directly to some extent but also + indirectly through Helm chart specific configuration options such as + [`singleuser.storage.type`](schema_singleuser.storage.type). + properties: + capacity: + type: [string, "null"] + description: | + Configures `KubeSpawner.storage_capacity`. + + See the [KubeSpawner + documentation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html) + for more information. + dynamic: + type: object + additionalProperties: false + properties: + pvcNameTemplate: + type: [string, "null"] + description: | + Configures `KubeSpawner.pvc_name_template` which will be the + resource name of the PVC created by KubeSpawner for each user + if needed. + storageAccessModes: + type: array + items: + type: [string, "null"] + description: | + Configures `KubeSpawner.storage_access_modes`. + + See KubeSpawners documentation and [the k8s + documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes) + for more information. + storageClass: + type: [string, "null"] + description: | + Configures `KubeSpawner.storage_class`, which can be an + explicit StorageClass to dynamically provision storage for the + PVC that KubeSpawner will create. + + There is of a default StorageClass available in k8s clusters + for use if this is unspecified. + volumeNameTemplate: + type: [string, "null"] + description: | + Configures `KubeSpawner.volume_name_template`, which is the + name to reference from the containers volumeMounts section. + extraLabels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Configures `KubeSpawner.storage_extra_labels`. Note that these + labels are set on the PVC during creation only and won't be + updated after creation. 
+ extraVolumeMounts: *extraVolumeMounts-spec + extraVolumes: *extraVolumes-spec + homeMountPath: + type: string + description: | + The location within the container where the home folder storage + should be mounted. + static: + type: object + additionalProperties: false + properties: + pvcName: + type: [string, "null"] + description: | + Configures `KubeSpawner.pvc_claim_name` to reference + pre-existing storage. + subPath: + type: [string, "null"] + description: | + Configures the `subPath` field of a + `KubeSpawner.volume_mounts` entry added by the Helm chart. + + Path within the volume from which the container's volume + should be mounted. + type: + enum: [dynamic, static, none] + description: | + Decide if you want storage to be provisioned dynamically + (dynamic), or if you want to attach existing storage (static), or + don't want any storage to be attached (none). + allowPrivilegeEscalation: + type: [boolean, "null"] + description: | + Passthrough configuration for + [KubeSpawner.allow_privilege_escalation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.allow_privilege_escalation). + uid: + type: [integer, "null"] + description: | + Passthrough configuration for + [KubeSpawner.uid](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.uid). + + This dictates as what user the main container will start up as. + + As an example of when this is needed, consider if you want to enable + sudo rights for some of your users. This can be done by starting up as + root, enabling it from the container in a startup script, and then + transitioning to the normal user. + + Default is 1000, set to null to use the container's default. + + scheduling: + type: object + additionalProperties: false + description: | + Objects for customizing the scheduling of various pods on the nodes and + related labels. 
+ properties: + userScheduler: + type: object + additionalProperties: false + required: [enabled, plugins, pluginConfig, logLevel] + description: | + The user scheduler is making sure that user pods are scheduled + tight on nodes, this is useful for autoscaling of user node pools. + properties: + enabled: + type: boolean + description: | + Enables the user scheduler. + revisionHistoryLimit: *revisionHistoryLimit + replicas: + type: integer + description: | + You can have multiple schedulers to share the workload or improve + availability on node failure. + image: *image-spec + pdb: *pdb-spec + nodeSelector: *nodeSelector-spec + tolerations: *tolerations-spec + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the userScheduler pods. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra annotations to add to the user-scheduler pods. + containerSecurityContext: *containerSecurityContext-spec + logLevel: + type: integer + description: | + Corresponds to the verbosity level of logging made by the + kube-scheduler binary running within the user-scheduler pod. + plugins: + type: object + additionalProperties: true + description: | + These plugins refers to kube-scheduler plugins as documented + [here](https://kubernetes.io/docs/reference/scheduling/config/). + + The user-scheduler is really just a kube-scheduler configured in a + way to pack users tight on nodes using these plugins. See + values.yaml for information about the default plugins. + pluginConfig: + type: array + description: | + Individually activated plugins can be configured further. 
+ resources: *resources-spec + serviceAccount: *serviceAccount + extraPodSpec: *extraPodSpec-spec + podPriority: + type: object + additionalProperties: false + description: | + Pod Priority is used to allow real users to evict user placeholder pods + that in turn by entering a Pending state can trigger a scale up by a + cluster autoscaler. + + Having this option enabled only makes sense if the following conditions + are met: + + 1. A cluster autoscaler is installed. + 2. user-placeholder pods are configured to have a priority equal or + higher than the cluster autoscaler's "priority cutoff" so that the + cluster autoscaler scales up a node in advance for a pending user + placeholder pod. + 3. Normal user pods have a higher priority than the user-placeholder + pods. + 4. Image puller pods have a priority between normal user pods and + user-placeholder pods. + + Note that if the default priority cutoff is not configured on cluster + autoscaler, it will currently default to 0, and that in the future + this is meant to be lowered. If your cloud provider is installing the + cluster autoscaler for you, they may also configure this specifically. + + Recommended settings for a cluster autoscaler... + + ... with a priority cutoff of -10 (GKE): + + ```yaml + podPriority: + enabled: true + globalDefault: false + defaultPriority: 0 + imagePullerPriority: -5 + userPlaceholderPriority: -10 + ``` + + ... with a priority cutoff of 0: + + ```yaml + podPriority: + enabled: true + globalDefault: true + defaultPriority: 10 + imagePullerPriority: 5 + userPlaceholderPriority: 0 + ``` + properties: + enabled: + type: boolean + globalDefault: + type: boolean + description: | + Warning! This will influence all pods in the cluster. + + The priority a pod usually gets is 0. But this can be overridden + with a PriorityClass resource if it is declared to be the global + default. This configuration option allows for the creation of such + global default.
+ defaultPriority: + type: integer + description: | + The actual value for the default pod priority. + imagePullerPriority: + type: integer + description: | + The actual value for the [hook|continuous]-image-puller pods' priority. + userPlaceholderPriority: + type: integer + description: | + The actual value for the user-placeholder pods' priority. + userPlaceholder: + type: object + additionalProperties: false + description: | + User placeholders simulate users but will thanks to PodPriority be + evicted by the cluster autoscaler if a real user shows up. In this way + placeholders allow you to create a headroom for the real users and + reduce the risk of a user having to wait for a node to be added. Be + sure to use the continuous image puller as well along with + placeholders, so the images are also available when real users arrive. + + To test your setup efficiently, you can adjust the amount of user + placeholders with the following command: + ```sh + # Configure to have 3 user placeholders + kubectl scale sts/user-placeholder --replicas=3 + ``` + properties: + enabled: + type: boolean + image: *image-spec + revisionHistoryLimit: *revisionHistoryLimit + replicas: + type: integer + description: | + How many placeholder pods would you like to have? + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the userPlaceholder pods. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra annotations to add to the placeholder pods. + resources: + type: object + additionalProperties: true + description: | + Unless specified here, the placeholder pods will request the same + resources specified for the real singleuser pods.
+ containerSecurityContext: *containerSecurityContext-spec + corePods: + type: object + additionalProperties: false + description: | + These settings influence the core pods like the hub, proxy and + user-scheduler pods. + These settings influence all pods considered core pods, namely: + + - hub + - proxy + - autohttps + - hook-image-awaiter + - user-scheduler + + By defaults, the tolerations are: + + - hub.jupyter.org/dedicated=core:NoSchedule + - hub.jupyter.org_dedicated=core:NoSchedule + + Note that tolerations set here are combined with the respective + components dedicated tolerations, and that `_` is available in case + `/` isn't allowed in the clouds tolerations. + properties: + tolerations: *tolerations-spec + nodeAffinity: + type: object + additionalProperties: false + description: | + Where should pods be scheduled? Perhaps on nodes with a certain + label is preferred or even required? + properties: + matchNodePurpose: + enum: [ignore, prefer, require] + description: | + Decide if core pods *ignore*, *prefer* or *require* to + schedule on nodes with this label: + ``` + hub.jupyter.org/node-purpose=core + ``` + userPods: + type: object + additionalProperties: false + description: | + These settings influence all pods considered user pods, namely: + + - user-placeholder + - hook-image-puller + - continuous-image-puller + - jupyter- + + By defaults, the tolerations are: + + - hub.jupyter.org/dedicated=core:NoSchedule + - hub.jupyter.org_dedicated=core:NoSchedule + + Note that tolerations set here are combined with the respective + components dedicated tolerations, and that `_` is available in case + `/` isn't allowed in the clouds tolerations. + properties: + tolerations: *tolerations-spec + nodeAffinity: + type: object + additionalProperties: false + description: | + Where should pods be scheduled? Perhaps on nodes with a certain + label is preferred or even required? 
+ properties: + matchNodePurpose: + enum: [ignore, prefer, require] + description: | + Decide if user pods *ignore*, *prefer* or *require* to + schedule on nodes with this label: + ``` + hub.jupyter.org/node-purpose=user + ``` + + ingress: + type: object + additionalProperties: false + required: [enabled] + properties: + enabled: + type: boolean + description: | + Enable the creation of a Kubernetes Ingress to proxy-public service. + + See [Advanced Topics — Zero to JupyterHub with Kubernetes + 0.7.0 documentation](ingress) + for more details. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Annotations to apply to the Ingress resource. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) + for more details about annotations. + ingressClassName: + type: [string, "null"] + description: | + Maps directly to the Ingress resource's `spec.ingressClassName``. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#ingress-class) + for more details. + hosts: + type: array + description: | + List of hosts to route requests to the proxy. + pathSuffix: + type: [string, "null"] + description: | + Suffix added to Ingress's routing path pattern. + + Specify `*` if your ingress matches path by glob pattern. + pathType: + enum: [Prefix, Exact, ImplementationSpecific] + description: | + The path type to use. The default value is 'Prefix'. + + See [the Kubernetes documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#path-types) + for more details about path types. + tls: + type: array + description: | + TLS configurations for Ingress. + + See [the Kubernetes + documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#tls) + for more details about annotations. 
+ + prePuller: + type: object + additionalProperties: false + required: [hook, continuous] + properties: + revisionHistoryLimit: *revisionHistoryLimit + labels: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Extra labels to add to the pre puller job pods. + + See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) + to learn more about labels. + annotations: + type: object + additionalProperties: false + patternProperties: *labels-and-annotations-patternProperties + description: | + Annotations to apply to the hook and continuous image puller pods. One example use case is to + disable istio sidecars which could interfere with the image pulling. + resources: + type: object + additionalProperties: true + description: | + These are standard Kubernetes resources with requests and limits for + cpu and memory. They will be used on the containers in the pods + pulling images. These should be set extremely low as the containers + shut down directly or are pause containers that just idle. + + They were made configurable as usage of ResourceQuota may require + containers in the namespace to have explicit resources set. + extraTolerations: *tolerations-spec + hook: + type: object + additionalProperties: false + required: [enabled] + description: | + See the [*optimization + section*](pulling-images-before-users-arrive) + for more details. + properties: + enabled: + type: boolean + pullOnlyOnChanges: + type: boolean + description: | + Pull only if changes have been made to the images to pull, or more + accurately if the hook-image-puller daemonset has changed in any + way. + podSchedulingWaitDuration: + description: | + The `hook-image-awaiter` has a criteria to await all the + `hook-image-puller` DaemonSet's pods to both schedule and finish + their image pulling.
This flag can be used to relax this criteria + to instead only await the pods that _has already scheduled_ to + finish image pulling after a certain duration. + + The value of this is that sometimes the newly created + `hook-image-puller` pods cannot be scheduled because nodes are + full, and then it probably won't make sense to block a `helm + upgrade`. + + An infinite duration to wait for pods to schedule can be + represented by `-1`. This was the default behavior of version + 0.9.0 and earlier. + type: integer + nodeSelector: *nodeSelector-spec + tolerations: *tolerations-spec + containerSecurityContext: *containerSecurityContext-spec + image: *image-spec + resources: *resources-spec + serviceAccount: *serviceAccount + continuous: + type: object + additionalProperties: false + required: [enabled] + description: | + See the [*optimization + section*](pulling-images-before-users-arrive) + for more details. + + ```{note} + If used with a Cluster Autoscaler (an autoscaling node pool), also add + user-placeholders and enable pod priority. + ``` + properties: + enabled: + type: boolean + pullProfileListImages: + type: boolean + description: | + The singleuser.profileList configuration can provide a selection of + images. This option determines if all images identified there should + be pulled, both by the hook and continuous pullers. + + Images are looked for under `kubespawner_override`, and also + `profile_options.choices.kubespawner_override` since version 3.2.0. + + The reason to disable this, is that if you have for example 10 images + which start pulling in order from 1 to 10, a user that arrives and + wants to start a pod with image number 10 will need to wait for all + images to be pulled, and then it may be preferable to just let the + user arriving wait for a single image to be pulled on arrival. + extraImages: + type: object + additionalProperties: false + description: | + See the [*optimization section*](images-that-will-be-pulled) for more + details. 
+ + ```yaml + prePuller: + extraImages: + my-extra-image-i-want-pulled: + name: jupyter/all-spark-notebook + tag: 2343e33dec46 + ``` + patternProperties: + ".*": + type: object + additionalProperties: false + required: [name, tag] + properties: + name: + type: string + tag: + type: string + containerSecurityContext: *containerSecurityContext-spec + pause: + type: object + additionalProperties: false + description: | + The image-puller pods rely on initContainer to pull all images, and + their actual container when they are done is just running a `pause` + container. These are settings for that pause container. + properties: + containerSecurityContext: *containerSecurityContext-spec + image: *image-spec + + custom: + type: object + additionalProperties: true + description: | + Additional values to pass to the Hub. + JupyterHub will not itself look at these, + but you can read values in your own custom config via `hub.extraConfig`. + For example: + + ```yaml + custom: + myHost: "https://example.horse" + hub: + extraConfig: + myConfig.py: | + c.MyAuthenticator.host = get_config("custom.myHost") + ``` + + cull: + type: object + additionalProperties: false + required: [enabled] + description: | + The + [jupyterhub-idle-culler](https://github.com/jupyterhub/jupyterhub-idle-culler) + can run as a JupyterHub managed service to _cull_ running servers. + properties: + enabled: + type: boolean + description: | + Enable/disable use of jupyter-idle-culler. + users: + type: [boolean, "null"] + description: See the `--cull-users` flag. + adminUsers: + type: [boolean, "null"] + description: See the `--cull-admin-users` flag. + removeNamedServers: + type: [boolean, "null"] + description: See the `--remove-named-servers` flag. + timeout: + type: [integer, "null"] + description: See the `--timeout` flag. + every: + type: [integer, "null"] + description: See the `--cull-every` flag. + concurrency: + type: [integer, "null"] + description: See the `--concurrency` flag. 
+ maxAge: + type: [integer, "null"] + description: See the `--max-age` flag. + + debug: + type: object + additionalProperties: false + required: [enabled] + properties: + enabled: + type: boolean + description: | + Increases the loglevel throughout the resources in the Helm chart. + + rbac: + type: object + additionalProperties: false + required: [create] + properties: + enabled: + type: boolean + # This schema entry is needed to help us print a more helpful error + # message in NOTES.txt if hub.fsGid is set. + # + description: | + ````{note} + Removed in version 2.0.0. If you have been using `rbac.enable=false` + (strongly discouraged), then the equivalent configuration would be: + + ```yaml + rbac: + create: false + hub: + serviceAccount: + create: false + proxy: + traefik: + serviceAccount: + create: false + scheduling: + userScheduler: + serviceAccount: + create: false + prePuller: + hook: + serviceAccount: + create: false + ``` + ```` + create: + type: boolean + description: | + Decides if (Cluster)Role and (Cluster)RoleBinding resources are + created and bound to the configured serviceAccounts. + + global: + type: object + additionalProperties: true + properties: + safeToShowValues: + type: boolean + description: | + A flag that should only be set to true temporarily when experiencing a + deprecation message that contain censored content that you wish to + reveal. diff --git a/applications/jupyterhub/deploy/values.yaml b/applications/jupyterhub/deploy/values.yaml index 2f5cbca3..41e108d6 100755 --- a/applications/jupyterhub/deploy/values.yaml +++ b/applications/jupyterhub/deploy/values.yaml @@ -1,4 +1,4 @@ -harness: +harness: # EDIT: CLOUDHARNESS subdomain: hub service: auto: false @@ -31,6 +31,11 @@ harness: fullnameOverride: "" nameOverride: +# enabled is ignored by the jupyterhub chart itself, but a chart depending on +# the jupyterhub chart conditionally can make use this config option as the +# condition. 
+enabled: + # custom can contain anything you want to pass to the hub pod, as all passed # Helm template values will be made available there. custom: {} @@ -54,10 +59,11 @@ imagePullSecrets: [] # ConfigurableHTTPProxy speaks with the actual ConfigurableHTTPProxy server in # the proxy pod. hub: + revisionHistoryLimit: config: JupyterHub: admin_access: true - authenticator_class: keycloak + authenticator_class: keycloak # EDIT: CLOUDHARNESS service: type: ClusterIP annotations: {} @@ -68,7 +74,6 @@ hub: baseUrl: / cookieSecret: initContainers: [] - fsGid: 1000 nodeSelector: {} tolerations: [] concurrentSpawnLimit: 64 @@ -106,37 +111,38 @@ hub: extraVolumes: [] extraVolumeMounts: [] image: - name: jupyterhub/k8s-hub - tag: "1.1.3" + name: quay.io/jupyterhub/k8s-hub + tag: "3.2.1" pullPolicy: pullSecrets: [] resources: {} + podSecurityContext: + fsGroup: 1000 containerSecurityContext: runAsUser: 1000 runAsGroup: 1000 allowPrivilegeEscalation: false lifecycle: {} + loadRoles: {} services: {} pdb: enabled: false maxUnavailable: minAvailable: 1 networkPolicy: - enabled: false + enabled: true ingress: [] - ## egress for JupyterHub already includes Kubernetes internal DNS and - ## access to the proxy, but can be restricted further, but ensure to allow - ## access to the Kubernetes API server that couldn't be pinned ahead of - ## time. 
- ## - ## ref: https://stackoverflow.com/a/59016417/2220152 - egress: - - to: - - ipBlock: - cidr: 0.0.0.0/0 + egress: [] + egressAllowRules: + cloudMetadataServer: true + dnsPortsCloudMetadataServer: true + dnsPortsKubeSystemNamespace: true + dnsPortsPrivateIPs: true + nonPrivateIPs: true + privateIPs: true interNamespaceAccessLabels: ignore allowedIngressPorts: [] - allowNamedServers: true + allowNamedServers: true # EDIT: CLOUDHARNESS namedServerLimitPerUser: authenticatePrometheus: redirectToServer: @@ -163,11 +169,13 @@ hub: timeoutSeconds: 1 existingSecret: serviceAccount: + create: true + name: annotations: {} extraPodSpec: {} rbac: - enabled: true + create: true # proxy relates to the proxy pod, the proxy-public service, and the autohttps # pod and proxy-http service. @@ -202,7 +210,7 @@ proxy: rollingUpdate: # service relates to the proxy-public service service: - type: NodePort + type: NodePort # EDIT: CLOUDHARNESS labels: {} annotations: {} nodePorts: @@ -215,13 +223,17 @@ proxy: # chp relates to the proxy pod, which is responsible for routing traffic based # on dynamic configuration sent from JupyterHub to CHP's REST API. chp: + revisionHistoryLimit: containerSecurityContext: runAsUser: 65534 # nobody user runAsGroup: 65534 # nobody group allowPrivilegeEscalation: false image: - name: jupyterhub/configurable-http-proxy - tag: 4.5.0 # https://github.com/jupyterhub/configurable-http-proxy/releases + name: quay.io/jupyterhub/configurable-http-proxy + # tag is automatically bumped to new patch versions by the + # watch-dependencies.yaml workflow. 
+ # + tag: "4.6.1" # https://github.com/jupyterhub/configurable-http-proxy/tags pullPolicy: pullSecrets: [] extraCommandLineFlags: [] @@ -229,11 +241,14 @@ proxy: enabled: true initialDelaySeconds: 60 periodSeconds: 10 + failureThreshold: 30 + timeoutSeconds: 3 readinessProbe: enabled: true initialDelaySeconds: 0 periodSeconds: 2 failureThreshold: 1000 + timeoutSeconds: 1 resources: {} defaultTarget: errorTarget: @@ -241,12 +256,16 @@ proxy: nodeSelector: {} tolerations: [] networkPolicy: - enabled: false + enabled: true ingress: [] - egress: - - to: - - ipBlock: - cidr: 0.0.0.0/0 + egress: [] + egressAllowRules: + cloudMetadataServer: true + dnsPortsCloudMetadataServer: true + dnsPortsKubeSystemNamespace: true + dnsPortsPrivateIPs: true + nonPrivateIPs: true + privateIPs: true interNamespaceAccessLabels: ignore allowedIngressPorts: [http, https] pdb: @@ -257,13 +276,17 @@ proxy: # traefik relates to the autohttps pod, which is responsible for TLS # termination when proxy.https.type=letsencrypt. traefik: + revisionHistoryLimit: containerSecurityContext: runAsUser: 65534 # nobody user runAsGroup: 65534 # nobody group allowPrivilegeEscalation: false image: name: traefik - tag: v2.4.11 # ref: https://hub.docker.com/_/traefik?tab=tags + # tag is automatically bumped to new patch versions by the + # watch-dependencies.yaml workflow. 
+ # + tag: "v2.10.7" # ref: https://hub.docker.com/_/traefik?tab=tags pullPolicy: pullSecrets: [] hsts: @@ -272,6 +295,7 @@ proxy: maxAge: 15724800 # About 6 months resources: {} labels: {} + extraInitContainers: [] extraEnv: {} extraVolumes: [] extraVolumeMounts: [] @@ -283,10 +307,14 @@ proxy: networkPolicy: enabled: true ingress: [] - egress: - - to: - - ipBlock: - cidr: 0.0.0.0/0 + egress: [] + egressAllowRules: + cloudMetadataServer: true + dnsPortsCloudMetadataServer: true + dnsPortsKubeSystemNamespace: true + dnsPortsPrivateIPs: true + nonPrivateIPs: true + privateIPs: true interNamespaceAccessLabels: ignore allowedIngressPorts: [http, https] pdb: @@ -294,6 +322,8 @@ proxy: maxUnavailable: minAvailable: 1 serviceAccount: + create: true + name: annotations: {} extraPodSpec: {} secretSync: @@ -302,8 +332,8 @@ proxy: runAsGroup: 65534 # nobody group allowPrivilegeEscalation: false image: - name: jupyterhub/k8s-secret-sync - tag: "1.1.3" + name: quay.io/jupyterhub/k8s-secret-sync + tag: "3.2.1" pullPolicy: pullSecrets: [] resources: {} @@ -342,29 +372,27 @@ singleuser: preferred: [] networkTools: image: - name: jupyterhub/k8s-network-tools - tag: "1.1.3" + name: quay.io/jupyterhub/k8s-network-tools + tag: "3.2.1" pullPolicy: pullSecrets: [] + resources: {} cloudMetadata: # block set to true will append a privileged initContainer using the # iptables to block the sensitive metadata server at the provided ip. - blockWithIptables: false + blockWithIptables: true + ip: 169.254.169.254 networkPolicy: - enabled: false + enabled: true ingress: [] - egress: - # Required egress to communicate with the hub and DNS servers will be - # augmented to these egress rules. - # - # This default rule explicitly allows all outbound traffic from singleuser - # pods, except to a typical IP used to return metadata that can be used by - # someone with malicious intent. 
- - to: - - ipBlock: - cidr: 0.0.0.0/0 - except: - - 169.254.169.254/32 + egress: [] + egressAllowRules: + cloudMetadataServer: false + dnsPortsCloudMetadataServer: true + dnsPortsKubeSystemNamespace: true + dnsPortsPrivateIPs: true + nonPrivateIPs: true + privateIPs: false interNamespaceAccessLabels: ignore allowedIngressPorts: [] events: true @@ -376,6 +404,7 @@ singleuser: lifecycleHooks: {} initContainers: [] extraContainers: [] + allowPrivilegeEscalation: false uid: 1000 fsGid: 100 serviceAccountName: @@ -387,29 +416,29 @@ singleuser: static: pvcName: subPath: "{username}" - capacity: 10Mi - homeMountPath: /home/workspace + capacity: 10Mi # EDIT: CLOUDHARNESS + homeMountPath: /home/workspace # EDIT: CLOUDHARNESS dynamic: storageClass: - pvcNameTemplate: jupyter-{username} - volumeNameTemplate: jupyter-{username} + pvcNameTemplate: jupyter-{username} # EDIT: CLOUDHARNESS + volumeNameTemplate: jupyter-{username} # EDIT: CLOUDHARNESS storageAccessModes: [ReadWriteOnce] image: - name: jupyter/base-notebook - tag: "hub-1.4.2" + name: quay.io/jupyterhub/k8s-singleuser-sample + tag: "3.2.1" pullPolicy: pullSecrets: [] startTimeout: 300 cpu: - limit: 0.4 - guarantee: 0.05 + limit: 0.4 # EDIT: CLOUDHARNESS + guarantee: 0.05 # EDIT: CLOUDHARNESS memory: - limit: 0.5G - guarantee: 0.1G + limit: 0.5G # EDIT: CLOUDHARNESS + guarantee: 0.1G # EDIT: CLOUDHARNESS extraResource: limits: {} guarantees: {} - cmd: /usr/local/bin/start-singleuser.sh + cmd: jupyterhub-singleuser defaultUrl: extraPodConfig: {} profileList: [] @@ -417,74 +446,146 @@ singleuser: # scheduling relates to the user-scheduler pods and user-placeholder pods. scheduling: userScheduler: - enabled: false + enabled: false # EDIT: CLOUDHARNESS + revisionHistoryLimit: replicas: 2 logLevel: 4 + # plugins are configured on the user-scheduler to make us score how we + # schedule user pods in a way to help us schedule on the most busy node. By + # doing this, we help scale down more effectively. 
It isn't obvious how to + # enable/disable scoring plugins, and configure them, to accomplish this. + # # plugins ref: https://kubernetes.io/docs/reference/scheduling/config/#scheduling-plugins-1 + # migration ref: https://kubernetes.io/docs/reference/scheduling/config/#scheduler-configuration-migrations + # plugins: score: + # These scoring plugins are enabled by default according to + # https://kubernetes.io/docs/reference/scheduling/config/#scheduling-plugins + # 2022-02-22. + # + # Enabled with high priority: + # - NodeAffinity + # - InterPodAffinity + # - NodeResourcesFit + # - ImageLocality + # Remains enabled with low default priority: + # - TaintToleration + # - PodTopologySpread + # - VolumeBinding + # Disabled for scoring: + # - NodeResourcesBalancedAllocation + # disabled: - - name: SelectorSpread - - name: TaintToleration - - name: PodTopologySpread + # We disable these plugins (with regards to scoring) to not interfere + # or complicate our use of NodeResourcesFit. - name: NodeResourcesBalancedAllocation - - name: NodeResourcesLeastAllocated # Disable plugins to be allowed to enable them again with a different # weight and avoid an error. - - name: NodePreferAvoidPods - name: NodeAffinity - name: InterPodAffinity + - name: NodeResourcesFit - name: ImageLocality enabled: - - name: NodePreferAvoidPods - weight: 161051 - name: NodeAffinity weight: 14631 - name: InterPodAffinity weight: 1331 - - name: NodeResourcesMostAllocated + - name: NodeResourcesFit weight: 121 - name: ImageLocality weight: 11 + pluginConfig: + # Here we declare that we should optimize pods to fit based on a + # MostAllocated strategy instead of the default LeastAllocated. 
+ - name: NodeResourcesFit + args: + scoringStrategy: + resources: + - name: cpu + weight: 1 + - name: memory + weight: 1 + type: MostAllocated containerSecurityContext: runAsUser: 65534 # nobody user runAsGroup: 65534 # nobody group allowPrivilegeEscalation: false image: # IMPORTANT: Bumping the minor version of this binary should go hand in - # hand with an inspection of the user-scheduelrs RBAC resources - # that we have forked. - name: k8s.gcr.io/kube-scheduler - tag: v1.19.13 # ref: https://github.com/kubernetes/website/blob/main/content/en/releases/patch-releases.md + # hand with an inspection of the user-scheduelr's RBAC + # resources that we have forked in + # templates/scheduling/user-scheduler/rbac.yaml. + # + # Debugging advice: + # + # - Is configuration of kube-scheduler broken in + # templates/scheduling/user-scheduler/configmap.yaml? + # + # - Is the kube-scheduler binary's compatibility to work + # against a k8s api-server that is too new or too old? + # + # - You can update the GitHub workflow that runs tests to + # include "deploy/user-scheduler" in the k8s namespace report + # and reduce the user-scheduler deployments replicas to 1 in + # dev-config.yaml to get relevant logs from the user-scheduler + # pods. Inspect the "Kubernetes namespace report" action! + # + # - Typical failures are that kube-scheduler fails to search for + # resources via its "informers", and won't start trying to + # schedule pods before they succeed which may require + # additional RBAC permissions or that the k8s api-server is + # aware of the resources. + # + # - If "successfully acquired lease" can be seen in the logs, it + # is a good sign kube-scheduler is ready to schedule pods. + # + name: registry.k8s.io/kube-scheduler + # tag is automatically bumped to new patch versions by the + # watch-dependencies.yaml workflow. The minor version is pinned in the + # workflow, and should be updated there if a minor version bump is done + # here. 
We aim to stay around 1 minor version behind the latest k8s + # version. + # + tag: "v1.28.6" # ref: https://github.com/kubernetes/kubernetes/tree/master/CHANGELOG pullPolicy: pullSecrets: [] nodeSelector: {} tolerations: [] + labels: {} + annotations: {} pdb: enabled: true maxUnavailable: 1 minAvailable: resources: {} serviceAccount: + create: true + name: annotations: {} extraPodSpec: {} podPriority: enabled: false globalDefault: false defaultPriority: 0 + imagePullerPriority: -5 userPlaceholderPriority: -10 userPlaceholder: enabled: true image: - name: k8s.gcr.io/pause - # tag's can be updated by inspecting the output of the command: - # gcloud container images list-tags k8s.gcr.io/pause --sort-by=~tags + name: registry.k8s.io/pause + # tag is automatically bumped to new patch versions by the + # watch-dependencies.yaml workflow. # # If you update this, also update prePuller.pause.image.tag - tag: "3.5" + # + tag: "3.9" pullPolicy: pullSecrets: [] + revisionHistoryLimit: replicas: 0 + labels: {} + annotations: {} containerSecurityContext: runAsUser: 65534 # nobody user runAsGroup: 65534 # nobody group @@ -517,6 +618,8 @@ scheduling: # prePuller relates to the hook|continuous-image-puller DaemonsSets prePuller: + revisionHistoryLimit: + labels: {} annotations: {} resources: {} containerSecurityContext: @@ -530,8 +633,8 @@ prePuller: pullOnlyOnChanges: true # image and the configuration below relates to the hook-image-awaiter Job image: - name: jupyterhub/k8s-image-awaiter - tag: "1.1.3" + name: quay.io/jupyterhub/k8s-image-awaiter + tag: "3.2.1" pullPolicy: pullSecrets: [] containerSecurityContext: @@ -543,6 +646,8 @@ prePuller: tolerations: [] resources: {} serviceAccount: + create: true + name: annotations: {} continuous: enabled: true @@ -554,18 +659,20 @@ prePuller: runAsGroup: 65534 # nobody group allowPrivilegeEscalation: false image: - name: k8s.gcr.io/pause - # tag's can be updated by inspecting the output of the command: - # gcloud container images 
list-tags k8s.gcr.io/pause --sort-by=~tags + name: registry.k8s.io/pause + # tag is automatically bumped to new patch versions by the + # watch-dependencies.yaml workflow. # # If you update this, also update scheduling.userPlaceholder.image.tag - tag: "3.5" + # + tag: "3.9" pullPolicy: pullSecrets: [] ingress: enabled: false annotations: {} + ingressClassName: hosts: [] pathSuffix: pathType: Prefix @@ -581,7 +688,8 @@ ingress: cull: enabled: true users: false # --cull-users - removeNamedServers: true # --remove-named-servers + adminUsers: true # --cull-admin-users + removeNamedServers: true # EDIT: CLOUDHARNESS timeout: 3600 # --timeout every: 600 # --cull-every concurrency: 10 # --concurrency diff --git a/applications/jupyterhub/zero-to-jupyterhub-k8s b/applications/jupyterhub/zero-to-jupyterhub-k8s new file mode 160000 index 00000000..c92c1237 --- /dev/null +++ b/applications/jupyterhub/zero-to-jupyterhub-k8s @@ -0,0 +1 @@ +Subproject commit c92c12374795e84f36f5f16c4e8b8a448ad2f230 From cff3c6b7d85b0d6c3f99f3d84222eb6062bd3d85 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Sat, 20 Jan 2024 11:31:34 +0100 Subject: [PATCH 002/210] CH-110 jupyterhub update wip --- applications/jupyterhub/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/applications/jupyterhub/README.md b/applications/jupyterhub/README.md index d961d034..d7d67d4d 100755 --- a/applications/jupyterhub/README.md +++ b/applications/jupyterhub/README.md @@ -31,3 +31,13 @@ To support the pre pulling of task images see (https://github.com/MetaCell/cloud the template `templates/image-puller/_helpers-daemonset.tpl` has been changed (see line 167 and on) TODO: remember to implement/revise this code after you have updated/changed the templates of JupyterHub + +## How to update + +The helm chart is based on the [zero-to-jupyterhub](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/) helm chart. + +1. Run update.sh [TAG] # Do not use latest! +2. 
Restore from the diff files with EDIT: CLOUDHARNESS + +Customize notebook image: quay.io/jupyterhub/k8s-singleuser-sample:[TAG] + From 428d83d75b91cb956c6cf758cc87cb5ab4a25efa Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Sat, 20 Jan 2024 11:31:46 +0100 Subject: [PATCH 003/210] CH-110 jupyterhub update wip --- applications/jupyterhub/Dockerfile | 26 +- applications/jupyterhub/README.md | 3 +- .../deploy/resources/hub/jupyterhub_config.py | 1 + .../jupyterhub/deploy/resources/hub/z2jh.py | 1 + .../deploy/templates/_helpers-auth-rework.tpl | 4 +- .../jupyterhub/deploy/templates/_helpers.tpl | 2 +- applications/jupyterhub/update.patch | 5845 +++++++++++++++++ applications/jupyterhub/update.sh | 28 + deployment/codefresh-test-local.yaml | 439 +- 9 files changed, 6050 insertions(+), 299 deletions(-) create mode 100644 applications/jupyterhub/update.patch create mode 100644 applications/jupyterhub/update.sh diff --git a/applications/jupyterhub/Dockerfile b/applications/jupyterhub/Dockerfile index 8b279adc..907ce672 100755 --- a/applications/jupyterhub/Dockerfile +++ b/applications/jupyterhub/Dockerfile @@ -1,31 +1,39 @@ ARG CLOUDHARNESS_BASE FROM $CLOUDHARNESS_BASE as base -FROM jupyterhub/k8s-hub:1.1.3 +FROM quay.io/jupyterhub/k8s-hub:3.2.1 USER root COPY --from=base libraries/models/requirements.txt /libraries/models/requirements.txt -RUN pip install -r /libraries/models/requirements.txt +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -r /libraries/models/requirements.txt COPY --from=base libraries/cloudharness-common/requirements.txt /libraries/cloudharness-common/requirements.txt -RUN pip install -r /libraries/cloudharness-common/requirements.txt +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -r /libraries/cloudharness-common/requirements.txt COPY --from=base libraries/client/cloudharness_cli/requirements.txt /libraries/client/cloudharness_cli/requirements.txt 
-RUN pip install -r /libraries/client/cloudharness_cli/requirements.txt +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -r /libraries/client/cloudharness_cli/requirements.txt COPY --from=base libraries/models /libraries/models -RUN pip install -e /libraries/models +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -e /libraries/models COPY --from=base libraries/cloudharness-common /libraries/cloudharness-common COPY --from=base libraries/client/cloudharness_cli /libraries/client/cloudharness_cli # -RUN pip install -e /libraries/cloudharness-common -RUN pip install -e /libraries/client/cloudharness_cli +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -e /libraries/cloudharness-common +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install -e /libraries/client/cloudharness_cli COPY src src -RUN pip install ./src/harness_jupyter -RUN pip install ./src/chauthenticator +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install ./src/harness_jupyter +RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ + pip install ./src/chauthenticator USER jovyan diff --git a/applications/jupyterhub/README.md b/applications/jupyterhub/README.md index d7d67d4d..9ad78d2f 100755 --- a/applications/jupyterhub/README.md +++ b/applications/jupyterhub/README.md @@ -37,7 +37,8 @@ TODO: remember to implement/revise this code after you have updated/changed the The helm chart is based on the [zero-to-jupyterhub](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/) helm chart. 1. Run update.sh [TAG] # Do not use latest! -2. Restore from the diff files with EDIT: CLOUDHARNESS +2. Restore from the diff files with EDIT: CLOUDHARNESS. Use update.patch as a reference +3. 3. 
Update Dockerfile to use the same base image you see on values.yaml: hub/image Customize notebook image: quay.io/jupyterhub/k8s-singleuser-sample:[TAG] diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py index 8ec801ee..5ebe20b5 100755 --- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py +++ b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py @@ -537,6 +537,7 @@ def camelCaseify(s): c.Authenticator.auto_login = True c.OAuthenticator.client_id = client_id c.OAuthenticator.client_secret = client_secret + c.OAuthenticator.allow_all = True c.GenericOAuthenticator.login_service = "CH" c.GenericOAuthenticator.username_key = "email" diff --git a/applications/jupyterhub/deploy/resources/hub/z2jh.py b/applications/jupyterhub/deploy/resources/hub/z2jh.py index fc368f64..2fe0d25b 100755 --- a/applications/jupyterhub/deploy/resources/hub/z2jh.py +++ b/applications/jupyterhub/deploy/resources/hub/z2jh.py @@ -119,6 +119,7 @@ def get_config(key, default=None): value = value[level] # EDIT: CLOUDHARNESS START + import re if value and isinstance(value, str): replace_var = re.search("{{.*?}}", value) if replace_var: diff --git a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl index 3159d103..e9d2b4f4 100644 --- a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl +++ b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl @@ -178,7 +178,7 @@ ldap.dn.user.useLookupName: LDAPAuthenticator.use_lookup_dn_username representing the old z2jh config, output the result in $c. */}} - {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub. 
}} + {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub) }} {{- $class_old_config_key := .Values.apps.jupyterhub.apps.jupyterhub.auth.type | default "" }} {{- /* ldap - github */}} {{- $class_new_entrypoint := "" }} {{- /* ldapauthenticator.LDAPAuthenticator - github */}} @@ -191,7 +191,7 @@ ldap.dn.user.useLookupName: LDAPAuthenticator.use_lookup_dn_username {{- /* UPDATE c dict explicitly with auth.custom.config */}} {{- if .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} {{- $custom_config := merge (dict) .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} - {{- if not .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub.}} + {{- if not .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub }} {{- range $key, $val := $custom_config }} {{- $_ := set $custom_config $key "***" }} {{- end }} diff --git a/applications/jupyterhub/deploy/templates/_helpers.tpl b/applications/jupyterhub/deploy/templates/_helpers.tpl index a2023639..1737f3d6 100755 --- a/applications/jupyterhub/deploy/templates/_helpers.tpl +++ b/applications/jupyterhub/deploy/templates/_helpers.tpl @@ -194,7 +194,7 @@ component: {{ include "jupyterhub.componentLabel" . }} using "toYaml | fromYaml" in order to be able to use normal helm template functions on it. 
*/}} - {{- $jupyterhub_values := .root.Values.apps.jupyterhub.}} + {{- $jupyterhub_values := .root.Values.apps.jupyterhub }} {{- if ne .root.Chart.Name "jupyterhub" }} {{- if .root.Values.apps.jupyterhub.jupyterhub }} {{- $jupyterhub_values = .root.Values.apps.jupyterhub.jupyterhub }} diff --git a/applications/jupyterhub/update.patch b/applications/jupyterhub/update.patch new file mode 100644 index 00000000..5241525b --- /dev/null +++ b/applications/jupyterhub/update.patch @@ -0,0 +1,5845 @@ +diff --git a/applications/jupyterhub/README.md b/applications/jupyterhub/README.md +index d961d03..d7d67d4 100755 +--- a/applications/jupyterhub/README.md ++++ b/applications/jupyterhub/README.md +@@ -31,3 +31,13 @@ To support the pre pulling of task images see (https://github.com/MetaCell/cloud + the template `templates/image-puller/_helpers-daemonset.tpl` has been changed (see line 167 and on) + + TODO: remember to implement/revise this code after you have updated/changed the templates of JupyterHub ++ ++## How to update ++ ++The helm chart is based on the [zero-to-jupyterhub](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/) helm chart. ++ ++1. Run update.sh [TAG] # Do not use latest! ++2. 
Restore from the diff files with EDIT: CLOUDHARNESS ++ ++Customize notebook image: quay.io/jupyterhub/k8s-singleuser-sample:[TAG] ++ +diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py +index d4b3cee..8ec801e 100755 +--- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py ++++ b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py +@@ -1,9 +1,17 @@ ++# load the config object (satisfies linters) ++c = get_config() # noqa ++ ++import glob + import os + import re + import sys +-import logging + ++from jupyterhub.utils import url_path_join ++from kubernetes_asyncio import client + from tornado.httpclient import AsyncHTTPClient ++ ++#CLOUDHARNESS: EDIT START ++import logging + from kubernetes import client + from jupyterhub.utils import url_path_join + +@@ -12,7 +20,7 @@ try: + harness_hub() # activates harness hooks on jupyterhub + except Exception as e: + logging.error("could not import harness_jupyter", exc_info=True) +- ++# CLOUDHARNESS: EDIT END + + # Make sure that modules placed in the same directory as the jupyterhub config are added to the pythonpath + configuration_directory = os.path.dirname(os.path.realpath(__file__)) +@@ -20,39 +28,13 @@ sys.path.insert(0, configuration_directory) + + from z2jh import ( + get_config, +- set_config_if_not_none, + get_name, + get_name_env, + get_secret_value, ++ set_config_if_not_none, + ) + + +-print('Base url is', c.JupyterHub.get('base_url', '/')) +- +-# Configure JupyterHub to use the curl backend for making HTTP requests, +-# rather than the pure-python implementations. The default one starts +-# being too slow to make a large number of requests to the proxy API +-# at the rate required. 
+-AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") +- +-c.JupyterHub.spawner_class = 'kubespawner.KubeSpawner' +- +-# Connect to a proxy running in a different pod +-c.ConfigurableHTTPProxy.api_url = 'http://{}:{}'.format(os.environ['PROXY_API_SERVICE_HOST'], int(os.environ['PROXY_API_SERVICE_PORT'])) +-c.ConfigurableHTTPProxy.should_start = False +- +-# Do not shut down user pods when hub is restarted +-c.JupyterHub.cleanup_servers = False +- +-# Check that the proxy has routes appropriately setup +-c.JupyterHub.last_activity_interval = 60 +- +-# Don't wait at all before redirecting a spawning user to the progress page +-c.JupyterHub.tornado_settings = { +- 'slow_spawn_timeout': 0, +-} +- +- + def camelCaseify(s): + """convert snake_case to camelCase + +@@ -173,6 +155,7 @@ for trait, cfg_key in ( + ("events_enabled", "events"), + ("extra_labels", None), + ("extra_annotations", None), ++ # ("allow_privilege_escalation", None), # Managed manually below + ("uid", None), + ("fs_gid", None), + ("service_account", "serviceAccountName"), +@@ -206,10 +189,19 @@ image = get_config("singleuser.image.name") + if image: + tag = get_config("singleuser.image.tag") + if tag: +- image = "{}:{}".format(image, tag) ++ image = f"{image}:{tag}" + + c.KubeSpawner.image = image + ++# allow_privilege_escalation defaults to False in KubeSpawner 2+. Since its a ++# property where None, False, and True all are valid values that users of the ++# Helm chart may want to set, we can't use the set_config_if_not_none helper ++# function as someone may want to override the default False value to None. ++# ++c.KubeSpawner.allow_privilege_escalation = get_config( ++ "singleuser.allowPrivilegeEscalation" ++) ++ + # Combine imagePullSecret.create (single), imagePullSecrets (list), and + # singleuser.image.pullSecrets (list). 
+ image_pull_secrets = [] +@@ -255,7 +247,7 @@ if match_node_purpose: + pass + else: + raise ValueError( +- "Unrecognized value for matchNodePurpose: %r" % match_node_purpose ++ f"Unrecognized value for matchNodePurpose: {match_node_purpose}" + ) + + # Combine the common tolerations for user pods with singleuser tolerations +@@ -271,7 +263,7 @@ if storage_type == "dynamic": + pvc_name_template = get_config("singleuser.storage.dynamic.pvcNameTemplate") + c.KubeSpawner.pvc_name_template = pvc_name_template + volume_name_template = get_config("singleuser.storage.dynamic.volumeNameTemplate") +- c.KubeSpawner.storage_pvc_ensure = False ++ c.KubeSpawner.storage_pvc_ensure = True + set_config_if_not_none( + c.KubeSpawner, "storage_class", "singleuser.storage.dynamic.storageClass" + ) +@@ -354,41 +346,62 @@ c.KubeSpawner.volume_mounts.extend( + ) + + c.JupyterHub.services = [] ++c.JupyterHub.load_roles = [] + ++# jupyterhub-idle-culler's permissions are scoped to what it needs only, see ++# https://github.com/jupyterhub/jupyterhub-idle-culler#permissions. 
++# + if get_config("cull.enabled", False): ++ jupyterhub_idle_culler_role = { ++ "name": "jupyterhub-idle-culler", ++ "scopes": [ ++ "list:users", ++ "read:users:activity", ++ "read:servers", ++ "delete:servers", ++ # "admin:users", # dynamically added if --cull-users is passed ++ ], ++ # assign the role to a jupyterhub service, so it gains these permissions ++ "services": ["jupyterhub-idle-culler"], ++ } ++ + cull_cmd = ["python3", "-m", "jupyterhub_idle_culler"] + base_url = c.JupyterHub.get("base_url", "/") + cull_cmd.append("--url=http://localhost:8081" + url_path_join(base_url, "hub/api")) + + cull_timeout = get_config("cull.timeout") + if cull_timeout: +- cull_cmd.append("--timeout=%s" % cull_timeout) ++ cull_cmd.append(f"--timeout={cull_timeout}") + + cull_every = get_config("cull.every") + if cull_every: +- cull_cmd.append("--cull-every=%s" % cull_every) ++ cull_cmd.append(f"--cull-every={cull_every}") + + cull_concurrency = get_config("cull.concurrency") + if cull_concurrency: +- cull_cmd.append("--concurrency=%s" % cull_concurrency) ++ cull_cmd.append(f"--concurrency={cull_concurrency}") + + if get_config("cull.users"): + cull_cmd.append("--cull-users") ++ jupyterhub_idle_culler_role["scopes"].append("admin:users") ++ ++ if not get_config("cull.adminUsers"): ++ cull_cmd.append("--cull-admin-users=false") + + if get_config("cull.removeNamedServers"): + cull_cmd.append("--remove-named-servers") + + cull_max_age = get_config("cull.maxAge") + if cull_max_age: +- cull_cmd.append("--max-age=%s" % cull_max_age) ++ cull_cmd.append(f"--max-age={cull_max_age}") + + c.JupyterHub.services.append( + { +- "name": "cull-idle", +- "admin": True, ++ "name": "jupyterhub-idle-culler", + "command": cull_cmd, + } + ) ++ c.JupyterHub.load_roles.append(jupyterhub_idle_culler_role) + + for key, service in get_config("hub.services", {}).items(): + # c.JupyterHub.services is a list of dicts, but +@@ -402,26 +415,44 @@ for key, service in get_config("hub.services", {}).items(): + 
+ c.JupyterHub.services.append(service) + ++for key, role in get_config("hub.loadRoles", {}).items(): ++ # c.JupyterHub.load_roles is a list of dicts, but ++ # hub.loadRoles is a dict of dicts to make the config mergable ++ role.setdefault("name", key) ++ ++ c.JupyterHub.load_roles.append(role) ++ ++# respect explicit null command (distinct from unspecified) ++# this avoids relying on KubeSpawner.cmd's default being None ++_unspecified = object() ++specified_cmd = get_config("singleuser.cmd", _unspecified) ++if specified_cmd is not _unspecified: ++ c.Spawner.cmd = specified_cmd + +-set_config_if_not_none(c.Spawner, "cmd", "singleuser.cmd") + set_config_if_not_none(c.Spawner, "default_url", "singleuser.defaultUrl") + +-cloud_metadata = get_config("singleuser.cloudMetadata", {}) ++cloud_metadata = get_config("singleuser.cloudMetadata") + + if cloud_metadata.get("blockWithIptables") == True: + # Use iptables to block access to cloud metadata by default + network_tools_image_name = get_config("singleuser.networkTools.image.name") + network_tools_image_tag = get_config("singleuser.networkTools.image.tag") ++ network_tools_resources = get_config("singleuser.networkTools.resources") ++ ip = cloud_metadata["ip"] + ip_block_container = client.V1Container( + name="block-cloud-metadata", + image=f"{network_tools_image_name}:{network_tools_image_tag}", + command=[ + "iptables", +- "-A", ++ "--append", + "OUTPUT", +- "-d", +- cloud_metadata.get("ip", "169.254.169.254"), +- "-j", ++ "--protocol", ++ "tcp", ++ "--destination", ++ ip, ++ "--destination-port", ++ "80", ++ "--jump", + "DROP", + ], + security_context=client.V1SecurityContext( +@@ -429,6 +460,7 @@ if cloud_metadata.get("blockWithIptables") == True: + run_as_user=0, + capabilities=client.V1Capabilities(add=["NET_ADMIN"]), + ), ++ resources=network_tools_resources, + ) + + c.KubeSpawner.init_containers.append(ip_block_container) +@@ -438,17 +470,6 @@ if get_config("debug.enabled", False): + c.JupyterHub.log_level = 
"DEBUG" + c.Spawner.debug = True + +-# load /usr/local/etc/jupyterhub/jupyterhub_config.d config files +-config_dir = "/usr/local/etc/jupyterhub/jupyterhub_config.d" +-if os.path.isdir(config_dir): +- for file_path in sorted(glob.glob(f"{config_dir}/*.py")): +- file_name = os.path.basename(file_path) +- print(f"Loading {config_dir} config: {file_name}") +- with open(file_path) as f: +- file_content = f.read() +- # compiling makes debugging easier: https://stackoverflow.com/a/437857 +- exec(compile(source=file_content, filename=file_name, mode="exec")) +- + # load potentially seeded secrets + # + # NOTE: ConfigurableHTTPProxy.auth_token is set through an environment variable +@@ -471,11 +492,23 @@ for app, cfg in get_config("hub.config", {}).items(): + cfg.pop("keys", None) + c[app].update(cfg) + ++# load /usr/local/etc/jupyterhub/jupyterhub_config.d config files ++config_dir = "/usr/local/etc/jupyterhub/jupyterhub_config.d" ++if os.path.isdir(config_dir): ++ for file_path in sorted(glob.glob(f"{config_dir}/*.py")): ++ file_name = os.path.basename(file_path) ++ print(f"Loading {config_dir} config: {file_name}") ++ with open(file_path) as f: ++ file_content = f.read() ++ # compiling makes debugging easier: https://stackoverflow.com/a/437857 ++ exec(compile(source=file_content, filename=file_name, mode="exec")) ++ + # execute hub.extraConfig entries + for key, config_py in sorted(get_config("hub.extraConfig", {}).items()): +- print("Loading extra config: %s" % key) ++ print(f"Loading extra config: {key}") + exec(config_py) + ++# CLOUDHARNESS: EDIT START + # Allow switching authenticators easily + auth_type = get_config('hub.config.JupyterHub.authenticator_class') + email_domain = 'local' +@@ -525,4 +558,5 @@ set_config_if_not_none(c.Authenticator, 'whitelist', 'auth.whitelist.users') + c.apps = get_config('apps') + c.registry = get_config('registry') + c.domain = get_config('root.domain') +-c.namespace = get_config('root.namespace') +\ No newline at end of file 
++c.namespace = get_config('root.namespace') ++# CLOUDHARNESS: EDIT END +\ No newline at end of file +diff --git a/applications/jupyterhub/deploy/resources/hub/z2jh.py b/applications/jupyterhub/deploy/resources/hub/z2jh.py +index 834a6b6..fc368f6 100755 +--- a/applications/jupyterhub/deploy/resources/hub/z2jh.py ++++ b/applications/jupyterhub/deploy/resources/hub/z2jh.py +@@ -3,15 +3,15 @@ Utility methods for use in jupyterhub_config.py and dynamic subconfigs. + + Methods here can be imported by extraConfig in values.yaml + """ +-from collections import Mapping +-from functools import lru_cache + import os +-import re ++from collections.abc import Mapping ++from functools import lru_cache + + import yaml + ++ + # memoize so we only load config once +-@lru_cache() ++@lru_cache + def _load_config(): + """Load the Helm chart configuration used to render the Helm templates of + the chart from a mounted k8s Secret, and merge in values from an optionally +@@ -27,6 +27,7 @@ def _load_config(): + cfg = _merge_dictionaries(cfg, values) + else: + print(f"No config at {path}") ++ # EDIT: CLOUDHARNESS START + path = f"/opt/cloudharness/resources/allvalues.yaml" + if os.path.exists(path): + print("Loading global CloudHarness config at", path) +@@ -34,11 +35,11 @@ def _load_config(): + values = yaml.safe_load(f) + cfg = _merge_dictionaries(cfg, values) + cfg['root'] = values +- ++ # EDIT: CLOUDHARNESS END + return cfg + + +-@lru_cache() ++@lru_cache + def _get_config_value(key): + """Load value from the k8s ConfigMap given a key.""" + +@@ -50,7 +51,7 @@ def _get_config_value(key): + raise Exception(f"{path} not found!") + + +-@lru_cache() ++@lru_cache + def get_secret_value(key, default="never-explicitly-set"): + """Load value from the user managed k8s Secret or the default k8s Secret + given a key.""" +@@ -117,7 +118,7 @@ def get_config(key, default=None): + else: + value = value[level] + +- ++ # EDIT: CLOUDHARNESS START + if value and isinstance(value, str): + replace_var = 
re.search("{{.*?}}", value) + if replace_var: +@@ -128,6 +129,7 @@ def get_config(key, default=None): + if repl: + print("replace", variable, "in", value, ":", repl) + value = re.sub("{{.*?}}", repl, value) ++ # EDIT: CLOUDHARNESS END + return value + + +@@ -137,6 +139,5 @@ def set_config_if_not_none(cparent, name, key): + configuration item if not None + """ + data = get_config(key) +- + if data is not None: +- setattr(cparent, name, data) +\ No newline at end of file ++ setattr(cparent, name, data) +diff --git a/applications/jupyterhub/deploy/templates/NOTES.txt b/applications/jupyterhub/deploy/templates/NOTES.txt +new file mode 100644 +index 0000000..9769a9c +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/NOTES.txt +@@ -0,0 +1,158 @@ ++{{- $proxy_service := include "jupyterhub.proxy-public.fullname" . -}} ++ ++{{- /* Generated with https://patorjk.com/software/taag/#p=display&h=0&f=Slant&t=JupyterHub */}} ++. __ __ __ __ __ ++ / / __ __ ____ __ __ / /_ ___ _____ / / / / __ __ / /_ ++ __ / / / / / / / __ \ / / / / / __/ / _ \ / ___/ / /_/ / / / / / / __ \ ++/ /_/ / / /_/ / / /_/ / / /_/ / / /_ / __/ / / / __ / / /_/ / / /_/ / ++\____/ \__,_/ / .___/ \__, / \__/ \___/ /_/ /_/ /_/ \__,_/ /_.___/ ++ /_/ /____/ ++ ++ You have successfully installed the official JupyterHub Helm chart! 
++ ++### Installation info ++ ++ - Kubernetes namespace: {{ .Release.Namespace }} ++ - Helm release name: {{ .Release.Name }} ++ - Helm chart version: {{ .Chart.Version }} ++ - JupyterHub version: {{ .Chart.AppVersion }} ++ - Hub pod packages: See https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/{{ include "jupyterhub.chart-version-to-git-ref" .Chart.Version }}/images/hub/requirements.txt ++ ++### Followup links ++ ++ - Documentation: https://z2jh.jupyter.org ++ - Help forum: https://discourse.jupyter.org ++ - Social chat: https://gitter.im/jupyterhub/jupyterhub ++ - Issue tracking: https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues ++ ++### Post-installation checklist ++ ++ - Verify that created Pods enter a Running state: ++ ++ kubectl --namespace={{ .Release.Namespace }} get pod ++ ++ If a pod is stuck with a Pending or ContainerCreating status, diagnose with: ++ ++ kubectl --namespace={{ .Release.Namespace }} describe pod ++ ++ If a pod keeps restarting, diagnose with: ++ ++ kubectl --namespace={{ .Release.Namespace }} logs --previous ++ {{- println }} ++ ++ {{- if eq .Values.apps.jupyterhub.proxy.service.type "LoadBalancer" }} ++ - Verify an external IP is provided for the k8s Service {{ $proxy_service }}. 
++ ++ kubectl --namespace={{ .Release.Namespace }} get service {{ $proxy_service }} ++ ++ If the external ip remains , diagnose with: ++ ++ kubectl --namespace={{ .Release.Namespace }} describe service {{ $proxy_service }} ++ {{- end }} ++ ++ - Verify web based access: ++ {{- println }} ++ {{- if .Values.apps.jupyterhub.ingress.enabled }} ++ {{- range $host := .Values.apps.jupyterhub.ingress.hosts }} ++ Try insecure HTTP access: http://{{ $host }}{{ $.Values.apps.jupyterhub.hub.baseUrl | trimSuffix "/" }}/ ++ {{- end }} ++ ++ {{- range $tls := .Values.apps.jupyterhub.ingress.tls }} ++ {{- range $host := $tls.hosts }} ++ Try secure HTTPS access: https://{{ $host }}{{ $.Values.apps.jupyterhub.hub.baseUrl | trimSuffix "/" }}/ ++ {{- end }} ++ {{- end }} ++ {{- else }} ++ You have not configured a k8s Ingress resource so you need to access the k8s ++ Service {{ $proxy_service }} directly. ++ {{- println }} ++ ++ {{- if eq .Values.apps.jupyterhub.proxy.service.type "NodePort" }} ++ The k8s Service {{ $proxy_service }} is exposed via NodePorts. That means ++ that all the k8s cluster's nodes are exposing the k8s Service via those ++ ports. ++ ++ Try insecure HTTP access: http://:{{ .Values.apps.jupyterhub.proxy.service.nodePorts.http | default "no-http-nodeport-set"}} ++ Try secure HTTPS access: https://:{{ .Values.apps.jupyterhub.proxy.service.nodePorts.https | default "no-https-nodeport-set" }} ++ ++ {{- else }} ++ If your computer is outside the k8s cluster, you can port-forward traffic to ++ the k8s Service {{ $proxy_service }} with kubectl to access it from your ++ computer. 
++ ++ kubectl --namespace={{ .Release.Namespace }} port-forward service/{{ $proxy_service }} 8080:http ++ ++ Try insecure HTTP access: http://localhost:8080 ++ {{- end }} ++ {{- end }} ++ {{- println }} ++ ++ ++ ++ ++ ++{{- /* ++ Warnings for likely misconfigurations ++*/}} ++ ++{{- if and (not .Values.apps.jupyterhub.scheduling.podPriority.enabled) (and .Values.apps.jupyterhub.scheduling.userPlaceholder.enabled .Values.apps.jupyterhub.scheduling.userPlaceholder.replicas) }} ++################################################################################# ++###### WARNING: You are using user placeholders without pod priority ##### ++###### enabled*, either enable pod priority or stop using the ##### ++###### user placeholders** to avoid having placeholders that ##### ++###### refuse to make room for a real user. ##### ++###### ##### ++###### *scheduling.podPriority.enabled ##### ++###### **scheduling.userPlaceholder.enabled ##### ++###### **scheduling.userPlaceholder.replicas ##### ++################################################################################# ++{{- println }} ++{{- end }} ++ ++ ++ ++ ++ ++{{- /* ++ Breaking changes and failures for likely misconfigurations. ++*/}} ++ ++{{- $breaking := "" }} ++{{- $breaking_title := "\n" }} ++{{- $breaking_title = print $breaking_title "\n#################################################################################" }} ++{{- $breaking_title = print $breaking_title "\n###### BREAKING: The config values passed contained no longer accepted #####" }} ++{{- $breaking_title = print $breaking_title "\n###### options. See the messages below for more details. #####" }} ++{{- $breaking_title = print $breaking_title "\n###### #####" }} ++{{- $breaking_title = print $breaking_title "\n###### To verify your updated config is accepted, you can use #####" }} ++{{- $breaking_title = print $breaking_title "\n###### the `helm template` command. 
#####" }} ++{{- $breaking_title = print $breaking_title "\n#################################################################################" }} ++ ++ ++{{- /* ++ This is an example (in a helm template comment) on how to detect and ++ communicate with regards to a breaking chart config change. ++ ++ {{- if hasKey .Values.apps.jupyterhub.singleuser.cloudMetadata "enabled" }} ++ {{- $breaking = print $breaking "\n\nCHANGED: singleuser.cloudMetadata.enabled must as of 1.0.0 be configured using singleuser.cloudMetadata.blockWithIptables with the opposite value." }} ++ {{- end }} ++*/}} ++ ++ ++{{- if hasKey .Values.apps.jupyterhub.rbac "enabled" }} ++{{- $breaking = print $breaking "\n\nCHANGED: rbac.enabled must as of version 2.0.0 be configured via rbac.create and .serviceAccount.create." }} ++{{- end }} ++ ++ ++{{- if hasKey .Values.apps.jupyterhub.hub "fsGid" }} ++{{- $breaking = print $breaking "\n\nCHANGED: hub.fsGid must as of version 2.0.0 be configured via hub.podSecurityContext.fsGroup." }} ++{{- end }} ++ ++ ++{{- if and .Values.apps.jupyterhub.singleuser.cloudMetadata.blockWithIptables (and .Values.apps.jupyterhub.singleuser.networkPolicy.enabled .Values.apps.jupyterhub.singleuser.networkPolicy.egressAllowRules.cloudMetadataServer) }} ++{{- $breaking = print $breaking "\n\nCHANGED: singleuser.cloudMetadata.blockWithIptables must as of version 3.0.0 not be configured together with singleuser.networkPolicy.egressAllowRules.cloudMetadataServer as it leads to an ambiguous configuration." 
}} ++{{- end }} ++ ++ ++{{- if $breaking }} ++{{- fail (print $breaking_title $breaking "\n\n") }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl +index b742a12..3159d10 100644 +--- a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl ++++ b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl +@@ -168,30 +168,30 @@ ldap.dn.user.useLookupName: LDAPAuthenticator.use_lookup_dn_username + {{- $c := dict }} + {{- $result := (dict "hub" (dict "config" $c)) }} + {{- /* +- Flattens the config in .Values.apps.jupyterhub.auth to a format of ++ Flattens the config in .Values.apps.jupyterhub.apps.jupyterhub.auth to a format of + "keyX.keyY...": "value". Writes output to $c. + */}} +- {{- include "jupyterhub.flattenDict" (list $c (omit .Values.apps.jupyterhub.auth "type" "custom")) }} ++ {{- include "jupyterhub.flattenDict" (list $c (omit .Values.apps.jupyterhub.apps.jupyterhub.auth "type" "custom")) }} + + {{- /* + Transform the flattened config using a dictionary + representing the old z2jh config, output the result + in $c. + */}} +- {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.global.safeToShowValues) }} ++ {{- include "jupyterhub.authDep.remapOldToNew.mappable" (list $c .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub. 
}} + +- {{- $class_old_config_key := .Values.apps.jupyterhub.auth.type | default "" }} {{- /* ldap - github */}} ++ {{- $class_old_config_key := .Values.apps.jupyterhub.apps.jupyterhub.auth.type | default "" }} {{- /* ldap - github */}} + {{- $class_new_entrypoint := "" }} {{- /* ldapauthenticator.LDAPAuthenticator - github */}} + {{- $class_new_config_key := "" }} {{- /* LDAPAuthenticator - GitHubOAuthenticator */}} + + {{- /* SET $class_new_entrypoint, $class_new_config_key */}} + {{- if eq $class_old_config_key "custom" }} +- {{- $class_new_entrypoint = .Values.apps.jupyterhub.auth.custom.className | default "custom.className wasn't configured!" }} ++ {{- $class_new_entrypoint = .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.className | default "custom.className wasn't configured!" }} + {{- $class_new_config_key = $class_new_entrypoint | splitList "." | last }} + {{- /* UPDATE c dict explicitly with auth.custom.config */}} +- {{- if .Values.apps.jupyterhub.auth.custom.config }} +- {{- $custom_config := merge (dict) .Values.apps.jupyterhub.auth.custom.config }} +- {{- if not .Values.apps.jupyterhub.global.safeToShowValues }} ++ {{- if .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} ++ {{- $custom_config := merge (dict) .Values.apps.jupyterhub.apps.jupyterhub.auth.custom.config }} ++ {{- if not .Values.apps.jupyterhub.apps.jupyterhub.global.safeToSho.Values.apps.jupyterhub.}} + {{- range $key, $val := $custom_config }} + {{- $_ := set $custom_config $key "***" }} + {{- end }} +@@ -213,7 +213,7 @@ The JupyterHub Helm chart's auth config has been reworked and requires changes. + + The new way to configure authentication in chart version 0.11.0+ is printed + below for your convenience. The values are not shown by default to ensure no +-secrets are exposed, run helm upgrade with --set global.safeToShowValues=true ++secrets are exposed, run helm upgrade with --set global.safeToSho.Values.apps.jupyterhub.true + to show them. 
+ + {{ $result | toYaml }} +diff --git a/applications/jupyterhub/deploy/templates/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/_helpers-names.tpl +index e9cf7bb..401d601 100644 +--- a/applications/jupyterhub/deploy/templates/_helpers-names.tpl ++++ b/applications/jupyterhub/deploy/templates/_helpers-names.tpl +@@ -3,8 +3,8 @@ + parent charts to reference these dynamic resource names. + + To avoid duplicating documentation, for more information, please see the the +- fullnameOverride entry in schema.yaml or the configuration reference that +- schema.yaml renders to. ++ fullnameOverride entry in values.schema.yaml or the configuration reference ++ that values.schema.yaml renders to. + + https://z2jh.jupyter.org/en/latest/resources/reference.html#fullnameOverride + */}} +@@ -38,8 +38,8 @@ + {{- $name_override := .Values.apps.jupyterhub.nameOverride }} + {{- if ne .Chart.Name "jupyterhub" }} + {{- if .Values.apps.jupyterhub.jupyterhub }} +- {{- $fullname_override = .Values.apps.jupyterhub.fullnameOverride }} +- {{- $name_override = .Values.apps.jupyterhub.nameOverride }} ++ {{- $fullname_override = .Values.apps.jupyterhub.jupyterhub.fullnameOverride }} ++ {{- $name_override = .Values.apps.jupyterhub.jupyterhub.nameOverride }} + {{- end }} + {{- end }} + +@@ -76,12 +76,23 @@ + {{- include "jupyterhub.fullname.dash" . }}hub + {{- end }} + ++{{- /* hub-serviceaccount ServiceAccount */}} ++{{- define "jupyterhub.hub-serviceaccount.fullname" -}} ++ {{- if .Values.apps.jupyterhub.hub.serviceAccount.create }} ++ {{- .Values.apps.jupyterhub.hub.serviceAccount.name | default (include "jupyterhub.hub.fullname" .) }} ++ {{- else }} ++ {{- .Values.apps.jupyterhub.hub.serviceAccount.name | default "default" }} ++ {{- end }} ++{{- end }} ++ + {{- /* hub-existing-secret Secret */}} + {{- define "jupyterhub.hub-existing-secret.fullname" -}} + {{- /* A hack to avoid issues from invoking this from a parent Helm chart. 
*/}} + {{- $existing_secret := .Values.apps.jupyterhub.hub.existingSecret }} + {{- if ne .Chart.Name "jupyterhub" }} +- {{- $existing_secret = .Values.apps.jupyterhub.hub.existingSecret }} ++ {{- if .Values.apps.jupyterhub.jupyterhub }} ++ {{- $existing_secret = .Values.apps.jupyterhub.jupyterhub.hub.existingSecret }} ++ {{- end }} + {{- end }} + {{- if $existing_secret }} + {{- $existing_secret }} +@@ -133,11 +144,29 @@ + {{- include "jupyterhub.fullname.dash" . }}autohttps + {{- end }} + ++{{- /* autohttps-serviceaccount ServiceAccount */}} ++{{- define "jupyterhub.autohttps-serviceaccount.fullname" -}} ++ {{- if .Values.apps.jupyterhub.proxy.traefik.serviceAccount.create }} ++ {{- .Values.apps.jupyterhub.proxy.traefik.serviceAccount.name | default (include "jupyterhub.autohttps.fullname" .) }} ++ {{- else }} ++ {{- .Values.apps.jupyterhub.proxy.traefik.serviceAccount.name | default "default" }} ++ {{- end }} ++{{- end }} ++ + {{- /* user-scheduler Deployment */}} + {{- define "jupyterhub.user-scheduler-deploy.fullname" -}} + {{- include "jupyterhub.fullname.dash" . }}user-scheduler + {{- end }} + ++{{- /* user-scheduler-serviceaccount ServiceAccount */}} ++{{- define "jupyterhub.user-scheduler-serviceaccount.fullname" -}} ++ {{- if .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.create }} ++ {{- .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.name | default (include "jupyterhub.user-scheduler-deploy.fullname" .) }} ++ {{- else }} ++ {{- .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.name | default "default" }} ++ {{- end }} ++{{- end }} ++ + {{- /* user-scheduler leader election lock resource */}} + {{- define "jupyterhub.user-scheduler-lock.fullname" -}} + {{- include "jupyterhub.user-scheduler-deploy.fullname" . }}-lock +@@ -153,6 +182,15 @@ + {{- include "jupyterhub.fullname.dash" . 
}}hook-image-awaiter + {{- end }} + ++{{- /* image-awaiter-serviceaccount ServiceAccount */}} ++{{- define "jupyterhub.hook-image-awaiter-serviceaccount.fullname" -}} ++ {{- if .Values.apps.jupyterhub.prePuller.hook.serviceAccount.create }} ++ {{- .Values.apps.jupyterhub.prePuller.hook.serviceAccount.name | default (include "jupyterhub.hook-image-awaiter.fullname" .) }} ++ {{- else }} ++ {{- .Values.apps.jupyterhub.prePuller.hook.serviceAccount.name | default "default" }} ++ {{- end }} ++{{- end }} ++ + {{- /* hook-image-puller DaemonSet */}} + {{- define "jupyterhub.hook-image-puller.fullname" -}} + {{- include "jupyterhub.fullname.dash" . }}hook-image-puller +@@ -210,6 +248,15 @@ + {{- end }} + {{- end }} + ++{{- /* image-puller Priority */}} ++{{- define "jupyterhub.image-puller-priority.fullname" -}} ++ {{- if (include "jupyterhub.fullname" .) }} ++ {{- include "jupyterhub.fullname.dash" . }}image-puller ++ {{- else }} ++ {{- .Release.Name }}-image-puller-priority ++ {{- end }} ++{{- end }} ++ + {{- /* user-scheduler's registered name */}} + {{- define "jupyterhub.user-scheduler.fullname" -}} + {{- if (include "jupyterhub.fullname" .) }} +@@ -231,6 +278,7 @@ + fullname: {{ include "jupyterhub.fullname" . | quote }} + fullname-dash: {{ include "jupyterhub.fullname.dash" . | quote }} + hub: {{ include "jupyterhub.hub.fullname" . | quote }} ++hub-serviceaccount: {{ include "jupyterhub.hub-serviceaccount.fullname" . | quote }} + hub-existing-secret: {{ include "jupyterhub.hub-existing-secret.fullname" . | quote }} + hub-existing-secret-or-default: {{ include "jupyterhub.hub-existing-secret-or-default.fullname" . | quote }} + hub-pvc: {{ include "jupyterhub.hub-pvc.fullname" . | quote }} +@@ -241,10 +289,14 @@ proxy-public: {{ include "jupyterhub.proxy-public.fullname" . | quote }} + proxy-public-tls: {{ include "jupyterhub.proxy-public-tls.fullname" . | quote }} + proxy-public-manual-tls: {{ include "jupyterhub.proxy-public-manual-tls.fullname" . 
| quote }} + autohttps: {{ include "jupyterhub.autohttps.fullname" . | quote }} ++autohttps-serviceaccount: {{ include "jupyterhub.autohttps-serviceaccount.fullname" . | quote }} + user-scheduler-deploy: {{ include "jupyterhub.user-scheduler-deploy.fullname" . | quote }} ++user-scheduler-serviceaccount: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . | quote }} + user-scheduler-lock: {{ include "jupyterhub.user-scheduler-lock.fullname" . | quote }} + user-placeholder: {{ include "jupyterhub.user-placeholder.fullname" . | quote }} ++image-puller-priority: {{ include "jupyterhub.image-puller-priority.fullname" . | quote }} + hook-image-awaiter: {{ include "jupyterhub.hook-image-awaiter.fullname" . | quote }} ++hook-image-awaiter-serviceaccount: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . | quote }} + hook-image-puller: {{ include "jupyterhub.hook-image-puller.fullname" . | quote }} + continuous-image-puller: {{ include "jupyterhub.continuous-image-puller.fullname" . | quote }} + singleuser: {{ include "jupyterhub.singleuser.fullname" . | quote }} +diff --git a/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl b/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl +new file mode 100644 +index 0000000..4075569 +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/_helpers-netpol.tpl +@@ -0,0 +1,101 @@ ++{{- /* ++ This named template renders egress rules for NetworkPolicy resources based on ++ common configuration. ++ ++ It is rendering based on the `egressAllowRules` and `egress` keys of the ++ passed networkPolicy config object. Each flag set to true under ++ `egressAllowRules` is rendered to a egress rule that next to any custom user ++ defined rules from the `egress` config. ++ ++ This named template needs to render based on a specific networkPolicy ++ resource, but also needs access to the root context. 
Due to that, it ++ accepts a list as its scope, where the first element is supposed to be the ++ root context and the second element is supposed to be the networkPolicy ++ configuration object. ++ ++ As an example, this is how you would render this named template from a ++ NetworkPolicy resource under its egress: ++ ++ egress: ++ # other rules here... ++ ++ {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.hub.networkPolicy)) }} ++ {{- . | nindent 4 }} ++ {{- end }} ++ ++ Note that the reference to privateIPs and nonPrivateIPs relate to ++ https://en.wikipedia.org/wiki/Private_network#Private_IPv4_addresses. ++*/}} ++ ++{{- define "jupyterhub.networkPolicy.renderEgressRules" -}} ++{{- $root := index . 0 }} ++{{- $netpol := index . 1 }} ++{{- if or (or $netpol.egressAllowRules.dnsPortsCloudMetadataServer $netpol.egressAllowRules.dnsPortsKubeSystemNamespace) $netpol.egressAllowRules.dnsPortsPrivateIPs }} ++- ports: ++ - port: 53 ++ protocol: UDP ++ - port: 53 ++ protocol: TCP ++ to: ++ {{- if $netpol.egressAllowRules.dnsPortsCloudMetadataServer }} ++ # Allow outbound connections to DNS ports on the cloud metadata server ++ - ipBlock: ++ cidr: {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip }}/32 ++ {{- end }} ++ {{- if $netpol.egressAllowRules.dnsPortsKubeSystemNamespace }} ++ # Allow outbound connections to DNS ports on pods in the kube-system ++ # namespace ++ - namespaceSelector: ++ matchLabels: ++ kubernetes.io/metadata.name: kube-system ++ {{- end }} ++ {{- if $netpol.egressAllowRules.dnsPortsPrivateIPs }} ++ # Allow outbound connections to DNS ports on destinations in the private IP ++ # ranges ++ - ipBlock: ++ cidr: 10.0.0.0/8 ++ - ipBlock: ++ cidr: 172.16.0.0/12 ++ - ipBlock: ++ cidr: 192.168.0.0/16 ++ {{- end }} ++{{- end }} ++ ++{{- if $netpol.egressAllowRules.nonPrivateIPs }} ++# Allow outbound connections to non-private IP ranges ++- to: ++ - ipBlock: ++ cidr: 0.0.0.0/0 ++ except: ++ # As part of 
this rule: ++ # - don't allow outbound connections to private IPs ++ - 10.0.0.0/8 ++ - 172.16.0.0/12 ++ - 192.168.0.0/16 ++ # - don't allow outbound connections to the cloud metadata server ++ - {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip }}/32 ++{{- end }} ++ ++{{- if $netpol.egressAllowRules.privateIPs }} ++# Allow outbound connections to private IP ranges ++- to: ++ - ipBlock: ++ cidr: 10.0.0.0/8 ++ - ipBlock: ++ cidr: 172.16.0.0/12 ++ - ipBlock: ++ cidr: 192.168.0.0/16 ++{{- end }} ++ ++{{- if $netpol.egressAllowRules.cloudMetadataServer }} ++# Allow outbound connections to the cloud metadata server ++- to: ++ - ipBlock: ++ cidr: {{ $root.Values.apps.jupyterhub.singleuser.cloudMetadata.ip }}/32 ++{{- end }} ++ ++{{- with $netpol.egress }} ++# Allow outbound connections based on user specified rules ++{{ . | toYaml }} ++{{- end }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/_helpers.tpl b/applications/jupyterhub/deploy/templates/_helpers.tpl +index efea86d..a202363 100755 +--- a/applications/jupyterhub/deploy/templates/_helpers.tpl ++++ b/applications/jupyterhub/deploy/templates/_helpers.tpl +@@ -12,7 +12,7 @@ + + When you ask a helper to render its content, one often forward the current + scope to the helper in order to allow it to access .Release.Name, +- .Values.apps.jupyterhub.rbac.enabled and similar values. ++ .Values.apps.jupyterhub.rbac.create and similar values. + + #### Example - Passing the current scope + {{ include "jupyterhub.commonLabels" . }} +@@ -180,8 +180,51 @@ component: {{ include "jupyterhub.componentLabel" . }} + Augments passed .pullSecrets with $.Values.apps.jupyterhub.imagePullSecrets + */}} + {{- define "jupyterhub.imagePullSecrets" -}} ++ {{- /* ++ We have implemented a trick to allow a parent chart depending on this ++ chart to call this named templates. ++ ++ Caveats and notes: ++ ++ 1. While parent charts can reference these, grandparent charts can't. ++ 2. 
Parent charts must not use an alias for this chart. ++ 3. There is no failsafe workaround to above due to ++ https://github.com/helm/helm/issues/9214. ++ 4. .Chart is of its own type (*chart.Metadata) and needs to be casted ++ using "toYaml | fromYaml" in order to be able to use normal helm ++ template functions on it. ++ */}} ++ {{- $jupyterhub_values := .root.Values.apps.jupyterhub.}} ++ {{- if ne .root.Chart.Name "jupyterhub" }} ++ {{- if .root.Values.apps.jupyterhub.jupyterhub }} ++ {{- $jupyterhub_values = .root.Values.apps.jupyterhub.jupyterhub }} ++ {{- end }} ++ {{- end }} + ++ {{- /* Populate $_.list with all relevant entries */}} ++ {{- $_ := dict "list" (concat .image.pullSecrets $jupyterhub_values.imagePullSecrets | uniq) }} ++ {{- if and $jupyterhub_values.imagePullSecret.create $jupyterhub_values.imagePullSecret.automaticReferenceInjection }} ++ {{- $__ := set $_ "list" (append $_.list (include "jupyterhub.image-pull-secret.fullname" .root) | uniq) }} ++ {{- end }} + ++ {{- /* Decide if something should be written */}} ++ {{- if not (eq ($_.list | toJson) "[]") }} ++ ++ {{- /* Process the $_.list where strings become dicts with a name key and the ++ strings become the name keys' values into $_.res */}} ++ {{- $_ := set $_ "res" list }} ++ {{- range $_.list }} ++ {{- if eq (typeOf .) "string" }} ++ {{- $__ := set $_ "res" (append $_.res (dict "name" .)) }} ++ {{- else }} ++ {{- $__ := set $_ "res" (append $_.res .) }} ++ {{- end }} ++ {{- end }} ++ ++ {{- /* Write the results */}} ++ {{- $_.res | toJson }} ++ ++ {{- end }} + {{- end }} + + {{- /* +@@ -339,3 +382,21 @@ limits: + {{- print "\n\nextraFiles entries (" $file_key ") must only contain one of the fields: 'data', 'stringData', and 'binaryData'." | fail }} + {{- end }} + {{- end }} ++ ++{{- /* ++ jupyterhub.chart-version-to-git-ref: ++ Renders a valid git reference from a chartpress generated version string. ++ In practice, either a git tag or a git commit hash will be returned. 
++ ++ - The version string will follow a chartpress pattern, see ++ https://github.com/jupyterhub/chartpress#examples-chart-versions-and-image-tags. ++ ++ - The regexReplaceAll function is a sprig library function, see ++ https://masterminds.github.io/sprig/strings.html. ++ ++ - The regular expression is in golang syntax, but \d had to become \\d for ++ example. ++*/}} ++{{- define "jupyterhub.chart-version-to-git-ref" -}} ++{{- regexReplaceAll ".*[.-]n\\d+[.]h(.*)" . "${1}" }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/hub/configmap.yaml +index c913f67..f52feb6 100755 +--- a/applications/jupyterhub/deploy/templates/hub/configmap.yaml ++++ b/applications/jupyterhub/deploy/templates/hub/configmap.yaml +@@ -29,5 +29,6 @@ data: + */}} + checksum_hook-image-puller: {{ include "jupyterhub.imagePuller.daemonset.hook.checksum" . | quote }} + ++ # EDIT: CLOUDHARNESS + allvalues.yaml: | + {{- .Values | toYaml | nindent 4 }} +\ No newline at end of file +diff --git a/applications/jupyterhub/deploy/templates/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/hub/deployment.yaml +index 82132c6..d105ecc 100755 +--- a/applications/jupyterhub/deploy/templates/hub/deployment.yaml ++++ b/applications/jupyterhub/deploy/templates/hub/deployment.yaml +@@ -5,6 +5,9 @@ metadata: + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + spec: ++ {{- if typeIs "int" .Values.apps.jupyterhub.hub.revisionHistoryLimit }} ++ revisionHistoryLimit: {{ .Values.apps.jupyterhub.hub.revisionHistoryLimit }} ++ {{- end }} + replicas: 1 + selector: + matchLabels: +@@ -30,11 +33,14 @@ spec: + {{- . | toYaml | nindent 8 }} + {{- end }} + spec: +-{{ include "deploy_utils.etcHosts" . | indent 6 }} ++{{ include "deploy_utils.etcHosts" . | indent 6 }} # EDIT: CLOUDHARNESS + {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} + priorityClassName: {{ include "jupyterhub.priority.fullname" . 
}} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.hub.nodeSelector }} ++ {{- with .Values.apps.jupyterhub.hub.nodeSelector }} ++ nodeSelector: ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.hub.tolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +@@ -44,7 +50,7 @@ spec: + - name: config + configMap: + name: {{ include "jupyterhub.hub.fullname" . }} +- {{- /* This is needed by cloudharness libraries */}} ++ {{- /* EDIT: CLOUDHARNESS This is needed by cloudharness libraries */}} + - name: cloudharness-allvalues + configMap: + name: cloudharness-allvalues +@@ -82,11 +88,13 @@ spec: + persistentVolumeClaim: + claimName: {{ include "jupyterhub.hub-pvc.fullname" . }} + {{- end }} +- {{- if .Values.apps.jupyterhub.rbac.enabled }} +- serviceAccountName: {{ include "jupyterhub.hub.fullname" . }} ++ {{- with include "jupyterhub.hub-serviceaccount.fullname" . }} ++ serviceAccountName: {{ . }} + {{- end }} ++ {{- with .Values.apps.jupyterhub.hub.podSecurityContext }} + securityContext: +- fsGroup: {{ .Values.apps.jupyterhub.hub.fsGid }} ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with include "jupyterhub.imagePullSecrets" (dict "root" . "image" .Values.apps.jupyterhub.hub.image) }} + imagePullSecrets: {{ . }} + {{- end }} +@@ -153,14 +161,14 @@ spec: + name: config + - mountPath: /usr/local/etc/jupyterhub/secret/ + name: secret +- - name: cloudharness-allvalues ++ - name: cloudharness-allvalues # EDIT: CLOUDHARNESS START + mountPath: /opt/cloudharness/resources/allvalues.yaml + subPath: allvalues.yaml + {{- if .Values.apps.accounts }} + - name: cloudharness-kc-accounts + mountPath: /opt/cloudharness/resources/auth + readOnly: true +- {{- end }} ++ {{- end }} # EDIT: CLOUDHARNESS END + {{- if (include "jupyterhub.hub-existing-secret.fullname" .) 
}} + - mountPath: /usr/local/etc/jupyterhub/existing-secret/ + name: existing-secret +diff --git a/applications/jupyterhub/deploy/templates/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/hub/netpol.yaml +index 9a7a6bc..d9508e2 100755 +--- a/applications/jupyterhub/deploy/templates/hub/netpol.yaml ++++ b/applications/jupyterhub/deploy/templates/hub/netpol.yaml +@@ -61,31 +61,24 @@ spec: + + egress: + # hub --> proxy +- - ports: +- - port: 8001 +- to: ++ - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "proxy") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8001 ++ + # hub --> singleuser-server +- - ports: +- - port: 8888 +- to: ++ - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "singleuser-server") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8888 + +- # hub --> Kubernetes internal DNS +- - ports: +- - protocol: UDP +- port: 53 +- - protocol: TCP +- port: 53 +- +- {{- with .Values.apps.jupyterhub.hub.networkPolicy.egress }} +- # hub --> depends, but the default is everything +- {{- . | toYaml | nindent 4 }} ++ {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.hub.networkPolicy)) }} ++ {{- . | nindent 4 }} + {{- end }} + {{- end }} +diff --git a/applications/jupyterhub/deploy/templates/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/hub/pdb.yaml +index 855609d..bb6c7b1 100755 +--- a/applications/jupyterhub/deploy/templates/hub/pdb.yaml ++++ b/applications/jupyterhub/deploy/templates/hub/pdb.yaml +@@ -1,9 +1,5 @@ + {{- if .Values.apps.jupyterhub.hub.pdb.enabled -}} +-{{- if .Capabilities.APIVersions.Has "policy/v1" }} + apiVersion: policy/v1 +-{{- else }} +-apiVersion: policy/v1beta1 +-{{- end }} + kind: PodDisruptionBudget + metadata: + name: {{ include "jupyterhub.hub.fullname" . 
}} +diff --git a/applications/jupyterhub/deploy/templates/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/hub/rbac.yaml +index 738daab..1b689af 100755 +--- a/applications/jupyterhub/deploy/templates/hub/rbac.yaml ++++ b/applications/jupyterhub/deploy/templates/hub/rbac.yaml +@@ -1,15 +1,4 @@ +-{{- if .Values.apps.jupyterhub.rbac.enabled -}} +-apiVersion: v1 +-kind: ServiceAccount +-metadata: +- name: {{ include "jupyterhub.hub.fullname" . }} +- {{- with .Values.apps.jupyterhub.hub.serviceAccount.annotations }} +- annotations: +- {{- . | toYaml | nindent 4 }} +- {{- end }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +---- ++{{- if .Values.apps.jupyterhub.rbac.create -}} + kind: Role + apiVersion: rbac.authorization.k8s.io/v1 + metadata: +@@ -32,7 +21,7 @@ metadata: + {{- include "jupyterhub.labels" . | nindent 4 }} + subjects: + - kind: ServiceAccount +- name: {{ include "jupyterhub.hub.fullname" . }} ++ name: {{ include "jupyterhub.hub-serviceaccount.fullname" . }} + namespace: "{{ .Release.Namespace }}" + roleRef: + kind: Role +diff --git a/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml +new file mode 100644 +index 0000000..817ed66 +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/hub/serviceaccount.yaml +@@ -0,0 +1,12 @@ ++{{- if .Values.apps.jupyterhub.hub.serviceAccount.create -}} ++apiVersion: v1 ++kind: ServiceAccount ++metadata: ++ name: {{ include "jupyterhub.hub-serviceaccount.fullname" . }} ++ {{- with .Values.apps.jupyterhub.hub.serviceAccount.annotations }} ++ annotations: ++ {{- . | toYaml | nindent 4 }} ++ {{- end }} ++ labels: ++ {{- include "jupyterhub.labels" . 
| nindent 4 }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/image-pull-secret.yaml b/applications/jupyterhub/deploy/templates/image-pull-secret.yaml +new file mode 100644 +index 0000000..b7544db +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/image-pull-secret.yaml +@@ -0,0 +1,15 @@ ++{{- if .Values.apps.jupyterhub.imagePullSecret.create }} ++kind: Secret ++apiVersion: v1 ++metadata: ++ name: {{ include "jupyterhub.image-pull-secret.fullname" . }} ++ labels: ++ {{- include "jupyterhub.labels" . | nindent 4 }} ++ annotations: ++ "helm.sh/hook": pre-install,pre-upgrade ++ "helm.sh/hook-delete-policy": before-hook-creation ++ "helm.sh/hook-weight": "-20" ++type: kubernetes.io/dockerconfigjson ++data: ++ .dockerconfigjson: {{ include "jupyterhub.dockerconfigjson" . }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl +index e16fd1a..528345c 100644 +--- a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl ++++ b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl +@@ -34,6 +34,9 @@ spec: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 100% ++ {{- if typeIs "int" .Values.apps.jupyterhub.prePuller.revisionHistoryLimit }} ++ revisionHistoryLimit: {{ .Values.apps.jupyterhub.prePuller.revisionHistoryLimit }} ++ {{- end }} + template: + metadata: + labels: +@@ -44,13 +47,17 @@ spec: + {{- end }} + spec: + {{- /* +- continuous-image-puller pods are made evictable to save on the k8s pods +- per node limit all k8s clusters have. ++ image-puller pods are made evictable to save on the k8s pods ++ per node limit all k8s clusters have and have a higher priority ++ than user-placeholder pods that could block an entire node. 
+ */}} +- {{- if and (not .hook) .Values.apps.jupyterhub.scheduling.podPriority.enabled }} +- priorityClassName: {{ include "jupyterhub.user-placeholder-priority.fullname" . }} ++ {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} ++ priorityClassName: {{ include "jupyterhub.image-puller-priority.fullname" . }} ++ {{- end }} ++ {{- with .Values.apps.jupyterhub.singleuser.nodeSelector }} ++ nodeSelector: ++ {{- . | toYaml | nindent 8 }} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.singleuser.nodeSelector }} + {{- with concat .Values.apps.jupyterhub.scheduling.userPods.tolerations .Values.apps.jupyterhub.singleuser.extraTolerations .Values.apps.jupyterhub.prePuller.extraTolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +@@ -127,6 +134,7 @@ spec: + {{- /* --- Conditionally pull profileList images --- */}} + {{- if .Values.apps.jupyterhub.prePuller.pullProfileListImages }} + {{- range $k, $container := .Values.apps.jupyterhub.singleuser.profileList }} ++ {{- /* profile's kubespawner_override */}} + {{- if $container.kubespawner_override }} + {{- if $container.kubespawner_override.image }} + - name: image-pull-singleuser-profilelist-{{ $k }} +@@ -145,13 +153,15 @@ spec: + {{- end }} + {{- end }} + {{- end }} +- {{- end }} +- {{- end }} +- +- {{- /* --- Pull extra images --- */}} +- {{- range $k, $v := .Values.apps.jupyterhub.prePuller.extraImages }} +- - name: image-pull-{{ $k }} +- image: {{ $v.name }}:{{ $v.tag }} ++ {{- /* kubespawner_override in profile's profile_options */}} ++ {{- if $container.profile_options }} ++ {{- range $option, $option_spec := $container.profile_options }} ++ {{- if $option_spec.choices }} ++ {{- range $choice, $choice_spec := $option_spec.choices }} ++ {{- if $choice_spec.kubespawner_override }} ++ {{- if $choice_spec.kubespawner_override.image }} ++ - name: image-pull-profile-{{ $k }}-option-{{ $option }}-{{ $choice }} ++ image: {{ $choice_spec.kubespawner_override.image }} + command: + - 
/bin/sh + - -c +@@ -163,13 +173,20 @@ spec: + {{- with $.Values.apps.jupyterhub.prePuller.containerSecurityContext }} + securityContext: + {{- . | toYaml | nindent 12 }} +- {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} ++ {{- end }} + {{- end }} + +- {{- /* --- Pull CloudHarness tasks images --- */}} +- {{- range $k, $v := ( index .Values "task-images" ) }} +- - name: image-pull-{{ $k | replace "-" "" }} +- image: {{ $v }} ++ {{- /* --- Pull extra images --- */}} ++ {{- range $k, $v := .Values.apps.jupyterhub.prePuller.extraImages }} ++ - name: image-pull-{{ $k }} ++ image: {{ $v.name }}:{{ $v.tag }} + command: + - /bin/sh + - -c +diff --git a/applications/jupyterhub/deploy/templates/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/image-puller/job.yaml +index bdd9f63..cc6db3e 100755 +--- a/applications/jupyterhub/deploy/templates/image-puller/job.yaml ++++ b/applications/jupyterhub/deploy/templates/image-puller/job.yaml +@@ -28,16 +28,22 @@ spec: + labels: + {{- /* Changes here will cause the Job to restart the pods. */}} + {{- include "jupyterhub.matchLabels" . | nindent 8 }} ++ {{- with .Values.apps.jupyterhub.prePuller.labels }} ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with .Values.apps.jupyterhub.prePuller.annotations }} + annotations: + {{- . | toYaml | nindent 8 }} + {{- end }} + spec: + restartPolicy: Never +- {{- if .Values.apps.jupyterhub.rbac.enabled }} +- serviceAccountName: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} ++ {{- with include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . }} ++ serviceAccountName: {{ . }} ++ {{- end }} ++ {{- with .Values.apps.jupyterhub.prePuller.hook.nodeSelector }} ++ nodeSelector: ++ {{- . 
| toYaml | nindent 8 }} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.prePuller.hook.nodeSelector }} + {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.prePuller.hook.tolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +@@ -58,6 +64,7 @@ spec: + - -api-server-address=https://kubernetes.default.svc:$(KUBERNETES_SERVICE_PORT) + - -namespace={{ .Release.Namespace }} + - -daemonset={{ include "jupyterhub.hook-image-puller.fullname" . }} ++ - -pod-scheduling-wait-duration={{ .Values.apps.jupyterhub.prePuller.hook.podSchedulingWaitDuration }} + {{- with .Values.apps.jupyterhub.prePuller.hook.containerSecurityContext }} + securityContext: + {{- . | toYaml | nindent 12 }} +diff --git a/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml b/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml +new file mode 100644 +index 0000000..1a3fca3 +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/image-puller/priorityclass.yaml +@@ -0,0 +1,18 @@ ++{{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} ++{{- if or .Values.apps.jupyterhub.prePuller.hook.enabled .Values.apps.jupyterhub.prePuller.continuous.enabled -}} ++apiVersion: scheduling.k8s.io/v1 ++kind: PriorityClass ++metadata: ++ name: {{ include "jupyterhub.image-puller-priority.fullname" . }} ++ annotations: ++ meta.helm.sh/release-name: "{{ .Release.Name }}" ++ meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" ++ labels: ++ {{- include "jupyterhub.labels" . | nindent 4 }} ++value: {{ .Values.apps.jupyterhub.scheduling.podPriority.imagePullerPriority }} ++globalDefault: false ++description: >- ++ Enables [hook|continuous]-image-puller pods to fit on nodes even though they ++ are clogged by user-placeholder pods, while not evicting normal user pods. 
++{{- end }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml +index 95c86dd..5946896 100755 +--- a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml ++++ b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml +@@ -1,29 +1,8 @@ + {{- /* + Permissions to be used by the hook-image-awaiter job + */}} +-{{- if .Values.apps.jupyterhub.rbac.enabled }} +-{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) }} +-{{- /* +-This service account... +-*/ -}} +-apiVersion: v1 +-kind: ServiceAccount +-metadata: +- name: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +- hub.jupyter.org/deletable: "true" +- annotations: +- "helm.sh/hook": pre-install,pre-upgrade +- "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded +- "helm.sh/hook-weight": "0" +- {{- with .Values.apps.jupyterhub.prePuller.hook.serviceAccount.annotations }} +- {{- . | toYaml | nindent 4 }} +- {{- end }} +---- +-{{- /* +-... will be used by this role... +-*/}} ++{{- if .Values.apps.jupyterhub.rbac.create -}} ++{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) -}} + kind: Role + apiVersion: rbac.authorization.k8s.io/v1 + metadata: +@@ -56,7 +35,7 @@ metadata: + "helm.sh/hook-weight": "0" + subjects: + - kind: ServiceAccount +- name: {{ include "jupyterhub.hook-image-awaiter.fullname" . }} ++ name: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . 
}} + namespace: "{{ .Release.Namespace }}" + roleRef: + kind: Role +diff --git a/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml +new file mode 100644 +index 0000000..2e5fa72 +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/image-puller/serviceaccount.yaml +@@ -0,0 +1,21 @@ ++{{- /* ++ServiceAccount for the pre-puller hook's image-awaiter-job ++*/}} ++{{- if .Values.apps.jupyterhub.prePuller.hook.serviceAccount.create -}} ++{{- if (include "jupyterhub.imagePuller.daemonset.hook.install" .) -}} ++apiVersion: v1 ++kind: ServiceAccount ++metadata: ++ name: {{ include "jupyterhub.hook-image-awaiter-serviceaccount.fullname" . }} ++ labels: ++ {{- include "jupyterhub.labels" . | nindent 4 }} ++ hub.jupyter.org/deletable: "true" ++ annotations: ++ "helm.sh/hook": pre-install,pre-upgrade ++ "helm.sh/hook-delete-policy": before-hook-creation,hook-succeeded ++ "helm.sh/hook-weight": "0" ++ {{- with .Values.apps.jupyterhub.prePuller.hook.serviceAccount.annotations }} ++ {{- . | toYaml | nindent 4 }} ++ {{- end }} ++{{- end }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt +deleted file mode 100755 +index 08bd7bb..0000000 +--- a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt ++++ /dev/null +@@ -1,9 +0,0 @@ +-# Automatic HTTPS Terminator +- +-This directory has Kubernetes objects for automatic Let's Encrypt Support. +-When enabled, we create a new deployment object that has an nginx-ingress +-and kube-lego container in it. This is responsible for requesting, +-storing and renewing certificates as needed from Let's Encrypt. +- +-The only change required outside of this directory is in the `proxy-public` +-service, which targets different hubs based on automatic HTTPS status. 
+\ No newline at end of file +diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml +deleted file mode 100755 +index 8d71a97..0000000 +--- a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml ++++ /dev/null +@@ -1,28 +0,0 @@ +-{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} +-{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} +-{{- if $autoHTTPS -}} +-{{- $_ := .Values.apps.jupyterhub.proxy.https.letsencrypt.contactEmail | required "proxy.https.letsencrypt.contactEmail is a required field" -}} +- +-# This configmap contains Traefik configuration files to be mounted. +-# - traefik.yaml will only be read during startup (static configuration) +-# - dynamic.yaml will be read on change (dynamic configuration) +-# +-# ref: https://docs.traefik.io/getting-started/configuration-overview/ +-# +-# The configuration files are first rendered with Helm templating to large YAML +-# strings. Then we use the fromYAML function on these strings to get an object, +-# that we in turn merge with user provided extra configuration. +-# +-kind: ConfigMap +-apiVersion: v1 +-metadata: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +-data: +- traefik.yaml: | +- {{- include "jupyterhub.traefik.yaml" . | fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraStaticConfig | toYaml | nindent 4 }} +- dynamic.yaml: | +- {{- include "jupyterhub.dynamic.yaml" . 
| fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraDynamicConfig | toYaml | nindent 4 }} +- +-{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml +deleted file mode 100755 +index fcb062f..0000000 +--- a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml ++++ /dev/null +@@ -1,141 +0,0 @@ +-{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} +-{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} +-{{- if $autoHTTPS -}} +-apiVersion: apps/v1 +-kind: Deployment +-metadata: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +-spec: +- replicas: 1 +- selector: +- matchLabels: +- {{- include "jupyterhub.matchLabels" . | nindent 6 }} +- template: +- metadata: +- labels: +- {{- include "jupyterhub.matchLabels" . | nindent 8 }} +- hub.jupyter.org/network-access-proxy-http: "true" +- {{- with .Values.apps.jupyterhub.proxy.traefik.labels }} +- {{- . | toYaml | nindent 8 }} +- {{- end }} +- annotations: +- # Only force a restart through a change to this checksum when the static +- # configuration is changed, as the dynamic can be updated after start. +- # Any disruptions to this deployment impacts everything, it is the +- # entrypoint of all network traffic. +- checksum/static-config: {{ include "jupyterhub.traefik.yaml" . | fromYaml | merge .Values.apps.jupyterhub.proxy.traefik.extraStaticConfig | toYaml | sha256sum }} +- spec: +- {{- if .Values.apps.jupyterhub.rbac.enabled }} +- serviceAccountName: {{ include "jupyterhub.autohttps.fullname" . }} +- {{- end }} +- {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} +- priorityClassName: {{ include "jupyterhub.priority.fullname" . 
}} +- {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.proxy.traefik.nodeSelector }} +- {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.proxy.traefik.tolerations }} +- tolerations: +- {{- . | toYaml | nindent 8 }} +- {{- end }} +- {{- include "jupyterhub.coreAffinity" . | nindent 6 }} +- volumes: +- - name: certificates +- emptyDir: {} +- - name: traefik-config +- configMap: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraVolumes }} +- {{- . | toYaml | nindent 8 }} +- {{- end }} +- {{- with include "jupyterhub.imagePullSecrets" (dict "root" . "image" .Values.apps.jupyterhub.proxy.traefik.image) }} +- imagePullSecrets: {{ . }} +- {{- end }} +- initContainers: +- - name: load-acme +- image: "{{ .Values.apps.jupyterhub.proxy.secretSync.image.name }}:{{ .Values.apps.jupyterhub.proxy.secretSync.image.tag }}" +- {{- with .Values.apps.jupyterhub.proxy.secretSync.image.pullPolicy }} +- imagePullPolicy: {{ . }} +- {{- end }} +- args: +- - load +- - {{ include "jupyterhub.proxy-public-tls.fullname" . }} +- - acme.json +- - /etc/acme/acme.json +- env: +- # We need this to get logs immediately +- - name: PYTHONUNBUFFERED +- value: "True" +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraEnv }} +- {{- include "jupyterhub.extraEnv" . | nindent 12 }} +- {{- end }} +- volumeMounts: +- - name: certificates +- mountPath: /etc/acme +- {{- with .Values.apps.jupyterhub.proxy.secretSync.containerSecurityContext }} +- securityContext: +- {{- . | toYaml | nindent 12 }} +- {{- end }} +- containers: +- - name: traefik +- image: "{{ .Values.apps.jupyterhub.proxy.traefik.image.name }}:{{ .Values.apps.jupyterhub.proxy.traefik.image.tag }}" +- {{- with .Values.apps.jupyterhub.proxy.traefik.image.pullPolicy }} +- imagePullPolicy: {{ . }} +- {{- end }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.resources }} +- resources: +- {{- . 
| toYaml | nindent 12 }} +- {{- end }} +- ports: +- - name: http +- containerPort: 8080 +- - name: https +- containerPort: 8443 +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraPorts }} +- {{- . | toYaml | nindent 12 }} +- {{- end }} +- volumeMounts: +- - name: traefik-config +- mountPath: /etc/traefik +- - name: certificates +- mountPath: /etc/acme +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraVolumeMounts }} +- {{- . | toYaml | nindent 12 }} +- {{- end }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraEnv }} +- env: +- {{- include "jupyterhub.extraEnv" . | nindent 12 }} +- {{- end }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.containerSecurityContext }} +- securityContext: +- {{- . | toYaml | nindent 12 }} +- {{- end }} +- - name: secret-sync +- image: "{{ .Values.apps.jupyterhub.proxy.secretSync.image.name }}:{{ .Values.apps.jupyterhub.proxy.secretSync.image.tag }}" +- {{- with .Values.apps.jupyterhub.proxy.secretSync.image.pullPolicy }} +- imagePullPolicy: {{ . }} +- {{- end }} +- args: +- - watch-save +- - --label=app={{ include "jupyterhub.appLabel" . }} +- - --label=release={{ .Release.Name }} +- - --label=chart={{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }} +- - --label=heritage=secret-sync +- - {{ include "jupyterhub.proxy-public-tls.fullname" . }} +- - acme.json +- - /etc/acme/acme.json +- env: +- # We need this to get logs immediately +- - name: PYTHONUNBUFFERED +- value: "True" +- volumeMounts: +- - name: certificates +- mountPath: /etc/acme +- {{- with .Values.apps.jupyterhub.proxy.secretSync.containerSecurityContext }} +- securityContext: +- {{- . | toYaml | nindent 12 }} +- {{- end }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.extraPodSpec }} +- {{- . 
| toYaml | nindent 6 }} +- {{- end }} +-{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml +deleted file mode 100755 +index ea43b67..0000000 +--- a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml ++++ /dev/null +@@ -1,40 +0,0 @@ +-{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} +-{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} +-{{- if (and $autoHTTPS .Values.apps.jupyterhub.rbac.enabled) -}} +-apiVersion: rbac.authorization.k8s.io/v1 +-kind: Role +-metadata: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +- {{- with .Values.apps.jupyterhub.proxy.traefik.serviceAccount.annotations }} +- annotations: +- {{- . | toYaml | nindent 4 }} +- {{- end }} +-rules: +-- apiGroups: [""] +- resources: ["secrets"] +- verbs: ["get", "patch", "list", "create"] +---- +-apiVersion: rbac.authorization.k8s.io/v1 +-kind: RoleBinding +-metadata: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +-subjects: +-- kind: ServiceAccount +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- apiGroup: +-roleRef: +- kind: Role +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- apiGroup: rbac.authorization.k8s.io +---- +-apiVersion: v1 +-kind: ServiceAccount +-metadata: +- name: {{ include "jupyterhub.autohttps.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . 
| nindent 4 }} +-{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml +deleted file mode 100755 +index d57c135..0000000 +--- a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml ++++ /dev/null +@@ -1,25 +0,0 @@ +-{{- $HTTPS := (and .Values.apps.jupyterhub.proxy.https.hosts .Values.apps.jupyterhub.proxy.https.enabled) }} +-{{- $autoHTTPS := (and $HTTPS (eq .Values.apps.jupyterhub.proxy.https.type "letsencrypt")) }} +-{{- if $autoHTTPS -}} +-apiVersion: v1 +-kind: Service +-metadata: +- name: {{ include "jupyterhub.proxy-http.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +- {{- with .Values.apps.jupyterhub.proxy.service.labels }} +- {{- . | toYaml | nindent 4 }} +- {{- end }} +- {{- with .Values.apps.jupyterhub.proxy.service.annotations }} +- annotations: +- {{- . | toYaml | nindent 4 }} +- {{- end }} +-spec: +- type: ClusterIP +- selector: +- {{- $_ := merge (dict "componentLabel" "proxy") . }} +- {{- include "jupyterhub.matchLabels" $_ | nindent 4 }} +- ports: +- - port: 8000 +- targetPort: http +-{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml +index 6d63ba8..bb37b8f 100755 +--- a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml ++++ b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml +@@ -7,6 +7,9 @@ metadata: + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + spec: ++ {{- if typeIs "int" .Values.apps.jupyterhub.proxy.chp.revisionHistoryLimit }} ++ revisionHistoryLimit: {{ .Values.apps.jupyterhub.proxy.chp.revisionHistoryLimit }} ++ {{- end }} + replicas: 1 + selector: + matchLabels: +@@ -35,7 +38,7 @@ spec: + # match the k8s Secret during the first upgrade following an auth_token + # was generated. 
+ checksum/auth-token: {{ include "jupyterhub.hub.config.ConfigurableHTTPProxy.auth_token" . | sha256sum | trunc 4 | quote }} +- checksum/proxy-secret: {{ include (print $.Template.BasePath "/jupyterhub/hub/secret.yaml") . | sha256sum }} ++ checksum/proxy-secret: {{ include (print $.Template.BasePath "/jupyterhub/proxy/secret.yaml") . | sha256sum | quote }} + {{- with .Values.apps.jupyterhub.proxy.annotations }} + {{- . | toYaml | nindent 8 }} + {{- end }} +@@ -44,7 +47,10 @@ spec: + {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} + priorityClassName: {{ include "jupyterhub.priority.fullname" . }} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.proxy.chp.nodeSelector }} ++ {{- with .Values.apps.jupyterhub.proxy.chp.nodeSelector }} ++ nodeSelector: ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.proxy.chp.tolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +@@ -135,6 +141,8 @@ spec: + livenessProbe: + initialDelaySeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.periodSeconds }} ++ timeoutSeconds: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.timeoutSeconds }} ++ failureThreshold: {{ .Values.apps.jupyterhub.proxy.chp.livenessProbe.failureThreshold }} + httpGet: + path: /_chp_healthz + {{- if or $manualHTTPS $manualHTTPSwithsecret }} +@@ -149,6 +157,8 @@ spec: + readinessProbe: + initialDelaySeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.periodSeconds }} ++ timeoutSeconds: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.timeoutSeconds }} ++ failureThreshold: {{ .Values.apps.jupyterhub.proxy.chp.readinessProbe.failureThreshold }} + httpGet: + path: /_chp_healthz + {{- if or $manualHTTPS $manualHTTPSwithsecret }} +diff 
--git a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml +index adc8277..88a00be 100755 +--- a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml ++++ b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml +@@ -85,32 +85,24 @@ spec: + + egress: + # proxy --> hub +- - ports: +- - port: 8081 +- to: ++ - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "hub") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8081 + + # proxy --> singleuser-server +- - ports: +- - port: 8888 +- to: ++ - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "singleuser-server") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8888 + +- # proxy --> Kubernetes internal DNS +- - ports: +- - protocol: UDP +- port: 53 +- - protocol: TCP +- port: 53 +- +- {{- with .Values.apps.jupyterhub.proxy.chp.networkPolicy.egress }} +- # proxy --> depends, but the default is everything +- {{- . | toYaml | nindent 4 }} ++ {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.proxy.chp.networkPolicy)) }} ++ {{- . | nindent 4 }} + {{- end }} + {{- end }} +diff --git a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml +index 1846a3b..155895b 100755 +--- a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml ++++ b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml +@@ -1,9 +1,5 @@ + {{- if .Values.apps.jupyterhub.proxy.chp.pdb.enabled -}} +-{{- if .Capabilities.APIVersions.Has "policy/v1" }} + apiVersion: policy/v1 +-{{- else }} +-apiVersion: policy/v1beta1 +-{{- end }} + kind: PodDisruptionBudget + metadata: + name: {{ include "jupyterhub.proxy.fullname" . 
}} +diff --git a/applications/jupyterhub/deploy/templates/proxy/service.yaml b/applications/jupyterhub/deploy/templates/proxy/service.yaml +index 0d9ca5b..f634ba9 100755 +--- a/applications/jupyterhub/deploy/templates/proxy/service.yaml ++++ b/applications/jupyterhub/deploy/templates/proxy/service.yaml +@@ -35,12 +35,15 @@ metadata: + {{- end }} + spec: + selector: ++ # This service will target the autohttps pod if autohttps is configured, and ++ # the proxy pod if not. When autohttps is configured, the service proxy-http ++ # will be around to target the proxy pod directly. + {{- if $autoHTTPS }} +- component: autohttps ++ {{- $_ := merge (dict "componentLabel" "autohttps") . -}} ++ {{- include "jupyterhub.matchLabels" $_ | nindent 4 }} + {{- else }} +- component: proxy ++ {{- include "jupyterhub.matchLabels" . | nindent 4 }} + {{- end }} +- release: {{ .Release.Name }} + ports: + {{- if $HTTPS }} + - name: https +diff --git a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml +index 588cf19..1bed905 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml +@@ -4,22 +4,9 @@ kind: PriorityClass + metadata: + name: {{ include "jupyterhub.priority.fullname" . }} + annotations: +- # FIXME: PriorityClasses must be added before the other resources reference +- # them, and in the past a workaround was needed to accomplish this: +- # to make the resource a Helm hook. +- # +- # To transition this resource to no longer be a Helm hook resource, +- # we explicitly add ownership annotations/labels (in 1.0.0) which +- # will allow a future upgrade (in 2.0.0) to remove all hook and +- # ownership annotations/labels. 
+- # +- helm.sh/hook: pre-install,pre-upgrade +- helm.sh/hook-delete-policy: before-hook-creation +- helm.sh/hook-weight: "-100" + meta.helm.sh/release-name: "{{ .Release.Name }}" + meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" + labels: +- app.kubernetes.io/managed-by: Helm + {{- $_ := merge (dict "componentLabel" "default-priority") . }} + {{- include "jupyterhub.labels" $_ | nindent 4 }} + value: {{ .Values.apps.jupyterhub.scheduling.podPriority.defaultPriority }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml +index b1dc6c5..800ac20 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml +@@ -3,11 +3,7 @@ The cluster autoscaler should be allowed to evict and reschedule these pods if + it would help in order to scale down a node. + */}} + {{- if .Values.apps.jupyterhub.scheduling.userPlaceholder.enabled -}} +-{{- if .Capabilities.APIVersions.Has "policy/v1" }} + apiVersion: policy/v1 +-{{- else }} +-apiVersion: policy/v1beta1 +-{{- end }} + kind: PodDisruptionBudget + metadata: + name: {{ include "jupyterhub.user-placeholder.fullname" . }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml +index e03497d..688e217 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml +@@ -5,22 +5,9 @@ kind: PriorityClass + metadata: + name: {{ include "jupyterhub.user-placeholder-priority.fullname" . 
}} + annotations: +- # FIXME: PriorityClasses must be added before the other resources reference +- # them, and in the past a workaround was needed to accomplish this: +- # to make the resource a Helm hook. +- # +- # To transition this resource to no longer be a Helm hook resource, +- # we explicitly add ownership annotations/labels (in 1.0.0) which +- # will allow a future upgrade (in 2.0.0) to remove all hook and +- # ownership annotations/labels. +- # +- helm.sh/hook: pre-install,pre-upgrade +- helm.sh/hook-delete-policy: before-hook-creation +- helm.sh/hook-weight: "-100" + meta.helm.sh/release-name: "{{ .Release.Name }}" + meta.helm.sh/release-namespace: "{{ .Release.Namespace }}" + labels: +- app.kubernetes.io/managed-by: Helm + {{- include "jupyterhub.labels" . | nindent 4 }} + value: {{ .Values.apps.jupyterhub.scheduling.podPriority.userPlaceholderPriority }} + globalDefault: false +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml +index 114f626..c243bee 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml +@@ -16,6 +16,9 @@ metadata: + {{- include "jupyterhub.labels" . | nindent 4 }} + spec: + podManagementPolicy: Parallel ++ {{- if typeIs "int" .Values.apps.jupyterhub.scheduling.userPlaceholder.revisionHistoryLimit }} ++ revisionHistoryLimit: {{ .Values.apps.jupyterhub.scheduling.userPlaceholder.revisionHistoryLimit }} ++ {{- end }} + replicas: {{ .Values.apps.jupyterhub.scheduling.userPlaceholder.replicas }} + selector: + matchLabels: +@@ -23,9 +26,16 @@ spec: + serviceName: {{ include "jupyterhub.user-placeholder.fullname" . }} + template: + metadata: ++ {{- with .Values.apps.jupyterhub.scheduling.userPlaceholder.annotations }} ++ annotations: ++ {{- . 
| toYaml | nindent 8 }} ++ {{- end }} + labels: + {{- /* Changes here will cause the Deployment to restart the pods. */}} + {{- include "jupyterhub.matchLabels" . | nindent 8 }} ++ {{- with .Values.apps.jupyterhub.scheduling.userPlaceholder.labels }} ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + spec: + {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} + priorityClassName: {{ include "jupyterhub.user-placeholder-priority.fullname" . }} +@@ -33,7 +43,10 @@ spec: + {{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled }} + schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . }} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.singleuser.nodeSelector }} ++ {{- with .Values.apps.jupyterhub.singleuser.nodeSelector }} ++ nodeSelector: ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with concat .Values.apps.jupyterhub.scheduling.userPods.tolerations .Values.apps.jupyterhub.singleuser.extraTolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml +index ef8a37f..3e83b44 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml +@@ -6,16 +6,28 @@ metadata: + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + data: +- # ref: https://kubernetes.io/docs/reference/scheduling/config/ ++ {{- /* ++ This is configuration of a k8s official kube-scheduler binary running in the ++ user-scheduler. 
++ ++ ref: https://kubernetes.io/docs/reference/scheduling/config/ ++ ref: https://kubernetes.io/docs/reference/config-api/kube-scheduler-config.v1/ ++ */}} + config.yaml: | +- apiVersion: kubescheduler.config.k8s.io/v1beta1 ++ apiVersion: kubescheduler.config.k8s.io/v1 + kind: KubeSchedulerConfiguration + leaderElection: +- resourceLock: endpoints ++ resourceLock: leases + resourceName: {{ include "jupyterhub.user-scheduler-lock.fullname" . }} + resourceNamespace: "{{ .Release.Namespace }}" + profiles: + - schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.plugins }} + plugins: +- {{- .Values.apps.jupyterhub.scheduling.userScheduler.plugins | toYaml | nindent 10 }} ++ {{- . | toYaml | nindent 10 }} ++ {{- end }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.pluginConfig }} ++ pluginConfig: ++ {{- . | toYaml | nindent 10 }} ++ {{- end }} + {{- end }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml +index 1bcaf31..f22d0de 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml +@@ -6,6 +6,9 @@ metadata: + labels: + {{- include "jupyterhub.labels" . | nindent 4 }} + spec: ++ {{- if typeIs "int" .Values.apps.jupyterhub.scheduling.userScheduler.revisionHistoryLimit }} ++ revisionHistoryLimit: {{ .Values.apps.jupyterhub.scheduling.userScheduler.revisionHistoryLimit }} ++ {{- end }} + replicas: {{ .Values.apps.jupyterhub.scheduling.userScheduler.replicas }} + selector: + matchLabels: +@@ -14,16 +17,25 @@ spec: + metadata: + labels: + {{- include "jupyterhub.matchLabels" . | nindent 8 }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.labels }} ++ {{- . 
| toYaml | nindent 8 }} ++ {{- end }} + annotations: + checksum/config-map: {{ include (print $.Template.BasePath "/jupyterhub/scheduling/user-scheduler/configmap.yaml") . | sha256sum }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.annotations }} ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + spec: +- {{- if .Values.apps.jupyterhub.rbac.enabled }} +- serviceAccountName: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} ++ {{ with include "jupyterhub.user-scheduler-serviceaccount.fullname" . }} ++ serviceAccountName: {{ . }} + {{- end }} + {{- if .Values.apps.jupyterhub.scheduling.podPriority.enabled }} + priorityClassName: {{ include "jupyterhub.priority.fullname" . }} + {{- end }} +- nodeSelector: {{ toJson .Values.apps.jupyterhub.scheduling.userScheduler.nodeSelector }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.nodeSelector }} ++ nodeSelector: ++ {{- . | toYaml | nindent 8 }} ++ {{- end }} + {{- with concat .Values.apps.jupyterhub.scheduling.corePods.tolerations .Values.apps.jupyterhub.scheduling.userScheduler.tolerations }} + tolerations: + {{- . | toYaml | nindent 8 }} +@@ -44,13 +56,6 @@ spec: + {{- end }} + command: + - /usr/local/bin/kube-scheduler +- # NOTE: --leader-elect-... (new) and --lock-object-... (deprecated) +- # flags are silently ignored in favor of whats defined in the +- # passed KubeSchedulerConfiguration whenever --config is +- # passed. +- # +- # ref: https://kubernetes.io/docs/reference/command-line-tools-reference/kube-scheduler/ +- # + # NOTE: --authentication-skip-lookup=true is used to avoid a + # seemingly harmless error, if we need to not skip + # "authentication lookup" in the future, see the linked issue. 
+@@ -65,12 +70,14 @@ spec: + livenessProbe: + httpGet: + path: /healthz +- port: 10251 ++ scheme: HTTPS ++ port: 10259 + initialDelaySeconds: 15 + readinessProbe: + httpGet: + path: /healthz +- port: 10251 ++ scheme: HTTPS ++ port: 10259 + {{- with .Values.apps.jupyterhub.scheduling.userScheduler.resources }} + resources: + {{- . | toYaml | nindent 12 }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml +index 04f2af8..2c9c6de 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml +@@ -1,9 +1,5 @@ + {{- if and .Values.apps.jupyterhub.scheduling.userScheduler.enabled .Values.apps.jupyterhub.scheduling.userScheduler.pdb.enabled -}} +-{{- if .Capabilities.APIVersions.Has "policy/v1" }} + apiVersion: policy/v1 +-{{- else }} +-apiVersion: policy/v1beta1 +-{{- end }} + kind: PodDisruptionBudget + metadata: + name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml +index 083e065..9c7fab7 100755 +--- a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml +@@ -1,16 +1,5 @@ + {{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled -}} +-{{- if .Values.apps.jupyterhub.rbac.enabled }} +-apiVersion: v1 +-kind: ServiceAccount +-metadata: +- name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} +- labels: +- {{- include "jupyterhub.labels" . | nindent 4 }} +- {{- with .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.annotations }} +- annotations: +- {{- . 
| toYaml | nindent 4 }} +- {{- end }} +---- ++{{- if .Values.apps.jupyterhub.rbac.create -}} + kind: ClusterRole + apiVersion: rbac.authorization.k8s.io/v1 + metadata: +@@ -19,13 +8,23 @@ metadata: + {{- include "jupyterhub.labels" . | nindent 4 }} + rules: + # Copied from the system:kube-scheduler ClusterRole of the k8s version +- # matching the kube-scheduler binary we use. A modification of two resource +- # name references from kube-scheduler to user-scheduler-lock was made. ++ # matching the kube-scheduler binary we use. A modification has been made to ++ # resourceName fields to remain relevant for how we have named our resources ++ # in this Helm chart. + # +- # NOTE: These rules have been unchanged between 1.12 and 1.15, then changed in +- # 1.16 and in 1.17, but unchanged in 1.18 and 1.19. ++ # NOTE: These rules have been: ++ # - unchanged between 1.12 and 1.15 ++ # - changed in 1.16 ++ # - changed in 1.17 ++ # - unchanged between 1.18 and 1.20 ++ # - changed in 1.21: get/list/watch permission for namespace, ++ # csidrivers, csistoragecapacities was added. ++ # - unchanged between 1.22 and 1.27 ++ # - changed in 1.28: permissions to get/update lock endpoint resource ++ # removed ++ # - unchanged between 1.28 and 1.29 + # +- # ref: https://github.com/kubernetes/kubernetes/blob/v1.19.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L696-L829 ++ # ref: https://github.com/kubernetes/kubernetes/blob/v1.29.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L721-L862 + - apiGroups: + - "" + - events.k8s.io +@@ -50,21 +49,6 @@ rules: + verbs: + - get + - update +- - apiGroups: +- - "" +- resources: +- - endpoints +- verbs: +- - create +- - apiGroups: +- - "" +- resourceNames: +- - {{ include "jupyterhub.user-scheduler-lock.fullname" . 
}} +- resources: +- - endpoints +- verbs: +- - get +- - update + - apiGroups: + - "" + resources: +@@ -159,13 +143,37 @@ rules: + - get + - list + - watch ++ - apiGroups: ++ - "" ++ resources: ++ - namespaces ++ verbs: ++ - get ++ - list ++ - watch ++ - apiGroups: ++ - storage.k8s.io ++ resources: ++ - csidrivers ++ verbs: ++ - get ++ - list ++ - watch ++ - apiGroups: ++ - storage.k8s.io ++ resources: ++ - csistoragecapacities ++ verbs: ++ - get ++ - list ++ - watch + + # Copied from the system:volume-scheduler ClusterRole of the k8s version + # matching the kube-scheduler binary we use. + # +- # NOTE: These rules have not changed between 1.12 and 1.19. ++ # NOTE: These rules have not changed between 1.12 and 1.29. + # +- # ref: https://github.com/kubernetes/kubernetes/blob/v1.19.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L1213-L1240 ++ # ref: https://github.com/kubernetes/kubernetes/blob/v1.29.0/plugin/pkg/auth/authorizer/rbac/bootstrappolicy/testdata/cluster-roles.yaml#L1283-L1310 + - apiGroups: + - "" + resources: +@@ -203,7 +211,7 @@ metadata: + {{- include "jupyterhub.labels" . | nindent 4 }} + subjects: + - kind: ServiceAccount +- name: {{ include "jupyterhub.user-scheduler-deploy.fullname" . }} ++ name: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . 
}} + namespace: "{{ .Release.Namespace }}" + roleRef: + kind: ClusterRole +diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml +new file mode 100644 +index 0000000..67618b0 +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/serviceaccount.yaml +@@ -0,0 +1,14 @@ ++{{- if .Values.apps.jupyterhub.scheduling.userScheduler.enabled -}} ++{{- if .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.create -}} ++apiVersion: v1 ++kind: ServiceAccount ++metadata: ++ name: {{ include "jupyterhub.user-scheduler-serviceaccount.fullname" . }} ++ labels: ++ {{- include "jupyterhub.labels" . | nindent 4 }} ++ {{- with .Values.apps.jupyterhub.scheduling.userScheduler.serviceAccount.annotations }} ++ annotations: ++ {{- . | toYaml | nindent 4 }} ++ {{- end }} ++{{- end }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml +index 3dfb137..931a150 100755 +--- a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml ++++ b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml +@@ -62,23 +62,38 @@ spec: + + egress: + # singleuser-server --> hub +- - ports: +- - port: 8081 +- to: ++ - to: + - podSelector: + matchLabels: + {{- $_ := merge (dict "componentLabel" "hub") . }} + {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8081 + +- # singleuser-server --> Kubernetes internal DNS +- - ports: +- - protocol: UDP +- port: 53 +- - protocol: TCP +- port: 53 ++ # singleuser-server --> proxy ++ # singleuser-server --> autohttps ++ # ++ # While not critical for core functionality, a user or library code may rely ++ # on communicating with the proxy or autohttps pods via a k8s Service it can ++ # detected from well known environment variables. 
++ # ++ - to: ++ - podSelector: ++ matchLabels: ++ {{- $_ := merge (dict "componentLabel" "proxy") . }} ++ {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8000 ++ - to: ++ - podSelector: ++ matchLabels: ++ {{- $_ := merge (dict "componentLabel" "autohttps") . }} ++ {{- include "jupyterhub.matchLabels" $_ | nindent 14 }} ++ ports: ++ - port: 8080 ++ - port: 8443 + +- {{- with .Values.apps.jupyterhub.singleuser.networkPolicy.egress }} +- # singleuser-server --> depends, but the default is everything +- {{- . | toYaml | nindent 4 }} ++ {{- with (include "jupyterhub.networkPolicy.renderEgressRules" (list . .Values.apps.jupyterhub.singleuser.networkPolicy)) }} ++ {{- . | nindent 4 }} + {{- end }} + {{- end }} +diff --git a/applications/jupyterhub/deploy/templates/singleuser/secret.yaml b/applications/jupyterhub/deploy/templates/singleuser/secret.yaml +new file mode 100644 +index 0000000..e6eab9b +--- /dev/null ++++ b/applications/jupyterhub/deploy/templates/singleuser/secret.yaml +@@ -0,0 +1,17 @@ ++{{- if .Values.apps.jupyterhub.singleuser.extraFiles }} ++kind: Secret ++apiVersion: v1 ++metadata: ++ name: {{ include "jupyterhub.singleuser.fullname" . }} ++ labels: ++ {{- include "jupyterhub.labels" . | nindent 4 }} ++type: Opaque ++{{- with include "jupyterhub.extraFiles.data" .Values.apps.jupyterhub.singleuser.extraFiles }} ++data: ++ {{- . | nindent 2 }} ++{{- end }} ++{{- with include "jupyterhub.extraFiles.stringData" .Values.apps.jupyterhub.singleuser.extraFiles }} ++stringData: ++ {{- . 
| nindent 2 }} ++{{- end }} ++{{- end }} +diff --git a/applications/jupyterhub/deploy/values.schema.yaml b/applications/jupyterhub/deploy/values.schema.yaml +new file mode 100644 +index 0000000..69c13a8 +--- /dev/null ++++ b/applications/jupyterhub/deploy/values.schema.yaml +@@ -0,0 +1,3014 @@ ++# This schema (a jsonschema in YAML format) is used to generate ++# values.schema.json which is packaged with the Helm chart for client side ++# validation by helm of values before template rendering. ++# ++# This schema is also used by our documentation system to build the ++# configuration reference section based on the description fields. See ++# docs/source/conf.py for that logic! ++# ++# We look to document everything we have default values for in values.yaml, but ++# we don't look to enforce the perfect validation logic within this file. ++# ++# ref: https://json-schema.org/learn/getting-started-step-by-step.html ++# ++$schema: http://json-schema.org/draft-07/schema# ++type: object ++additionalProperties: false ++required: ++ - imagePullSecrets ++ - hub ++ - proxy ++ - singleuser ++ - ingress ++ - prePuller ++ - custom ++ - cull ++ - debug ++ - rbac ++ - global ++properties: ++ enabled: ++ type: [boolean, "null"] ++ description: | ++ `enabled` is ignored by the jupyterhub chart itself, but a chart depending ++ on the jupyterhub chart conditionally can make use this config option as ++ the condition. ++ fullnameOverride: ++ type: [string, "null"] ++ description: | ++ fullnameOverride and nameOverride allow you to adjust how the resources ++ part of the Helm chart are named. 
++ ++ Name format | Resource types | fullnameOverride | nameOverride | Note ++ ------------------------- | -------------- | ---------------- | ------------ | - ++ component | namespaced | `""` | * | Default ++ release-component | cluster wide | `""` | * | Default ++ fullname-component | * | str | * | - ++ release-component | * | null | `""` | - ++ release-(name-)component | * | null | str | omitted if contained in release ++ release-(chart-)component | * | null | null | omitted if contained in release ++ ++ ```{admonition} Warning! ++ :class: warning ++ Changing fullnameOverride or nameOverride after the initial installation ++ of the chart isn't supported. Changing their values likely leads to a ++ reset of non-external JupyterHub databases, abandonment of users' storage, ++ and severed couplings to currently running user pods. ++ ``` ++ ++ If you are a developer of a chart depending on this chart, you should ++ avoid hardcoding names. If you want to reference the name of a resource in ++ this chart from a parent helm chart's template, you can make use of the ++ global named templates instead. ++ ++ ```yaml ++ # some pod definition of a parent chart helm template ++ schedulerName: {{ include "jupyterhub.user-scheduler.fullname" . }} ++ ``` ++ ++ To access them from a container, you can also rely on the hub ConfigMap ++ that contains entries of all the resource names. ++ ++ ```yaml ++ # some container definition in a parent chart helm template ++ env: ++ - name: SCHEDULER_NAME ++ valueFrom: ++ configMapKeyRef: ++ name: {{ include "jupyterhub.user-scheduler.fullname" . }} ++ key: user-scheduler ++ ``` ++ ++ nameOverride: ++ type: [string, "null"] ++ description: | ++ See the documentation under [`fullnameOverride`](schema_fullnameOverride). 
++ ++ imagePullSecret: ++ type: object ++ required: [create] ++ if: ++ properties: ++ create: ++ const: true ++ then: ++ additionalProperties: false ++ required: [registry, username, password] ++ description: | ++ This is configuration to create a k8s Secret resource of `type: ++ kubernetes.io/dockerconfigjson`, with credentials to pull images from a ++ private image registry. If you opt to do so, it will be available for use ++ by all pods in their respective `spec.imagePullSecrets` alongside other ++ k8s Secrets defined in `imagePullSecrets` or the pod respective ++ `...image.pullSecrets` configuration. ++ ++ In other words, using this configuration option can automate both the ++ otherwise manual creation of a k8s Secret and the otherwise manual ++ configuration to reference this k8s Secret in all the pods of the Helm ++ chart. ++ ++ ```sh ++ # you won't need to create a k8s Secret manually... ++ kubectl create secret docker-registry image-pull-secret \ ++ --docker-server= \ ++ --docker-username= \ ++ --docker-email= \ ++ --docker-password= ++ ``` ++ ++ If you just want to let all Pods reference an existing secret, use the ++ [`imagePullSecrets`](schema_imagePullSecrets) configuration instead. ++ properties: ++ create: ++ type: boolean ++ description: | ++ Toggle the creation of the k8s Secret with provided credentials to ++ access a private image registry. ++ automaticReferenceInjection: ++ type: boolean ++ description: | ++ Toggle the automatic reference injection of the created Secret to all ++ pods' `spec.imagePullSecrets` configuration. ++ registry: ++ type: string ++ description: | ++ Name of the private registry you want to create a credential set for. ++ It will default to Docker Hub's image registry. ++ ++ Examples: ++ - https://index.docker.io/v1/ ++ - quay.io ++ - eu.gcr.io ++ - alexmorreale.privatereg.net ++ username: ++ type: string ++ description: | ++ Name of the user you want to use to connect to your private registry. 
++ ++ For external gcr.io, you will use the `_json_key`. ++ ++ Examples: ++ - alexmorreale ++ - alex@pfc.com ++ - _json_key ++ password: ++ type: string ++ description: | ++ Password for the private image registry's user. ++ ++ Examples: ++ - plaintextpassword ++ - abc123SECRETzyx098 ++ ++ For gcr.io registries the password will be a big JSON blob for a ++ Google cloud service account, it should look something like below. ++ ++ ```yaml ++ password: |- ++ { ++ "type": "service_account", ++ "project_id": "jupyter-se", ++ "private_key_id": "f2ba09118a8d3123b3321bd9a7d6d0d9dc6fdb85", ++ ... ++ } ++ ``` ++ email: ++ type: [string, "null"] ++ description: | ++ Specification of an email is most often not required, but it is ++ supported. ++ ++ imagePullSecrets: ++ type: array ++ description: | ++ Chart wide configuration to _append_ k8s Secret references to all its ++ pod's `spec.imagePullSecrets` configuration. ++ ++ This will not override or get overridden by pod specific configuration, ++ but instead augment the pod specific configuration. ++ ++ You can use both the k8s native syntax, where each list element is like ++ `{"name": "my-secret-name"}`, or you can let list elements be strings ++ naming the secrets directly. ++ ++ hub: ++ type: object ++ additionalProperties: false ++ required: [baseUrl] ++ properties: ++ revisionHistoryLimit: &revisionHistoryLimit ++ type: [integer, "null"] ++ minimum: 0 ++ description: | ++ Configures the resource's `spec.revisionHistoryLimit`. This is ++ available for Deployment, StatefulSet, and DaemonSet resources. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/#revision-history-limit) ++ for more info. ++ config: ++ type: object ++ additionalProperties: true ++ description: | ++ JupyterHub and its components (authenticators, spawners, etc), are ++ Python classes that expose its configuration through ++ [_traitlets_](https://traitlets.readthedocs.io/en/stable/). 
With this ++ Helm chart configuration (`hub.config`), you can directly configure ++ the Python classes through _static_ YAML values. To _dynamically_ set ++ values, you need to use [`hub.extraConfig`](schema_hub.extraConfig) ++ instead. ++ ++ ```{admonition} Currently intended only for auth config ++ :class: warning ++ This config _currently_ (0.11.0) only influence the software in the ++ `hub` Pod, but some Helm chart config options such as ++ [`hub.baseUrl`](schema_hub.baseUrl) is used to set ++ `JupyterHub.base_url` in the `hub` Pod _and_ influence how other Helm ++ templates are rendered. ++ ++ As we have not yet mapped out all the potential configuration ++ conflicts except for the authentication related configuration options, ++ please accept that using it for something else at this point can lead ++ to issues. ++ ``` ++ ++ __Example__ ++ ++ If you inspect documentation or some `jupyterhub_config.py` to contain ++ the following section: ++ ++ ```python ++ c.JupyterHub.admin_access = true ++ c.JupyterHub.admin_users = ["jovyan1", "jovyan2"] ++ c.KubeSpawner.k8s_api_request_timeout = 10 ++ c.GitHubOAuthenticator.allowed_organizations = ["jupyterhub"] ++ ``` ++ ++ Then, you would be able to represent it with this configuration like: ++ ++ ```yaml ++ hub: ++ config: ++ JupyterHub: ++ admin_access: true ++ admin_users: ++ - jovyan1 ++ - jovyan2 ++ KubeSpawner: ++ k8s_api_request_timeout: 10 ++ GitHubOAuthenticator: ++ allowed_organizations: ++ - jupyterhub ++ ``` ++ ++ ```{admonition} YAML limitations ++ :class: tip ++ You can't represent Python `Bytes` or `Set` objects in YAML directly. ++ ``` ++ ++ ```{admonition} Helm value merging ++ :class: tip ++ `helm` merges a Helm chart's default values with values passed with ++ the `--values` or `-f` flag. During merging, lists are replaced while ++ dictionaries are updated. 
++ ``` ++ extraFiles: &extraFiles ++ type: object ++ additionalProperties: false ++ description: | ++ A dictionary with extra files to be injected into the pod's container ++ on startup. This can for example be used to inject: configuration ++ files, custom user interface templates, images, and more. ++ ++ ```yaml ++ # NOTE: "hub" is used in this example, but the configuration is the ++ # same for "singleuser". ++ hub: ++ extraFiles: ++ # The file key is just a reference that doesn't influence the ++ # actual file name. ++ : ++ # mountPath is required and must be the absolute file path. ++ mountPath: ++ ++ # Choose one out of the three ways to represent the actual file ++ # content: data, stringData, or binaryData. ++ # ++ # data should be set to a mapping (dictionary). It will in the ++ # end be rendered to either YAML, JSON, or TOML based on the ++ # filename extension that are required to be either .yaml, .yml, ++ # .json, or .toml. ++ # ++ # If your content is YAML, JSON, or TOML, it can make sense to ++ # use data to represent it over stringData as data can be merged ++ # instead of replaced if set partially from separate Helm ++ # configuration files. ++ # ++ # Both stringData and binaryData should be set to a string ++ # representing the content, where binaryData should be the ++ # base64 encoding of the actual file content. ++ # ++ data: ++ myConfig: ++ myMap: ++ number: 123 ++ string: "hi" ++ myList: ++ - 1 ++ - 2 ++ stringData: | ++ hello world! ++ binaryData: aGVsbG8gd29ybGQhCg== ++ ++ # mode is by default 0644 and you can optionally override it ++ # either by octal notation (example: 0400) or decimal notation ++ # (example: 256). ++ mode: ++ ``` ++ ++ **Using --set-file** ++ ++ To avoid embedding entire files in the Helm chart configuration, you ++ can use the `--set-file` flag during `helm upgrade` to set the ++ stringData or binaryData field. 
++ ++ ```yaml ++ hub: ++ extraFiles: ++ my_image: ++ mountPath: /usr/local/share/jupyterhub/static/my_image.png ++ ++ # Files in /usr/local/etc/jupyterhub/jupyterhub_config.d are ++ # automatically loaded in alphabetical order of the final file ++ # name when JupyterHub starts. ++ my_config: ++ mountPath: /usr/local/etc/jupyterhub/jupyterhub_config.d/my_jupyterhub_config.py ++ ``` ++ ++ ```bash ++ # --set-file expects a text based file, so you need to base64 encode ++ # it manually first. ++ base64 my_image.png > my_image.png.b64 ++ ++ helm upgrade <...> \ ++ --set-file hub.extraFiles.my_image.binaryData=./my_image.png.b64 \ ++ --set-file hub.extraFiles.my_config.stringData=./my_jupyterhub_config.py ++ ``` ++ ++ **Common uses** ++ ++ 1. **JupyterHub template customization** ++ ++ You can replace the default JupyterHub user interface templates in ++ the hub pod by injecting new ones to ++ `/usr/local/share/jupyterhub/templates`. These can in turn ++ reference custom images injected to ++ `/usr/local/share/jupyterhub/static`. ++ ++ 1. **JupyterHub standalone file config** ++ ++ Instead of embedding JupyterHub python configuration as a string ++ within a YAML file through ++ [`hub.extraConfig`](schema_hub.extraConfig), you can inject a ++ standalone .py file into ++ `/usr/local/etc/jupyterhub/jupyterhub_config.d` that is ++ automatically loaded. ++ ++ 1. **Flexible configuration** ++ ++ By injecting files, you don't have to embed them in a docker image ++ that you have to rebuild. ++ ++ If your configuration file is a YAML/JSON/TOML file, you can also ++ use `data` instead of `stringData` which allow you to set various ++ configuration in separate Helm config files. This can be useful to ++ help dependent charts override only some configuration part of the ++ file, or to allow for the configuration be set through multiple ++ Helm configuration files. ++ ++ **Limitations** ++ ++ 1. 
File size ++ ++ The files in `hub.extraFiles` and `singleuser.extraFiles` are ++ respectively stored in their own k8s Secret resource. As k8s ++ Secret's are limited, typically to 1MB, you will be limited to a ++ total file size of less than 1MB as there is also base64 encoding ++ that takes place reducing available capacity to 75%. ++ ++ 2. File updates ++ ++ The files that are mounted are only set during container startup. ++ This is [because we use ++ `subPath`](https://kubernetes.io/docs/concepts/storage/volumes/#secret) ++ as is required to avoid replacing the content of the entire ++ directory we mount in. ++ patternProperties: ++ ".*": ++ type: object ++ additionalProperties: false ++ required: [mountPath] ++ oneOf: ++ - required: [data] ++ - required: [stringData] ++ - required: [binaryData] ++ properties: ++ mountPath: ++ type: string ++ data: ++ type: object ++ additionalProperties: true ++ stringData: ++ type: string ++ binaryData: ++ type: string ++ mode: ++ type: number ++ baseUrl: ++ type: string ++ description: | ++ This is the equivalent of c.JupyterHub.base_url, but it is also needed ++ by the Helm chart in general. So, instead of setting ++ c.JupyterHub.base_url, use this configuration. ++ command: ++ type: array ++ description: | ++ A list of strings to be used to replace the JupyterHub image's ++ `ENTRYPOINT` entry. Note that in k8s lingo, the Dockerfile's ++ `ENTRYPOINT` is called `command`. The list of strings will be expanded ++ with Helm's template function `tpl` which can render Helm template ++ logic inside curly braces (`{{... }}`). ++ ++ This could be useful to wrap the invocation of JupyterHub itself in ++ some custom way. ++ ++ For more details, see the [Kubernetes ++ documentation](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/). 
++ args: ++ type: array ++ description: | ++ A list of strings to be used to replace the JupyterHub image's `CMD` ++ entry as well as the Helm chart's default way to start JupyterHub. ++ Note that in k8s lingo, the Dockerfile's `CMD` is called `args`. The ++ list of strings will be expanded with Helm's template function `tpl` ++ which can render Helm template logic inside curly braces (`{{... }}`). ++ ++ ```{warning} ++ By replacing the entire configuration file, which is mounted to ++ `/usr/local/etc/jupyterhub/jupyterhub_config.py` by the Helm chart, ++ instead of appending to it with `hub.extraConfig`, you expose your ++ deployment for issues stemming from getting out of sync with the Helm ++ chart's config file. ++ ++ These kind of issues will be significantly harder to debug and ++ diagnose, and can due to this could cause a lot of time expenditure ++ for both the community maintaining the Helm chart as well as yourself, ++ even if this wasn't the reason for the issue. ++ ++ Due to this, we ask that you do your _absolute best to avoid replacing ++ the default provided `jupyterhub_config.py` file. It can often be ++ possible. For example, if your goal is to have a dedicated .py file ++ for more extensive additions that you can syntax highlight and such ++ and feel limited by passing code in `hub.extraConfig` which is part of ++ a YAML file, you can use [this ++ trick](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/issues/1580#issuecomment-707776237) ++ instead. ++ ``` ++ ++ ```yaml ++ hub: ++ args: ++ - "jupyterhub" ++ - "--config" ++ - "/usr/local/etc/jupyterhub/jupyterhub_config.py" ++ - "--debug" ++ - "--upgrade-db" ++ ``` ++ ++ For more details, see the [Kubernetes ++ documentation](https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/). ++ cookieSecret: ++ type: [string, "null"] ++ description: | ++ ```{note} ++ As of version 1.0.0 this will automatically be generated and there is ++ no need to set it manually. 
++ ++ If you wish to reset a generated key, you can use `kubectl edit` on ++ the k8s Secret typically named `hub` and remove the ++ `hub.config.JupyterHub.cookie_secret` entry in the k8s Secret, then ++ perform a new `helm upgrade`. ++ ``` ++ ++ A 32-byte cryptographically secure randomly generated string used to sign values of ++ secure cookies set by the hub. If unset, jupyterhub will generate one on startup and ++ save it in the file `jupyterhub_cookie_secret` in the `/srv/jupyterhub` directory of ++ the hub container. A value set here will make JupyterHub overwrite any previous file. ++ ++ You do not need to set this at all if you are using the default configuration for ++ storing databases - sqlite on a persistent volume (with `hub.db.type` set to the ++ default `sqlite-pvc`). If you are using an external database, then you must set this ++ value explicitly - or your users will keep getting logged out each time the hub pod ++ restarts. ++ ++ Changing this value will all user logins to be invalidated. If this secret leaks, ++ *immediately* change it to something else, or user data can be compromised ++ ++ ```sh ++ # to generate a value, run ++ openssl rand -hex 32 ++ ``` ++ image: &image-spec ++ type: object ++ additionalProperties: false ++ required: [name, tag] ++ description: | ++ Set custom image name, tag, pullPolicy, or pullSecrets for the pod. ++ properties: ++ name: ++ type: string ++ description: | ++ The name of the image, without the tag. ++ ++ ``` ++ # example name ++ gcr.io/my-project/my-image ++ ``` ++ tag: ++ type: string ++ description: | ++ The tag of the image to pull. This is the value following `:` in ++ complete image specifications. ++ ++ ``` ++ # example tags ++ v1.11.1 ++ zhy270a ++ ``` ++ pullPolicy: ++ enum: [null, "", IfNotPresent, Always, Never] ++ description: | ++ Configures the Pod's `spec.imagePullPolicy`. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/containers/images/#updating-images) ++ for more info. 
++ pullSecrets: ++ type: array ++ description: | ++ A list of references to existing Kubernetes Secrets with ++ credentials to pull the image. ++ ++ This Pod's final `imagePullSecrets` k8s specification will be a ++ combination of: ++ ++ 1. This list of k8s Secrets, specific for this pod. ++ 2. The list of k8s Secrets, for use by all pods in the Helm chart, ++ declared in this Helm charts configuration called ++ `imagePullSecrets`. ++ 3. A k8s Secret, for use by all pods in the Helm chart, if ++ conditionally created from image registry credentials provided ++ under `imagePullSecret` if `imagePullSecret.create` is set to ++ true. ++ ++ ```yaml ++ # example - k8s native syntax ++ pullSecrets: ++ - name: my-k8s-secret-with-image-registry-credentials ++ ++ # example - simplified syntax ++ pullSecrets: ++ - my-k8s-secret-with-image-registry-credentials ++ ``` ++ networkPolicy: &networkPolicy-spec ++ type: object ++ additionalProperties: false ++ description: | ++ This configuration regards the creation and configuration of a k8s ++ _NetworkPolicy resource_. ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Toggle the creation of the NetworkPolicy resource targeting this ++ pod, and by doing so, restricting its communication to only what ++ is explicitly allowed in the NetworkPolicy. ++ ingress: ++ type: array ++ description: | ++ Additional ingress rules to add besides those that are required ++ for core functionality. ++ egress: ++ type: array ++ description: | ++ Additional egress rules to add besides those that are required for ++ core functionality and those added via ++ [`.egressAllowRules`](schema_hub.networkPolicy.egressAllowRules). ++ ++ ```{versionchanged} 2.0.0 ++ The default value changed from providing one very permissive rule ++ allowing all egress to providing no rule. The permissive rule is ++ still provided via ++ [`.egressAllowRules`](schema_hub.networkPolicy.egressAllowRules) ++ set to true though. 
++ ``` ++ ++ As an example, below is a configuration that disables the more ++ broadly permissive `.privateIPs` egress allow rule for the hub ++ pod, and instead provides tightly scoped permissions to access a ++ specific k8s local service as identified by pod labels. ++ ++ ```yaml ++ hub: ++ networkPolicy: ++ egressAllowRules: ++ privateIPs: false ++ egress: ++ - to: ++ - podSelector: ++ matchLabels: ++ app: my-k8s-local-service ++ ports: ++ - protocol: TCP ++ port: 5978 ++ ``` ++ egressAllowRules: ++ type: object ++ additionalProperties: false ++ description: | ++ This is a set of predefined rules that when enabled will be added ++ to the NetworkPolicy list of egress rules. ++ ++ The resulting egress rules will be a composition of: ++ - rules specific for the respective pod(s) function within the ++ Helm chart ++ - rules based on enabled `egressAllowRules` flags ++ - rules explicitly specified by the user ++ ++ ```{note} ++ Each flag under this configuration will not render into a ++ dedicated rule in the NetworkPolicy resource, but instead combine ++ with the other flags to a reduced set of rules to avoid a ++ performance penalty. ++ ``` ++ ++ ```{versionadded} 2.0.0 ++ ``` ++ properties: ++ cloudMetadataServer: ++ type: boolean ++ description: | ++ Defaults to `false` for singleuser servers, but to `true` for ++ all other network policies. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to the cloud metadata server. ++ ++ Note that the `nonPrivateIPs` rule is allowing all non Private ++ IP ranges but makes an exception for the cloud metadata ++ server, leaving this as the definitive configuration to allow ++ access to the cloud metadata server. ++ ++ ```{versionchanged} 3.0.0 ++ This configuration is not allowed to be configured true at the ++ same time as ++ [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) ++ to avoid an ambiguous configuration. 
++ ``` ++ dnsPortsCloudMetadataServer: ++ type: boolean ++ description: | ++ Defaults to `true` for all network policies. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to the cloud metadata server ++ via port 53. ++ ++ Relying on this rule for the singleuser config should go hand ++ in hand with disabling ++ [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) ++ to avoid an ambiguous configuration. ++ ++ Known situations when this rule can be relevant: ++ ++ - In GKE clusters with Cloud DNS that is reached at the ++ cloud metadata server's non-private IP. ++ ++ ```{note} ++ This chart doesn't know how to identify the DNS server that ++ pods will rely on due to variations between how k8s clusters ++ have been setup. Due to that, multiple rules are enabled by ++ default to ensure DNS connectivity. ++ ``` ++ ++ ```{versionadded} 3.0.0 ++ ``` ++ dnsPortsKubeSystemNamespace: ++ type: boolean ++ description: | ++ Defaults to `true` for all network policies. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to pods in the kube-system ++ namespace via port 53. ++ ++ Known situations when this rule can be relevant: ++ ++ - GKE, EKS, AKS, and other clusters relying directly on ++ `kube-dns` or `coredns` pods in the `kube-system` namespace. ++ ++ ```{note} ++ This chart doesn't know how to identify the DNS server that ++ pods will rely on due to variations between how k8s clusters ++ have been setup. Due to that, multiple rules are enabled by ++ default to ensure DNS connectivity. ++ ``` ++ ++ ```{versionadded} 3.0.0 ++ ``` ++ dnsPortsPrivateIPs: ++ type: boolean ++ description: | ++ Defaults to `true` for all network policies. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to private IPs via port 53. 
++ ++ Known situations when this rule can be relevant: ++ ++ - GKE clusters relying on a DNS server indirectly via a node ++ local DNS cache at an unknown private IP. ++ ++ ```{note} ++ This chart doesn't know how to identify the DNS server that ++ pods will rely on due to variations between how k8s clusters ++ have been setup. Due to that, multiple rules are enabled by ++ default to ensure DNS connectivity. ++ ``` ++ ++ ```{warning} ++ This rule is not expected to work in clusters relying on ++ Cilium to enforce the NetworkPolicy rules (includes GKE ++ clusters with Dataplane v2), this is due to a [known ++ limitation](https://github.com/cilium/cilium/issues/9209). ++ ``` ++ nonPrivateIPs: ++ type: boolean ++ description: | ++ Defaults to `true` for all network policies. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to the non-private IP ranges ++ with the exception of the cloud metadata server. This means ++ respective pod(s) can establish connections to the internet ++ but not (say) an unsecured prometheus server running in the ++ same cluster. ++ privateIPs: ++ type: boolean ++ description: | ++ Defaults to `false` for singleuser servers, but to `true` for ++ all other network policies. ++ ++ Private IPs refer to the IP ranges `10.0.0.0/8`, ++ `172.16.0.0/12`, `192.168.0.0/16`. ++ ++ When enabled this rule allows the respective pod(s) to ++ establish outbound connections to the internal k8s cluster. ++ This means users can access the internet but not (say) an ++ unsecured prometheus server running in the same cluster. ++ ++ Since not all workloads in the k8s cluster may have ++ NetworkPolicies setup to restrict their incoming connections, ++ having this set to false can be a good defense against ++ malicious intent from someone in control of software in these ++ pods.
++ ++ If possible, try to avoid setting this to true as it gives ++ broad permissions that could be specified more directly via ++ the [`.egress`](schema_singleuser.networkPolicy.egress). ++ ++ ```{warning} ++ This rule is not expected to work in clusters relying on ++ Cilium to enforce the NetworkPolicy rules (includes GKE ++ clusters with Dataplane v2), this is due to a [known ++ limitation](https://github.com/cilium/cilium/issues/9209). ++ ``` ++ interNamespaceAccessLabels: ++ enum: [accept, ignore] ++ description: | ++ This configuration option determines if both namespaces and pods ++ in other namespaces, that have specific access labels, should be ++ accepted to allow ingress (set to `accept`), or, if the labels are ++ to be ignored when applied outside the local namespace (set to ++ `ignore`). ++ ++ The available access labels for respective NetworkPolicy resources ++ are: ++ ++ - `hub.jupyter.org/network-access-hub: "true"` (hub) ++ - `hub.jupyter.org/network-access-proxy-http: "true"` (proxy.chp, proxy.traefik) ++ - `hub.jupyter.org/network-access-proxy-api: "true"` (proxy.chp) ++ - `hub.jupyter.org/network-access-singleuser: "true"` (singleuser) ++ allowedIngressPorts: ++ type: array ++ description: | ++ A rule to allow ingress on these ports will be added no matter ++ what the origin of the request is. The default setting for ++ `proxy.chp` and `proxy.traefik`'s networkPolicy configuration is ++ `[http, https]`, while it is `[]` for other networkPolicies. ++ ++ Note that these port names or numbers target a Pod's port name or ++ number, not a k8s Service's port name or number. ++ db: ++ type: object ++ additionalProperties: false ++ properties: ++ type: ++ enum: [sqlite-pvc, sqlite-memory, mysql, postgres, other] ++ description: | ++ Type of database backend to use for the hub database. ++ ++ The Hub requires a persistent database to function, and this lets you specify ++ where it should be stored. ++ ++ The various options are: ++ ++ 1. 
**sqlite-pvc** ++ ++ Use an `sqlite` database kept on a persistent volume attached to the hub. ++ ++ By default, this disk is created by the cloud provider using ++ *dynamic provisioning* configured by a [storage ++ class](https://kubernetes.io/docs/concepts/storage/storage-classes/). ++ You can customize how this disk is created / attached by ++ setting various properties under `hub.db.pvc`. ++ ++ This is the default setting, and should work well for most cloud provider ++ deployments. ++ ++ 2. **sqlite-memory** ++ ++ Use an in-memory `sqlite` database. This should only be used for testing, ++ since the database is erased whenever the hub pod restarts - causing the hub ++ to lose all memory of users who had logged in before. ++ ++ When using this for testing, make sure you delete all other objects that the ++ hub has created (such as user pods, user PVCs, etc) every time the hub restarts. ++ Otherwise you might run into errors about duplicate resources. ++ ++ 3. **mysql** ++ ++ Use an externally hosted mysql database. ++ ++ You have to specify an sqlalchemy connection string for the mysql database you ++ want to connect to in `hub.db.url` if using this option. ++ ++ The general format of the connection string is: ++ ``` ++ mysql+pymysql://<db-username>:<db-password>@<db-hostname>:<db-port>/<db-name> ++ ``` ++ ++ The user specified in the connection string must have the rights to create ++ tables in the database specified. ++ ++ 4. **postgres** ++ ++ Use an externally hosted postgres database. ++ ++ You have to specify an sqlalchemy connection string for the postgres database you ++ want to connect to in `hub.db.url` if using this option. ++ ++ The general format of the connection string is: ++ ``` ++ postgresql+psycopg2://<db-username>:<db-password>@<db-hostname>:<db-port>/<db-name> ++ ``` ++ ++ The user specified in the connection string must have the rights to create ++ tables in the database specified. ++ ++ 5. **other** ++ ++ Use an externally hosted database of some kind other than mysql ++ or postgres.
++ ++ When using _other_, the database password must be passed as ++ part of [hub.db.url](schema_hub.db.url) as ++ [hub.db.password](schema_hub.db.password) will be ignored. ++ pvc: ++ type: object ++ additionalProperties: false ++ required: [storage] ++ description: | ++ Customize the Persistent Volume Claim used when `hub.db.type` is `sqlite-pvc`. ++ properties: ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: &labels-and-annotations-patternProperties ++ ".*": ++ type: string ++ description: | ++ Annotations to apply to the PVC containing the sqlite database. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) ++ for more details about annotations. ++ selector: ++ type: object ++ additionalProperties: true ++ description: | ++ Label selectors to set for the PVC containing the sqlite database. ++ ++ Useful when you are using a specific PV, and want to bind to ++ that and only that. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims) ++ for more details about using a label selector for what PV to ++ bind to. ++ storage: ++ type: string ++ description: | ++ Size of disk to request for the database disk. ++ accessModes: ++ type: array ++ items: ++ type: [string, "null"] ++ description: | ++ AccessModes contains the desired access modes the volume ++ should have. See [the k8s ++ documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1) ++ for more information. ++ storageClassName: ++ type: [string, "null"] ++ description: | ++ Name of the StorageClass required by the claim. ++ ++ If this is a blank string it will be set to a blank string, ++ while if it is null, it will not be set at all. ++ subPath: ++ type: [string, "null"] ++ description: | ++ Path within the volume from which the container's volume ++ should be mounted. 
Defaults to "" (volume's root). ++ upgrade: ++ type: [boolean, "null"] ++ description: | ++ Users with external databases need to opt-in for upgrades of the ++ JupyterHub specific database schema if needed as part of a ++ JupyterHub version upgrade. ++ url: ++ type: [string, "null"] ++ description: | ++ Connection string when `hub.db.type` is mysql or postgres. ++ ++ See documentation for `hub.db.type` for more details on the format of this property. ++ password: ++ type: [string, "null"] ++ description: | ++ Password for the database when `hub.db.type` is mysql or postgres. ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the hub pod. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ initContainers: ++ type: array ++ description: | ++ list of initContainers to be run with hub pod. See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) ++ ++ ```yaml ++ hub: ++ initContainers: ++ - name: init-myservice ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command1'] ++ - name: init-mydb ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command2'] ++ ``` ++ extraEnv: ++ type: [object, array] ++ additionalProperties: true ++ description: | ++ Extra environment variables that should be set for the hub pod. ++ ++ Environment variables are usually used to: ++ - Pass parameters to some custom code in `hub.extraConfig`. ++ - Configure code running in the hub pod, such as an authenticator or ++ spawner. ++ ++ String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which ++ is a part of Kubernetes. 
++ ++ ```yaml ++ hub: ++ extraEnv: ++ # basic notation (for literal values only) ++ MY_ENV_VARS_NAME1: "my env var value 1" ++ ++ # explicit notation (the "name" field takes precedence) ++ HUB_NAMESPACE: ++ name: HUB_NAMESPACE ++ valueFrom: ++ fieldRef: ++ fieldPath: metadata.namespace ++ ++ # implicit notation (the "name" field is implied) ++ PREFIXED_HUB_NAMESPACE: ++ value: "my-prefix-$(HUB_NAMESPACE)" ++ SECRET_VALUE: ++ valueFrom: ++ secretKeyRef: ++ name: my-k8s-secret ++ key: password ++ ``` ++ ++ For more information, see the [Kubernetes EnvVar ++ specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). ++ extraConfig: ++ type: object ++ additionalProperties: true ++ description: | ++ Arbitrary extra python based configuration that should be in `jupyterhub_config.py`. ++ ++ This is the *escape hatch* - if you want to configure JupyterHub to do something specific ++ that is not present here as an option, you can write the raw Python to do it here. ++ ++ extraConfig is a *dict*, so there can be multiple configuration ++ snippets under different names. The configuration sections are run in ++ alphabetical order based on the keys. ++ ++ Non-exhaustive examples of things you can do here: ++ - Subclass authenticator / spawner to do a custom thing ++ - Dynamically launch different images for different sets of images ++ - Inject an auth token from GitHub authenticator into user pod ++ - Anything else you can think of! ++ ++ Since this is usually a multi-line string, you want to format it using YAML's ++ [| operator](https://yaml.org/spec/1.2.2/#23-scalars). ++ ++ For example: ++ ++ ```yaml ++ hub: ++ extraConfig: ++ myConfig.py: | ++ c.JupyterHub.something = 'something' ++ c.Spawner.something_else = 'something else' ++ ``` ++ ++ ```{note} ++ No code validation is performed until JupyterHub loads it! 
If you make ++ a typo here, it will probably manifest itself as the hub pod failing ++ to start up and instead entering an `Error` state or the subsequent ++ `CrashLoopBackoff` state. ++ ++ To make use of your own programs linters etc, it would be useful to ++ not embed Python code inside a YAML file. To do that, consider using ++ [`hub.extraFiles`](schema_hub.extraFiles) and mounting a file to ++ `/usr/local/etc/jupyterhub/jupyterhub_config.d` in order to load your ++ extra configuration logic. ++ ``` ++ ++ fsGid: ++ type: [integer, "null"] ++ minimum: 0 ++ # This schema entry is needed to help us print a more helpful error ++ # message in NOTES.txt if hub.fsGid is set. ++ # ++ description: | ++ ```{note} ++ Removed in version 2.0.0. Use ++ [`hub.podSecurityContext`](schema_hub.podSecurityContext) and specify ++ `fsGroup` instead. ++ ``` ++ service: ++ type: object ++ additionalProperties: false ++ description: | ++ Object to configure the service the JupyterHub will be exposed on by the Kubernetes server. ++ properties: ++ type: ++ enum: [ClusterIP, NodePort, LoadBalancer, ExternalName] ++ description: | ++ The Kubernetes ServiceType to be used. ++ ++ The default type is `ClusterIP`. ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types) ++ to learn more about service types. ++ ports: ++ type: object ++ additionalProperties: false ++ description: | ++ Object to configure the ports the hub service will be deployed on. ++ properties: ++ nodePort: ++ type: [integer, "null"] ++ minimum: 0 ++ description: | ++ The nodePort to deploy the hub service on. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Kubernetes annotations to apply to the hub service. ++ extraPorts: ++ type: array ++ description: | ++ Extra ports to add to the Hub Service object besides `hub` / `8081`. 
++ This should be an array that includes `name`, `port`, and `targetPort`. ++ See [Multi-port Services](https://kubernetes.io/docs/concepts/services-networking/service/#multi-port-services) for more details. ++ loadBalancerIP: ++ type: [string, "null"] ++ description: | ++ A public IP address the hub Kubernetes service should be exposed ++ on. To expose the hub directly is not recommended. Instead route ++ traffic through the proxy-public service towards the hub. ++ ++ pdb: &pdb-spec ++ type: object ++ additionalProperties: false ++ description: | ++ Configure a PodDisruptionBudget for this Deployment. ++ ++ These are disabled by default for our deployments that don't support ++ being run in parallel with multiple replicas. Only the user-scheduler ++ currently supports being run in parallel with multiple replicas. If ++ they are enabled for a Deployment with only one replica, they will ++ block `kubectl drain` of a node for example. ++ ++ Note that if you aim to block scaling down a node with the ++ hub/proxy/autohttps pod that would cause disruptions of the ++ deployment, then you should instead annotate the pods of the ++ Deployment [as described ++ here](https://github.com/kubernetes/autoscaler/blob/HEAD/cluster-autoscaler/FAQ.md#what-types-of-pods-can-prevent-ca-from-removing-a-node). ++ ++ "cluster-autoscaler.kubernetes.io/safe-to-evict": "false" ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/workloads/pods/disruptions/) ++ for more details about disruptions. ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Decides if a PodDisruptionBudget is created targeting the ++ Deployment's pods. ++ maxUnavailable: ++ type: [integer, "null"] ++ description: | ++ The maximum number of pods that can be unavailable during ++ voluntary disruptions. ++ minAvailable: ++ type: [integer, "null"] ++ description: | ++ The minimum number of pods required to be available during ++ voluntary disruptions. 
++ existingSecret: ++ type: [string, "null"] ++ description: | ++ This option allow you to provide the name of an existing k8s Secret to ++ use alongside of the chart managed k8s Secret. The content of this k8s ++ Secret will be merged with the chart managed k8s Secret, giving ++ priority to the self-managed k8s Secret. ++ ++ ```{warning} ++ 1. The self managed k8s Secret must mirror the structure in the chart ++ managed secret. ++ 2. [`proxy.secretToken`](schema_proxy.secretToken) (aka. ++ `hub.config.ConfigurableHTTPProxy.auth_token`) is only read from ++ the chart managed k8s Secret. ++ ``` ++ nodeSelector: &nodeSelector-spec ++ type: object ++ additionalProperties: true ++ description: | ++ An object with key value pairs representing labels. K8s Nodes are ++ required to have match all these labels for this Pod to scheduled on ++ them. ++ ++ ```yaml ++ disktype: ssd ++ nodetype: awesome ++ ``` ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#nodeselector) ++ for more details. ++ tolerations: &tolerations-spec ++ type: array ++ description: | ++ Tolerations allow a pod to be scheduled on nodes with taints. These ++ tolerations are additional tolerations to the tolerations common to ++ all pods of a their respective kind ++ ([scheduling.corePods.tolerations](schema_scheduling.corePods.tolerations), ++ [scheduling.userPods.tolerations](schema_scheduling.userPods.tolerations)). ++ ++ Pass this field an array of ++ [`Toleration`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#toleration-v1-core) ++ objects. ++ ++ See the [Kubernetes ++ docs](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/) ++ for more info. 
++ activeServerLimit: ++ type: [integer, "null"] ++ description: &jupyterhub-native-config-description | ++ JupyterHub native configuration, see the [JupyterHub ++ documentation](https://jupyterhub.readthedocs.io/en/stable/reference/api/app.html) ++ for more information. ++ allowNamedServers: ++ type: [boolean, "null"] ++ description: *jupyterhub-native-config-description ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ K8s annotations for the hub pod. ++ authenticatePrometheus: ++ type: [boolean, "null"] ++ description: *jupyterhub-native-config-description ++ concurrentSpawnLimit: ++ type: [integer, "null"] ++ description: *jupyterhub-native-config-description ++ consecutiveFailureLimit: ++ type: [integer, "null"] ++ description: *jupyterhub-native-config-description ++ podSecurityContext: &podSecurityContext-spec ++ additionalProperties: true ++ description: | ++ A k8s native specification of the pod's security context, see [the ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podsecuritycontext-v1-core) ++ for details. ++ containerSecurityContext: &containerSecurityContext-spec ++ type: object ++ additionalProperties: true ++ description: | ++ A k8s native specification of the container's security context, see [the ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#securitycontext-v1-core) ++ for details. ++ deploymentStrategy: ++ type: object ++ additionalProperties: false ++ properties: ++ rollingUpdate: ++ type: [string, "null"] ++ type: ++ type: [string, "null"] ++ description: | ++ JupyterHub does not support running in parallel, due to this we ++ default to using a deployment strategy of Recreate. ++ extraContainers: &extraContainers-spec ++ type: array ++ description: | ++ Additional containers for the Pod. Use a k8s native syntax. 
++ extraVolumeMounts: &extraVolumeMounts-spec ++ type: array ++ description: | ++ Additional volume mounts for the Container. Use a k8s native syntax. ++ extraVolumes: &extraVolumes-spec ++ type: array ++ description: | ++ Additional volumes for the Pod. Use a k8s native syntax. ++ livenessProbe: &probe-spec ++ type: object ++ additionalProperties: true ++ required: [enabled] ++ if: ++ properties: ++ enabled: ++ const: true ++ then: ++ description: | ++ This config option is like the k8s native specification of a ++ container probe, except that it also supports an `enabled` boolean ++ flag. ++ ++ See [the k8s ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#probe-v1-core) ++ for more details. ++ readinessProbe: *probe-spec ++ namedServerLimitPerUser: ++ type: [integer, "null"] ++ description: *jupyterhub-native-config-description ++ redirectToServer: ++ type: [boolean, "null"] ++ description: *jupyterhub-native-config-description ++ resources: &resources-spec ++ type: object ++ additionalProperties: true ++ description: | ++ A k8s native specification of resources, see [the ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#resourcerequirements-v1-core). ++ lifecycle: &lifecycle-spec ++ type: object ++ additionalProperties: false ++ description: | ++ A k8s native specification of lifecycle hooks on the container, see [the ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#lifecycle-v1-core). ++ properties: ++ postStart: ++ type: object ++ additionalProperties: true ++ preStop: ++ type: object ++ additionalProperties: true ++ services: ++ type: object ++ additionalProperties: true ++ description: | ++ This is where you register JupyterHub services. 
For details on how to ++ configure these services in this Helm chart just keep reading but for ++ details on services themselves instead read [JupyterHub's ++ documentation](https://jupyterhub.readthedocs.io/en/stable/reference/api/service.html). ++ ++ ```{note} ++ Only a selection of JupyterHub's configuration options that can be ++ configured for a service are documented below. All configuration set ++ here will be applied even if this Helm chart doesn't recognize it. ++ ``` ++ ++ JupyterHub's native configuration accepts a list of service objects, ++ this Helm chart only accept a dictionary where each key represents the ++ name of a service and the value is the actual service objects. ++ ++ When configuring JupyterHub services via this Helm chart, the `name` ++ field can be omitted as it can be implied by the dictionary key. ++ Further, the `api_token` field can be omitted as it will be ++ automatically generated as of version 1.1.0 of this Helm chart. ++ ++ If you have an external service that needs to access the automatically ++ generated api_token for the service, you can access it from the `hub` ++ k8s Secret part of this Helm chart under the key ++ `hub.services.my-service-config-key.apiToken`. ++ ++ Here is an example configuration of two services where the first ++ explicitly sets a name and api_token, while the second omits those and ++ lets the name be implied from the key name and the api_token be ++ automatically generated. ++ ++ ```yaml ++ hub: ++ services: ++ my-service-1: ++ admin: true ++ name: my-explicitly-set-service-name ++ api_token: my-explicitly-set-api_token ++ ++ # the name of the following service will be my-service-2 ++ # the api_token of the following service will be generated ++ my-service-2: {} ++ ``` ++ ++ If you develop a Helm chart depending on the JupyterHub Helm chart and ++ want to let some Pod's environment variable be populated with the ++ api_token of a service registered like above, then do something along ++ these lines. 
++ ++ ```yaml ++ # ... container specification of a pod ... ++ env: ++ - name: MY_SERVICE_1_API_TOKEN ++ valueFrom: ++ secretKeyRef: ++ # Don't hardcode the name, use the globally accessible ++ # named templates part of the JupyterHub Helm chart. ++ name: {{ include "jupyterhub.hub.fullname" . }} ++ # Note below the use of the configuration key my-service-1 ++ # rather than the explicitly set service name. ++ key: hub.services.my-service-1.apiToken ++ ``` ++ properties: ++ name: ++ type: string ++ description: | ++ The name can be implied via the key name under which this ++ service is configured, and is due to that allowed to be ++ omitted in this Helm chart configuration of JupyterHub. ++ admin: ++ type: boolean ++ command: ++ type: [string, array] ++ url: ++ type: string ++ api_token: ++ type: [string, "null"] ++ description: | ++ The api_token will be automatically generated if not ++ explicitly set. It will also be exposed in via a k8s Secret ++ part of this Helm chart under a specific key. ++ ++ See the documentation under ++ [`hub.services`](schema_hub.services) for details about this. ++ apiToken: ++ type: [string, "null"] ++ description: | ++ An alias for api_token provided for backward compatibility by ++ the JupyterHub Helm chart that will be transformed to ++ api_token. ++ loadRoles: ++ type: object ++ additionalProperties: true ++ description: | ++ This is where you should define JupyterHub roles and apply them to ++ JupyterHub users, groups, and services to grant them additional ++ permissions as defined in JupyterHub's RBAC system. ++ ++ Complement this documentation with [JupyterHub's ++ documentation](https://jupyterhub.readthedocs.io/en/stable/rbac/roles.html#defining-roles) ++ about `load_roles`. ++ ++ Note that while JupyterHub's native configuration `load_roles` accepts ++ a list of role objects, this Helm chart only accepts a dictionary where ++ each key represents the name of a role and the value is the actual ++ role object. 
++ ++ ```yaml ++ hub: ++ loadRoles: ++ teacher: ++ description: Access to users' information and group membership ++ ++ # this role provides permissions to... ++ scopes: [users, groups] ++ ++ # this role will be assigned to... ++ users: [erik] ++ services: [grading-service] ++ groups: [teachers] ++ ``` ++ ++ When configuring JupyterHub roles via this Helm chart, the `name` ++ field can be omitted as it can be implied by the dictionary key. ++ shutdownOnLogout: ++ type: [boolean, "null"] ++ description: *jupyterhub-native-config-description ++ templatePaths: ++ type: array ++ description: *jupyterhub-native-config-description ++ templateVars: ++ type: object ++ additionalProperties: true ++ description: *jupyterhub-native-config-description ++ serviceAccount: &serviceAccount ++ type: object ++ required: [create] ++ additionalProperties: false ++ description: | ++ Configuration for a k8s ServiceAccount dedicated for use by the ++ specific pod which this configuration is nested under. ++ properties: ++ create: ++ type: boolean ++ description: | ++ Whether or not to create the `ServiceAccount` resource. ++ name: ++ type: ["string", "null"] ++ description: | ++ This configuration serves multiple purposes: ++ ++ - It will be the `serviceAccountName` referenced by related Pods. ++ - If `create` is set, the created ServiceAccount resource will be named like this. ++ - If [`rbac.create`](schema_rbac.create) is set, the associated (Cluster)RoleBindings will bind to this name. ++ ++ If not explicitly provided, a default name will be used. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Kubernetes annotations to apply to the k8s ServiceAccount. ++ extraPodSpec: &extraPodSpec-spec ++ type: object ++ additionalProperties: true ++ description: | ++ Arbitrary extra k8s pod specification as a YAML object. The default ++ value of this setting is an empty object, i.e. 
no extra configuration. ++ The value of this property is augmented to the pod specification as-is. ++ ++ This is a powerful tool for expert k8s administrators with advanced ++ configuration requirements. This setting should only be used for ++ configuration that cannot be accomplished through the other settings. ++ Misusing this setting can break your deployment and/or compromise ++ your system security. ++ ++ This is one of four related settings for inserting arbitrary pod ++ specification: ++ ++ 1. hub.extraPodSpec ++ 2. proxy.chp.extraPodSpec ++ 3. proxy.traefik.extraPodSpec ++ 4. scheduling.userScheduler.extraPodSpec ++ ++ One real-world use of these settings is to enable host networking. For ++ example, to configure host networking for the hub pod, add the ++ following to your helm configuration values: ++ ++ ```yaml ++ hub: ++ extraPodSpec: ++ hostNetwork: true ++ dnsPolicy: ClusterFirstWithHostNet ++ ``` ++ ++ Likewise, to configure host networking for the proxy pod, add the ++ following: ++ ++ ```yaml ++ proxy: ++ chp: ++ extraPodSpec: ++ hostNetwork: true ++ dnsPolicy: ClusterFirstWithHostNet ++ ``` ++ ++ N.B. Host networking has special security implications and can easily ++ break your deployment. This is an example—not an endorsement. ++ ++ See [PodSpec](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#PodSpec) ++ for the latest pod resource specification. ++ ++ proxy: ++ type: object ++ additionalProperties: false ++ properties: ++ chp: ++ type: object ++ additionalProperties: false ++ description: | ++ Configure the configurable-http-proxy (chp) pod managed by jupyterhub to route traffic ++ both to itself and to user pods. 
++ properties: ++ revisionHistoryLimit: *revisionHistoryLimit ++ networkPolicy: *networkPolicy-spec ++ extraCommandLineFlags: ++ type: array ++ description: | ++ A list of strings to be added as command line options when ++ starting ++ [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy#command-line-options) ++ that will be expanded with Helm's template function `tpl` which ++ can render Helm template logic inside curly braces (`{{ ... }}`). ++ ++ ```yaml ++ proxy: ++ chp: ++ extraCommandLineFlags: ++ - "--auto-rewrite" ++ - "--custom-header {{ .Values.myCustomStuff }}" ++ ``` ++ ++ Note that these will be appended last, and if you provide the same ++ flag twice, the last flag will be used, which mean you can ++ override the default flag values as well. ++ extraEnv: ++ type: [object, array] ++ additionalProperties: true ++ description: | ++ Extra environment variables that should be set for the chp pod. ++ ++ Environment variables are usually used here to: ++ - override HUB_SERVICE_PORT or HUB_SERVICE_HOST default values ++ - set CONFIGPROXY_SSL_KEY_PASSPHRASE for setting passphrase of SSL keys ++ ++ String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which ++ is a part of Kubernetes. ++ ++ ```yaml ++ proxy: ++ chp: ++ extraEnv: ++ # basic notation (for literal values only) ++ MY_ENV_VARS_NAME1: "my env var value 1" ++ ++ # explicit notation (the "name" field takes precedence) ++ CHP_NAMESPACE: ++ name: CHP_NAMESPACE ++ valueFrom: ++ fieldRef: ++ fieldPath: metadata.namespace ++ ++ # implicit notation (the "name" field is implied) ++ PREFIXED_CHP_NAMESPACE: ++ value: "my-prefix-$(CHP_NAMESPACE)" ++ SECRET_VALUE: ++ valueFrom: ++ secretKeyRef: ++ name: my-k8s-secret ++ key: password ++ ``` ++ ++ For more information, see the [Kubernetes EnvVar ++ specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). 
++ pdb: *pdb-spec ++ nodeSelector: *nodeSelector-spec ++ tolerations: *tolerations-spec ++ containerSecurityContext: *containerSecurityContext-spec ++ image: *image-spec ++ livenessProbe: *probe-spec ++ readinessProbe: *probe-spec ++ resources: *resources-spec ++ defaultTarget: ++ type: [string, "null"] ++ description: | ++ Override the URL for the default routing target for the proxy. ++ Defaults to JupyterHub itself. ++ This will generally only have an effect while JupyterHub is not running, ++ as JupyterHub adds itself as the default target after it starts. ++ errorTarget: ++ type: [string, "null"] ++ description: | ++ Override the URL for the error target for the proxy. ++ Defaults to JupyterHub itself. ++ Useful to reduce load on the Hub ++ or produce more informative error messages than the Hub's default, ++ e.g. in highly customized deployments such as BinderHub. ++ See Configurable HTTP Proxy for details on implementing an error target. ++ extraPodSpec: *extraPodSpec-spec ++ secretToken: ++ type: [string, "null"] ++ description: | ++ ```{note} ++ As of version 1.0.0 this will automatically be generated and there is ++ no need to set it manually. ++ ++ If you wish to reset a generated key, you can use `kubectl edit` on ++ the k8s Secret typically named `hub` and remove the ++ `hub.config.ConfigurableHTTPProxy.auth_token` entry in the k8s Secret, ++ then perform a new `helm upgrade`. ++ ``` ++ ++ A 32-byte cryptographically secure randomly generated string used to ++ secure communications between the hub pod and the proxy pod running a ++ [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) ++ instance. ++ ++ ```sh ++ # to generate a value, run ++ openssl rand -hex 32 ++ ``` ++ ++ Changing this value will cause the proxy and hub pods to restart. It is good security ++ practice to rotate these values over time. If this secret leaks, *immediately* change ++ it to something else, or user data can be compromised. 
++ service: ++ type: object ++ additionalProperties: false ++ description: | ++ Configuration of the k8s Service `proxy-public` which either will ++ point to the `autohttps` pod running Traefik for TLS termination, or ++ the `proxy` pod running ConfigurableHTTPProxy. Incoming traffic from ++ users on the internet should always go through this k8s Service. ++ ++ When this service targets the `autohttps` pod which then routes to the ++ `proxy` pod, a k8s Service named `proxy-http` will be added targeting ++ the `proxy` pod and only accepting HTTP traffic on port 80. ++ properties: ++ type: ++ enum: [ClusterIP, NodePort, LoadBalancer, ExternalName] ++ description: | ++ Default `LoadBalancer`. ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types) ++ to learn more about service types. ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the proxy service. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Annotations to apply to the service that is exposing the proxy. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) ++ for more details about annotations. ++ nodePorts: ++ type: object ++ additionalProperties: false ++ description: | ++ Object to set NodePorts to expose the service on for http and https. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport) ++ for more details about NodePorts. 
++ properties: ++ http: ++ type: [integer, "null"] ++ description: | ++ The HTTP port the proxy-public service should be exposed on. ++ https: ++ type: [integer, "null"] ++ description: | ++ The HTTPS port the proxy-public service should be exposed on. ++ disableHttpPort: ++ type: boolean ++ description: | ++ Default `false`. ++ ++ If `true`, port 80 for incoming HTTP traffic will no longer be exposed. This should not be used with `proxy.https.type=letsencrypt` or `proxy.https.enabled=false` as it would remove the only exposed port. ++ extraPorts: ++ type: array ++ description: | ++ Extra ports the k8s Service should accept incoming traffic on, ++ which will be redirected to either the `autohttps` pod (traefik) ++ or the `proxy` pod (chp). ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#serviceport-v1-core) ++ for the structure of the items in this list. ++ loadBalancerIP: ++ type: [string, "null"] ++ description: | ++ The public IP address the proxy-public Kubernetes service should ++ be exposed on. This entry will end up at the configurable proxy ++ server that JupyterHub manages, which will direct traffic to user ++ pods at the `/user` path and the hub pod at the `/hub` path. ++ ++ Set this if you want to use a fixed external IP address instead of ++ a dynamically acquired one. This is relevant if you have a domain ++ name that you want to point to a specific IP and want to ensure it ++ doesn't change. ++ loadBalancerSourceRanges: ++ type: array ++ description: | ++ A list of IP CIDR ranges that are allowed to access the load balancer service. ++ Defaults to allowing everyone to access it. ++ https: ++ type: object ++ additionalProperties: false ++ description: | ++ Object for customizing the settings for HTTPS used by the JupyterHub's proxy.
++ For more information on configuring HTTPS for your JupyterHub, see the [HTTPS section in our security guide](https) ++ properties: ++ enabled: ++ type: [boolean, "null"] ++ description: | ++ Indicator to set whether HTTPS should be enabled or not on the proxy. Defaults to `true` if the https object is provided. ++ type: ++ enum: [null, "", letsencrypt, manual, offload, secret] ++ description: | ++ The type of HTTPS encryption that is used. ++ Decides on which ports and network policies are used for communication via HTTPS. Setting this to `secret` sets the type to manual HTTPS with a secret that has to be provided in the `https.secret` object. ++ Defaults to `letsencrypt`. ++ letsencrypt: ++ type: object ++ additionalProperties: false ++ properties: ++ contactEmail: ++ type: [string, "null"] ++ description: | ++ The contact email to be used for automatically provisioned HTTPS certificates by Let's Encrypt. For more information see [Set up automatic HTTPS](setup-automatic-https). ++ Required for automatic HTTPS. ++ acmeServer: ++ type: [string, "null"] ++ description: | ++ Let's Encrypt is one of various ACME servers that can provide ++ a certificate, and by default their production server is used. ++ ++ Let's Encrypt staging: https://acme-staging-v02.api.letsencrypt.org/directory ++ Let's Encrypt production: acmeServer: https://acme-v02.api.letsencrypt.org/directory ++ manual: ++ type: object ++ additionalProperties: false ++ description: | ++ Object for providing own certificates for manual HTTPS configuration. To be provided when setting `https.type` to `manual`. ++ See [Set up manual HTTPS](setup-manual-https) ++ properties: ++ key: ++ type: [string, "null"] ++ description: | ++ The RSA private key to be used for HTTPS. ++ To be provided in the form of ++ ++ ``` ++ key: | ++ -----BEGIN RSA PRIVATE KEY----- ++ ... ++ -----END RSA PRIVATE KEY----- ++ ``` ++ cert: ++ type: [string, "null"] ++ description: | ++ The certificate to be used for HTTPS. 
++ To be provided in the form of ++ ++ ``` ++ cert: | ++ -----BEGIN CERTIFICATE----- ++ ... ++ -----END CERTIFICATE----- ++ ``` ++ secret: ++ type: object ++ additionalProperties: false ++ description: | ++ Secret to be provided when setting `https.type` to `secret`. ++ properties: ++ name: ++ type: [string, "null"] ++ description: | ++ Name of the secret ++ key: ++ type: [string, "null"] ++ description: | ++ Path to the private key to be used for HTTPS. ++ Example: `'tls.key'` ++ crt: ++ type: [string, "null"] ++ description: | ++ Path to the certificate to be used for HTTPS. ++ Example: `'tls.crt'` ++ hosts: ++ type: array ++ description: | ++ You domain in list form. ++ Required for automatic HTTPS. See [Set up automatic HTTPS](setup-automatic-https). ++ To be provided like: ++ ``` ++ hosts: ++ - ++ ``` ++ traefik: ++ type: object ++ additionalProperties: false ++ description: | ++ Configure the traefik proxy used to terminate TLS when 'autohttps' is enabled ++ properties: ++ revisionHistoryLimit: *revisionHistoryLimit ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the traefik pod. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ networkPolicy: *networkPolicy-spec ++ extraInitContainers: ++ type: array ++ description: | ++ list of extraInitContainers to be run with traefik pod, after the containers set in the chart. 
See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) ++ ++ ```yaml ++ proxy: ++ traefik: ++ extraInitContainers: ++ - name: init-myservice ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command1'] ++ - name: init-mydb ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command2'] ++ ``` ++ extraEnv: ++ type: [object, array] ++ additionalProperties: true ++ description: | ++ Extra environment variables that should be set for the traefik pod. ++ ++ Environment Variables here may be used to configure traefik. ++ ++ String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which ++ is a part of Kubernetes. ++ ++ ```yaml ++ proxy: ++ traefik: ++ extraEnv: ++ # basic notation (for literal values only) ++ MY_ENV_VARS_NAME1: "my env var value 1" ++ ++ # explicit notation (the "name" field takes precedence) ++ TRAEFIK_NAMESPACE: ++ name: TRAEFIK_NAMESPACE ++ valueFrom: ++ fieldRef: ++ fieldPath: metadata.namespace ++ ++ # implicit notation (the "name" field is implied) ++ PREFIXED_TRAEFIK_NAMESPACE: ++ value: "my-prefix-$(TRAEFIK_NAMESPACE)" ++ SECRET_VALUE: ++ valueFrom: ++ secretKeyRef: ++ name: my-k8s-secret ++ key: password ++ ``` ++ ++ For more information, see the [Kubernetes EnvVar ++ specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). ++ pdb: *pdb-spec ++ nodeSelector: *nodeSelector-spec ++ tolerations: *tolerations-spec ++ containerSecurityContext: *containerSecurityContext-spec ++ extraDynamicConfig: ++ type: object ++ additionalProperties: true ++ description: | ++ This refers to traefik's post-startup configuration. ++ ++ This Helm chart already provide such configuration, so this is a ++ place where you can merge in additional configuration. 
If you are ++ about to use this configuration, you may want to inspect the ++ default configuration declared ++ [here](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/HEAD/jupyterhub/templates/proxy/autohttps/_configmap-dynamic.yaml). ++ extraPorts: ++ type: array ++ description: | ++ Extra ports for the traefik container within the autohttps pod ++ that you would like to expose, formatted in a k8s native way. ++ extraStaticConfig: ++ type: object ++ additionalProperties: true ++ description: | ++ This refers to traefik's startup configuration. ++ ++ This Helm chart already provide such configuration, so this is a ++ place where you can merge in additional configuration. If you are ++ about to use this configuration, you may want to inspect the ++ default configuration declared ++ [here](https://github.com/jupyterhub/zero-to-jupyterhub-k8s/blob/HEAD/jupyterhub/templates/proxy/autohttps/_configmap-traefik.yaml). ++ extraVolumes: *extraVolumes-spec ++ extraVolumeMounts: *extraVolumeMounts-spec ++ hsts: ++ type: object ++ additionalProperties: false ++ required: [includeSubdomains, maxAge, preload] ++ description: | ++ This section regards a HTTP Strict-Transport-Security (HSTS) ++ response header. It can act as a request for a visiting web ++ browsers to enforce HTTPS on their end in for a given time into ++ the future, and optionally also for future requests to subdomains. ++ ++ These settings relate to traefik configuration which we use as a ++ TLS termination proxy. ++ ++ See [Mozilla's ++ documentation](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Strict-Transport-Security) ++ for more information. 
++ properties: ++ includeSubdomains: ++ type: boolean ++ maxAge: ++ type: integer ++ preload: ++ type: boolean ++ image: *image-spec ++ resources: *resources-spec ++ serviceAccount: *serviceAccount ++ extraPodSpec: *extraPodSpec-spec ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ K8s labels for the proxy pod. ++ ++ ```{note} ++ For consistency, this should really be located under ++ proxy.chp.labels but isn't for historical reasons. ++ ``` ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ K8s annotations for the proxy pod. ++ ++ ```{note} ++ For consistency, this should really be located under ++ proxy.chp.annotations but isn't for historical reasons. ++ ``` ++ deploymentStrategy: ++ type: object ++ additionalProperties: false ++ properties: ++ rollingUpdate: ++ type: [string, "null"] ++ type: ++ type: [string, "null"] ++ description: | ++ While the proxy pod running ++ [configurable-http-proxy](https://github.com/jupyterhub/configurable-http-proxy) ++ could run in parallel, two instances running in parallel wouldn't ++ both receive updates from JupyterHub regarding how it should route ++ traffic. Due to this we default to using a deployment strategy of ++ Recreate instead of RollingUpdate. ++ secretSync: ++ type: object ++ additionalProperties: false ++ description: | ++ This configuration section refers to configuration of the sidecar ++ container in the autohttps pod running next to its traefik container ++ responsible for TLS termination. ++ ++ The purpose of this container is to store away and load TLS ++ certificates from a k8s Secret. The TLS certificates are acquired by ++ the ACME client (LEGO) that is running within the traefik container, ++ where traefik is using them for TLS termination. 
++ properties: ++ containerSecurityContext: *containerSecurityContext-spec ++ image: *image-spec ++ resources: *resources-spec ++ ++ singleuser: ++ type: object ++ additionalProperties: false ++ description: | ++ Options for customizing the environment that is provided to the users after they log in. ++ properties: ++ networkPolicy: *networkPolicy-spec ++ podNameTemplate: ++ type: [string, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.pod_name_template](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.pod_name_template). ++ cpu: ++ type: object ++ additionalProperties: false ++ description: | ++ Set CPU limits & guarantees that are enforced for each user. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) ++ for more info. ++ properties: ++ limit: ++ type: [number, "null"] ++ guarantee: ++ type: [number, "null"] ++ memory: ++ type: object ++ additionalProperties: false ++ description: | ++ Set Memory limits & guarantees that are enforced for each user. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/) ++ for more info. ++ properties: ++ limit: ++ type: [number, string, "null"] ++ guarantee: ++ type: [number, string, "null"] ++ description: | ++ Note that this field is referred to as *requests* by the Kubernetes API. ++ image: *image-spec ++ initContainers: ++ type: array ++ description: | ++ list of initContainers to be run every singleuser pod. 
See [Kubernetes Docs](https://kubernetes.io/docs/concepts/workloads/pods/init-containers/) ++ ++ ```yaml ++ singleuser: ++ initContainers: ++ - name: init-myservice ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command1'] ++ - name: init-mydb ++ image: busybox:1.28 ++ command: ['sh', '-c', 'command2'] ++ ``` ++ profileList: ++ type: array ++ description: | ++ For more information about the profile list, see [KubeSpawner's ++ documentation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner) ++ as this is simply a passthrough to that configuration. ++ ++ ```{note} ++ The image-pullers are aware of the overrides of images in ++ `singleuser.profileList` but they won't be if you configure it in ++ JupyterHub's configuration of '`c.KubeSpawner.profile_list`. ++ ``` ++ ++ ```yaml ++ singleuser: ++ profileList: ++ - display_name: "Default: Shared, 8 CPU cores" ++ description: "Your code will run on a shared machine with CPU only." ++ default: True ++ - display_name: "Personal, 4 CPU cores & 26GB RAM, 1 NVIDIA Tesla K80 GPU" ++ description: "Your code will run a personal machine with a GPU." ++ kubespawner_override: ++ extra_resource_limits: ++ nvidia.com/gpu: "1" ++ ``` ++ extraFiles: *extraFiles ++ extraEnv: ++ type: [object, array] ++ additionalProperties: true ++ description: | ++ Extra environment variables that should be set for the user pods. ++ ++ String literals with `$(ENV_VAR_NAME)` will be expanded by Kubelet which ++ is a part of Kubernetes. Note that the user pods will already have ++ access to a set of environment variables that you can use, like ++ `JUPYTERHUB_USER` and `JUPYTERHUB_HOST`. For more information about these ++ inspect [this source ++ code](https://github.com/jupyterhub/jupyterhub/blob/cc8e7806530466dce8968567d1bbd2b39a7afa26/jupyterhub/spawner.py#L763). 
++ ++ ```yaml ++ singleuser: ++ extraEnv: ++ # basic notation (for literal values only) ++ MY_ENV_VARS_NAME1: "my env var value 1" ++ ++ # explicit notation (the "name" field takes precedence) ++ USER_NAMESPACE: ++ name: USER_NAMESPACE ++ valueFrom: ++ fieldRef: ++ fieldPath: metadata.namespace ++ ++ # implicit notation (the "name" field is implied) ++ PREFIXED_USER_NAMESPACE: ++ value: "my-prefix-$(USER_NAMESPACE)" ++ SECRET_VALUE: ++ valueFrom: ++ secretKeyRef: ++ name: my-k8s-secret ++ key: password ++ ``` ++ ++ For more information, see the [Kubernetes EnvVar ++ specification](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#envvar-v1-core). ++ nodeSelector: *nodeSelector-spec ++ extraTolerations: *tolerations-spec ++ extraNodeAffinity: ++ type: object ++ additionalProperties: false ++ description: | ++ Affinities describe where pods prefer or require to be scheduled, they ++ may prefer or require a node where they are to be scheduled to have a ++ certain label (node affinity). They may also require to be scheduled ++ in proximity or with a lack of proximity to another pod (pod affinity ++ and anti pod affinity). ++ ++ See the [Kubernetes ++ docs](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/) ++ for more info. ++ properties: ++ required: ++ type: array ++ description: | ++ Pass this field an array of ++ [`NodeSelectorTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#nodeselectorterm-v1-core) ++ objects. ++ preferred: ++ type: array ++ description: | ++ Pass this field an array of ++ [`PreferredSchedulingTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#preferredschedulingterm-v1-core) ++ objects. ++ extraPodAffinity: ++ type: object ++ additionalProperties: false ++ description: | ++ See the description of `singleuser.extraNodeAffinity`. 
++ properties: ++ required: ++ type: array ++ description: | ++ Pass this field an array of ++ [`PodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podaffinityterm-v1-core) ++ objects. ++ preferred: ++ type: array ++ description: | ++ Pass this field an array of ++ [`WeightedPodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#weightedpodaffinityterm-v1-core) ++ objects. ++ extraPodAntiAffinity: ++ type: object ++ additionalProperties: false ++ description: | ++ See the description of `singleuser.extraNodeAffinity`. ++ properties: ++ required: ++ type: array ++ description: | ++ Pass this field an array of ++ [`PodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#podaffinityterm-v1-core) ++ objects. ++ preferred: ++ type: array ++ description: | ++ Pass this field an array of ++ [`WeightedPodAffinityTerm`](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.23/#weightedpodaffinityterm-v1-core) ++ objects. ++ cloudMetadata: ++ type: object ++ additionalProperties: false ++ required: [blockWithIptables, ip] ++ description: | ++ Please refer to dedicated section in [the Helm chart ++ documentation](block-metadata-iptables) for more information about ++ this. ++ properties: ++ blockWithIptables: ++ type: boolean ++ ip: ++ type: string ++ ++ cmd: ++ type: [array, string, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.cmd](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.cmd). ++ The default is "jupyterhub-singleuser". ++ Use `cmd: null` to launch a custom CMD from the image, ++ which must launch jupyterhub-singleuser or an equivalent process eventually. ++ For example: Jupyter's docker-stacks images. 
++ defaultUrl: ++ type: [string, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.default_url](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.default_url). ++ # FIXME: name mismatch, named events_enabled in kubespawner ++ events: ++ type: [boolean, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.events_enabled](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.events_enabled). ++ extraAnnotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_annotations](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_annotations). ++ extraContainers: ++ type: array ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_containers](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_containers). ++ extraLabels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_labels](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_labels). ++ extraPodConfig: ++ type: object ++ additionalProperties: true ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_pod_config](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_pod_config). 
++ extraResource: ++ type: object ++ additionalProperties: false ++ properties: ++ # FIXME: name mismatch, named extra_resource_guarantees in kubespawner ++ guarantees: ++ type: object ++ additionalProperties: true ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_resource_guarantees](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_resource_guarantees). ++ # FIXME: name mismatch, named extra_resource_limits in kubespawner ++ limits: ++ type: object ++ additionalProperties: true ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.extra_resource_limits](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_resource_limits). ++ fsGid: ++ type: [integer, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.fs_gid](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.fs_gid). ++ lifecycleHooks: ++ type: object ++ additionalProperties: false ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.lifecycle_hooks](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.lifecycle_hooks). ++ properties: ++ postStart: ++ type: object ++ additionalProperties: true ++ preStop: ++ type: object ++ additionalProperties: true ++ networkTools: ++ type: object ++ additionalProperties: false ++ description: | ++ This configuration section refers to configuration of a conditionally ++ created initContainer for the user pods with a purpose to block a ++ specific IP address. ++ ++ This initContainer will be created if ++ [`singleuser.cloudMetadata.blockWithIptables`](schema_singleuser.cloudMetadata.blockWithIptables) ++ is set to true. 
++ properties: ++ image: *image-spec ++ resources: *resources-spec ++ # FIXME: name mismatch, named service_account in kubespawner ++ serviceAccountName: ++ type: [string, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.service_account](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.service_account). ++ startTimeout: ++ type: [integer, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.start_timeout](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.start_timeout). ++ storage: ++ type: object ++ additionalProperties: false ++ required: [type, homeMountPath] ++ description: | ++ This section configures KubeSpawner directly to some extent but also ++ indirectly through Helm chart specific configuration options such as ++ [`singleuser.storage.type`](schema_singleuser.storage.type). ++ properties: ++ capacity: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.storage_capacity`. ++ ++ See the [KubeSpawner ++ documentation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html) ++ for more information. ++ dynamic: ++ type: object ++ additionalProperties: false ++ properties: ++ pvcNameTemplate: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.pvc_name_template` which will be the ++ resource name of the PVC created by KubeSpawner for each user ++ if needed. ++ storageAccessModes: ++ type: array ++ items: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.storage_access_modes`. ++ ++ See KubeSpawners documentation and [the k8s ++ documentation](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes) ++ for more information. 
++ storageClass: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.storage_class`, which can be an ++ explicit StorageClass to dynamically provision storage for the ++ PVC that KubeSpawner will create. ++ ++ There is often a default StorageClass available in k8s clusters ++ for use if this is unspecified. ++ volumeNameTemplate: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.volume_name_template`, which is the ++ name to reference from the containers volumeMounts section. ++ extraLabels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Configures `KubeSpawner.storage_extra_labels`. Note that these ++ labels are set on the PVC during creation only and won't be ++ updated after creation. ++ extraVolumeMounts: *extraVolumeMounts-spec ++ extraVolumes: *extraVolumes-spec ++ homeMountPath: ++ type: string ++ description: | ++ The location within the container where the home folder storage ++ should be mounted. ++ static: ++ type: object ++ additionalProperties: false ++ properties: ++ pvcName: ++ type: [string, "null"] ++ description: | ++ Configures `KubeSpawner.pvc_claim_name` to reference ++ pre-existing storage. ++ subPath: ++ type: [string, "null"] ++ description: | ++ Configures the `subPath` field of a ++ `KubeSpawner.volume_mounts` entry added by the Helm chart. ++ ++ Path within the volume from which the container's volume ++ should be mounted. ++ type: ++ enum: [dynamic, static, none] ++ description: | ++ Decide if you want storage to be provisioned dynamically ++ (dynamic), or if you want to attach existing storage (static), or ++ don't want any storage to be attached (none).
++ allowPrivilegeEscalation: ++ type: [boolean, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.allow_privilege_escalation](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.allow_privilege_escalation). ++ uid: ++ type: [integer, "null"] ++ description: | ++ Passthrough configuration for ++ [KubeSpawner.uid](https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.uid). ++ ++ This dictates as what user the main container will start up as. ++ ++ As an example of when this is needed, consider if you want to enable ++ sudo rights for some of your users. This can be done by starting up as ++ root, enabling it from the container in a startup script, and then ++ transitioning to the normal user. ++ ++ Default is 1000, set to null to use the container's default. ++ ++ scheduling: ++ type: object ++ additionalProperties: false ++ description: | ++ Objects for customizing the scheduling of various pods on the nodes and ++ related labels. ++ properties: ++ userScheduler: ++ type: object ++ additionalProperties: false ++ required: [enabled, plugins, pluginConfig, logLevel] ++ description: | ++ The user scheduler is making sure that user pods are scheduled ++ tight on nodes, this is useful for autoscaling of user node pools. ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Enables the user scheduler. ++ revisionHistoryLimit: *revisionHistoryLimit ++ replicas: ++ type: integer ++ description: | ++ You can have multiple schedulers to share the workload or improve ++ availability on node failure. ++ image: *image-spec ++ pdb: *pdb-spec ++ nodeSelector: *nodeSelector-spec ++ tolerations: *tolerations-spec ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the userScheduler pods. 
++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra annotations to add to the user-scheduler pods. ++ containerSecurityContext: *containerSecurityContext-spec ++ logLevel: ++ type: integer ++ description: | ++ Corresponds to the verbosity level of logging made by the ++ kube-scheduler binary running within the user-scheduler pod. ++ plugins: ++ type: object ++ additionalProperties: true ++ description: | ++ These plugins refer to kube-scheduler plugins as documented ++ [here](https://kubernetes.io/docs/reference/scheduling/config/). ++ ++ The user-scheduler is really just a kube-scheduler configured in a ++ way to pack users tight on nodes using these plugins. See ++ values.yaml for information about the default plugins. ++ pluginConfig: ++ type: array ++ description: | ++ Individually activated plugins can be configured further. ++ resources: *resources-spec ++ serviceAccount: *serviceAccount ++ extraPodSpec: *extraPodSpec-spec ++ podPriority: ++ type: object ++ additionalProperties: false ++ description: | ++ Pod Priority is used to allow real users to evict user placeholder pods ++ that in turn by entering a Pending state can trigger a scale up by a ++ cluster autoscaler. ++ ++ Having this option enabled only makes sense if the following conditions ++ are met: ++ ++ 1. A cluster autoscaler is installed. ++ 2. user-placeholder pods are configured to have a priority equal or ++ higher than the cluster autoscaler's "priority cutoff" so that the ++ cluster autoscaler scales up a node in advance for a pending user ++ placeholder pod. ++ 3. Normal user pods have a higher priority than the user-placeholder ++ pods. ++ 4. Image puller pods have a priority between normal user pods and ++ user-placeholder pods.
++ ++ Note that if the default priority cutoff if not configured on cluster ++ autoscaler, it will currently default to 0, and that in the future ++ this is meant to be lowered. If your cloud provider is installing the ++ cluster autoscaler for you, they may also configure this specifically. ++ ++ Recommended settings for a cluster autoscaler... ++ ++ ... with a priority cutoff of -10 (GKE): ++ ++ ```yaml ++ podPriority: ++ enabled: true ++ globalDefault: false ++ defaultPriority: 0 ++ imagePullerPriority: -5 ++ userPlaceholderPriority: -10 ++ ``` ++ ++ ... with a priority cutoff of 0: ++ ++ ```yaml ++ podPriority: ++ enabled: true ++ globalDefault: true ++ defaultPriority: 10 ++ imagePullerPriority: 5 ++ userPlaceholderPriority: 0 ++ ``` ++ properties: ++ enabled: ++ type: boolean ++ globalDefault: ++ type: boolean ++ description: | ++ Warning! This will influence all pods in the cluster. ++ ++ The priority a pod usually get is 0. But this can be overridden ++ with a PriorityClass resource if it is declared to be the global ++ default. This configuration option allows for the creation of such ++ global default. ++ defaultPriority: ++ type: integer ++ description: | ++ The actual value for the default pod priority. ++ imagePullerPriority: ++ type: integer ++ description: | ++ The actual value for the [hook|continuous]-image-puller pods' priority. ++ userPlaceholderPriority: ++ type: integer ++ description: | ++ The actual value for the user-placeholder pods' priority. ++ userPlaceholder: ++ type: object ++ additionalProperties: false ++ description: | ++ User placeholders simulate users but will thanks to PodPriority be ++ evicted by the cluster autoscaler if a real user shows up. In this way ++ placeholders allow you to create a headroom for the real users and ++ reduce the risk of a user having to wait for a node to be added. 
Be ++ sure to use the the continuous image puller as well along with ++ placeholders, so the images are also available when real users arrive. ++ ++ To test your setup efficiently, you can adjust the amount of user ++ placeholders with the following command: ++ ```sh ++ # Configure to have 3 user placeholders ++ kubectl scale sts/user-placeholder --replicas=3 ++ ``` ++ properties: ++ enabled: ++ type: boolean ++ image: *image-spec ++ revisionHistoryLimit: *revisionHistoryLimit ++ replicas: ++ type: integer ++ description: | ++ How many placeholder pods would you like to have? ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the userPlaceholder pods. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra annotations to add to the placeholder pods. ++ resources: ++ type: object ++ additionalProperties: true ++ description: | ++ Unless specified here, the placeholder pods will request the same ++ resources specified for the real singleuser pods. ++ containerSecurityContext: *containerSecurityContext-spec ++ corePods: ++ type: object ++ additionalProperties: false ++ description: | ++ These settings influence the core pods like the hub, proxy and ++ user-scheduler pods. 
++ These settings influence all pods considered core pods, namely: ++ ++ - hub ++ - proxy ++ - autohttps ++ - hook-image-awaiter ++ - user-scheduler ++ ++ By defaults, the tolerations are: ++ ++ - hub.jupyter.org/dedicated=core:NoSchedule ++ - hub.jupyter.org_dedicated=core:NoSchedule ++ ++ Note that tolerations set here are combined with the respective ++ components dedicated tolerations, and that `_` is available in case ++ `/` isn't allowed in the clouds tolerations. ++ properties: ++ tolerations: *tolerations-spec ++ nodeAffinity: ++ type: object ++ additionalProperties: false ++ description: | ++ Where should pods be scheduled? Perhaps on nodes with a certain ++ label is preferred or even required? ++ properties: ++ matchNodePurpose: ++ enum: [ignore, prefer, require] ++ description: | ++ Decide if core pods *ignore*, *prefer* or *require* to ++ schedule on nodes with this label: ++ ``` ++ hub.jupyter.org/node-purpose=core ++ ``` ++ userPods: ++ type: object ++ additionalProperties: false ++ description: | ++ These settings influence all pods considered user pods, namely: ++ ++ - user-placeholder ++ - hook-image-puller ++ - continuous-image-puller ++ - jupyter- ++ ++ By defaults, the tolerations are: ++ ++ - hub.jupyter.org/dedicated=core:NoSchedule ++ - hub.jupyter.org_dedicated=core:NoSchedule ++ ++ Note that tolerations set here are combined with the respective ++ components dedicated tolerations, and that `_` is available in case ++ `/` isn't allowed in the clouds tolerations. ++ properties: ++ tolerations: *tolerations-spec ++ nodeAffinity: ++ type: object ++ additionalProperties: false ++ description: | ++ Where should pods be scheduled? Perhaps on nodes with a certain ++ label is preferred or even required? 
++ properties: ++ matchNodePurpose: ++ enum: [ignore, prefer, require] ++ description: | ++ Decide if user pods *ignore*, *prefer* or *require* to ++ schedule on nodes with this label: ++ ``` ++ hub.jupyter.org/node-purpose=user ++ ``` ++ ++ ingress: ++ type: object ++ additionalProperties: false ++ required: [enabled] ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Enable the creation of a Kubernetes Ingress to proxy-public service. ++ ++ See [Advanced Topics — Zero to JupyterHub with Kubernetes ++ 0.7.0 documentation](ingress) ++ for more details. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Annotations to apply to the Ingress resource. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations/) ++ for more details about annotations. ++ ingressClassName: ++ type: [string, "null"] ++ description: | ++ Maps directly to the Ingress resource's `spec.ingressClassName``. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#ingress-class) ++ for more details. ++ hosts: ++ type: array ++ description: | ++ List of hosts to route requests to the proxy. ++ pathSuffix: ++ type: [string, "null"] ++ description: | ++ Suffix added to Ingress's routing path pattern. ++ ++ Specify `*` if your ingress matches path by glob pattern. ++ pathType: ++ enum: [Prefix, Exact, ImplementationSpecific] ++ description: | ++ The path type to use. The default value is 'Prefix'. ++ ++ See [the Kubernetes documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#path-types) ++ for more details about path types. ++ tls: ++ type: array ++ description: | ++ TLS configurations for Ingress. ++ ++ See [the Kubernetes ++ documentation](https://kubernetes.io/docs/concepts/services-networking/ingress/#tls) ++ for more details about annotations. 
++ ++ prePuller: ++ type: object ++ additionalProperties: false ++ required: [hook, continuous] ++ properties: ++ revisionHistoryLimit: *revisionHistoryLimit ++ labels: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Extra labels to add to the pre puller job pods. ++ ++ See the [Kubernetes docs](https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/) ++ to learn more about labels. ++ annotations: ++ type: object ++ additionalProperties: false ++ patternProperties: *labels-and-annotations-patternProperties ++ description: | ++ Annotations to apply to the hook and continous image puller pods. One example use case is to ++ disable istio sidecars which could interfere with the image pulling. ++ resources: ++ type: object ++ additionalProperties: true ++ description: | ++ These are standard Kubernetes resources with requests and limits for ++ cpu and memory. They will be used on the containers in the pods ++ pulling images. These should be set extremely low as the containers ++ shut down directly or is a pause container that just idles. ++ ++ They were made configurable as usage of ResourceQuota may require ++ containers in the namespace to have explicit resources set. ++ extraTolerations: *tolerations-spec ++ hook: ++ type: object ++ additionalProperties: false ++ required: [enabled] ++ description: | ++ See the [*optimization ++ section*](pulling-images-before-users-arrive) ++ for more details. ++ properties: ++ enabled: ++ type: boolean ++ pullOnlyOnChanges: ++ type: boolean ++ description: | ++ Pull only if changes have been made to the images to pull, or more ++ accurately if the hook-image-puller daemonset has changed in any ++ way. ++ podSchedulingWaitDuration: ++ description: | ++ The `hook-image-awaiter` has a criteria to await all the ++ `hook-image-puller` DaemonSet's pods to both schedule and finish ++ their image pulling. 
This flag can be used to relax this criteria ++ to instead only await the pods that _has already scheduled_ to ++ finish image pulling after a certain duration. ++ ++ The value of this is that sometimes the newly created ++ `hook-image-puller` pods cannot be scheduled because nodes are ++ full, and then it probably won't make sense to block a `helm ++ upgrade`. ++ ++ An infinite duration to wait for pods to schedule can be ++ represented by `-1`. This was the default behavior of version ++ 0.9.0 and earlier. ++ type: integer ++ nodeSelector: *nodeSelector-spec ++ tolerations: *tolerations-spec ++ containerSecurityContext: *containerSecurityContext-spec ++ image: *image-spec ++ resources: *resources-spec ++ serviceAccount: *serviceAccount ++ continuous: ++ type: object ++ additionalProperties: false ++ required: [enabled] ++ description: | ++ See the [*optimization ++ section*](pulling-images-before-users-arrive) ++ for more details. ++ ++ ```{note} ++ If used with a Cluster Autoscaler (an autoscaling node pool), also add ++ user-placeholders and enable pod priority. ++ ``` ++ properties: ++ enabled: ++ type: boolean ++ pullProfileListImages: ++ type: boolean ++ description: | ++ The singleuser.profileList configuration can provide a selection of ++ images. This option determines if all images identified there should ++ be pulled, both by the hook and continuous pullers. ++ ++ Images are looked for under `kubespawner_override`, and also ++ `profile_options.choices.kubespawner_override` since version 3.2.0. ++ ++ The reason to disable this, is that if you have for example 10 images ++ which start pulling in order from 1 to 10, a user that arrives and ++ wants to start a pod with image number 10 will need to wait for all ++ images to be pulled, and then it may be preferable to just let the ++ user arriving wait for a single image to be pulled on arrival. 
++ extraImages: ++ type: object ++ additionalProperties: false ++ description: | ++ See the [*optimization section*](images-that-will-be-pulled) for more ++ details. ++ ++ ```yaml ++ prePuller: ++ extraImages: ++ my-extra-image-i-want-pulled: ++ name: jupyter/all-spark-notebook ++ tag: 2343e33dec46 ++ ``` ++ patternProperties: ++ ".*": ++ type: object ++ additionalProperties: false ++ required: [name, tag] ++ properties: ++ name: ++ type: string ++ tag: ++ type: string ++ containerSecurityContext: *containerSecurityContext-spec ++ pause: ++ type: object ++ additionalProperties: false ++ description: | ++ The image-puller pods rely on initContainer to pull all images, and ++ their actual container when they are done is just running a `pause` ++ container. These are settings for that pause container. ++ properties: ++ containerSecurityContext: *containerSecurityContext-spec ++ image: *image-spec ++ ++ custom: ++ type: object ++ additionalProperties: true ++ description: | ++ Additional values to pass to the Hub. ++ JupyterHub will not itself look at these, ++ but you can read values in your own custom config via `hub.extraConfig`. ++ For example: ++ ++ ```yaml ++ custom: ++ myHost: "https://example.horse" ++ hub: ++ extraConfig: ++ myConfig.py: | ++ c.MyAuthenticator.host = get_config("custom.myHost") ++ ``` ++ ++ cull: ++ type: object ++ additionalProperties: false ++ required: [enabled] ++ description: | ++ The ++ [jupyterhub-idle-culler](https://github.com/jupyterhub/jupyterhub-idle-culler) ++ can run as a JupyterHub managed service to _cull_ running servers. ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Enable/disable use of jupyter-idle-culler. ++ users: ++ type: [boolean, "null"] ++ description: See the `--cull-users` flag. ++ adminUsers: ++ type: [boolean, "null"] ++ description: See the `--cull-admin-users` flag. ++ removeNamedServers: ++ type: [boolean, "null"] ++ description: See the `--remove-named-servers` flag. 
++ timeout: ++ type: [integer, "null"] ++ description: See the `--timeout` flag. ++ every: ++ type: [integer, "null"] ++ description: See the `--cull-every` flag. ++ concurrency: ++ type: [integer, "null"] ++ description: See the `--concurrency` flag. ++ maxAge: ++ type: [integer, "null"] ++ description: See the `--max-age` flag. ++ ++ debug: ++ type: object ++ additionalProperties: false ++ required: [enabled] ++ properties: ++ enabled: ++ type: boolean ++ description: | ++ Increases the loglevel throughout the resources in the Helm chart. ++ ++ rbac: ++ type: object ++ additionalProperties: false ++ required: [create] ++ properties: ++ enabled: ++ type: boolean ++ # This schema entry is needed to help us print a more helpful error ++ # message in NOTES.txt if hub.fsGid is set. ++ # ++ description: | ++ ````{note} ++ Removed in version 2.0.0. If you have been using `rbac.enable=false` ++ (strongly discouraged), then the equivalent configuration would be: ++ ++ ```yaml ++ rbac: ++ create: false ++ hub: ++ serviceAccount: ++ create: false ++ proxy: ++ traefik: ++ serviceAccount: ++ create: false ++ scheduling: ++ userScheduler: ++ serviceAccount: ++ create: false ++ prePuller: ++ hook: ++ serviceAccount: ++ create: false ++ ``` ++ ```` ++ create: ++ type: boolean ++ description: | ++ Decides if (Cluster)Role and (Cluster)RoleBinding resources are ++ created and bound to the configured serviceAccounts. ++ ++ global: ++ type: object ++ additionalProperties: true ++ properties: ++ safeToShowValues: ++ type: boolean ++ description: | ++ A flag that should only be set to true temporarily when experiencing a ++ deprecation message that contain censored content that you wish to ++ reveal. 
+diff --git a/applications/jupyterhub/deploy/values.yaml b/applications/jupyterhub/deploy/values.yaml +index 2f5cbca..41e108d 100755 +--- a/applications/jupyterhub/deploy/values.yaml ++++ b/applications/jupyterhub/deploy/values.yaml +@@ -1,4 +1,4 @@ +-harness: ++harness: # EDIT: CLOUDHARNESS + subdomain: hub + service: + auto: false +@@ -31,6 +31,11 @@ harness: + fullnameOverride: "" + nameOverride: + ++# enabled is ignored by the jupyterhub chart itself, but a chart depending on ++# the jupyterhub chart conditionally can make use this config option as the ++# condition. ++enabled: ++ + # custom can contain anything you want to pass to the hub pod, as all passed + # Helm template values will be made available there. + custom: {} +@@ -54,10 +59,11 @@ imagePullSecrets: [] + # ConfigurableHTTPProxy speaks with the actual ConfigurableHTTPProxy server in + # the proxy pod. + hub: ++ revisionHistoryLimit: + config: + JupyterHub: + admin_access: true +- authenticator_class: keycloak ++ authenticator_class: keycloak # EDIT: CLOUDHARNESS + service: + type: ClusterIP + annotations: {} +@@ -68,7 +74,6 @@ hub: + baseUrl: / + cookieSecret: + initContainers: [] +- fsGid: 1000 + nodeSelector: {} + tolerations: [] + concurrentSpawnLimit: 64 +@@ -106,37 +111,38 @@ hub: + extraVolumes: [] + extraVolumeMounts: [] + image: +- name: jupyterhub/k8s-hub +- tag: "1.1.3" ++ name: quay.io/jupyterhub/k8s-hub ++ tag: "3.2.1" + pullPolicy: + pullSecrets: [] + resources: {} ++ podSecurityContext: ++ fsGroup: 1000 + containerSecurityContext: + runAsUser: 1000 + runAsGroup: 1000 + allowPrivilegeEscalation: false + lifecycle: {} ++ loadRoles: {} + services: {} + pdb: + enabled: false + maxUnavailable: + minAvailable: 1 + networkPolicy: +- enabled: false ++ enabled: true + ingress: [] +- ## egress for JupyterHub already includes Kubernetes internal DNS and +- ## access to the proxy, but can be restricted further, but ensure to allow +- ## access to the Kubernetes API server that couldn't be pinned 
ahead of +- ## time. +- ## +- ## ref: https://stackoverflow.com/a/59016417/2220152 +- egress: +- - to: +- - ipBlock: +- cidr: 0.0.0.0/0 ++ egress: [] ++ egressAllowRules: ++ cloudMetadataServer: true ++ dnsPortsCloudMetadataServer: true ++ dnsPortsKubeSystemNamespace: true ++ dnsPortsPrivateIPs: true ++ nonPrivateIPs: true ++ privateIPs: true + interNamespaceAccessLabels: ignore + allowedIngressPorts: [] +- allowNamedServers: true ++ allowNamedServers: true # EDIT: CLOUDHARNESS + namedServerLimitPerUser: + authenticatePrometheus: + redirectToServer: +@@ -163,11 +169,13 @@ hub: + timeoutSeconds: 1 + existingSecret: + serviceAccount: ++ create: true ++ name: + annotations: {} + extraPodSpec: {} + + rbac: +- enabled: true ++ create: true + + # proxy relates to the proxy pod, the proxy-public service, and the autohttps + # pod and proxy-http service. +@@ -202,7 +210,7 @@ proxy: + rollingUpdate: + # service relates to the proxy-public service + service: +- type: NodePort ++ type: NodePort # EDIT: CLOUDHARNESS + labels: {} + annotations: {} + nodePorts: +@@ -215,13 +223,17 @@ proxy: + # chp relates to the proxy pod, which is responsible for routing traffic based + # on dynamic configuration sent from JupyterHub to CHP's REST API. + chp: ++ revisionHistoryLimit: + containerSecurityContext: + runAsUser: 65534 # nobody user + runAsGroup: 65534 # nobody group + allowPrivilegeEscalation: false + image: +- name: jupyterhub/configurable-http-proxy +- tag: 4.5.0 # https://github.com/jupyterhub/configurable-http-proxy/releases ++ name: quay.io/jupyterhub/configurable-http-proxy ++ # tag is automatically bumped to new patch versions by the ++ # watch-dependencies.yaml workflow. 
++ # ++ tag: "4.6.1" # https://github.com/jupyterhub/configurable-http-proxy/tags + pullPolicy: + pullSecrets: [] + extraCommandLineFlags: [] +@@ -229,11 +241,14 @@ proxy: + enabled: true + initialDelaySeconds: 60 + periodSeconds: 10 ++ failureThreshold: 30 ++ timeoutSeconds: 3 + readinessProbe: + enabled: true + initialDelaySeconds: 0 + periodSeconds: 2 + failureThreshold: 1000 ++ timeoutSeconds: 1 + resources: {} + defaultTarget: + errorTarget: +@@ -241,12 +256,16 @@ proxy: + nodeSelector: {} + tolerations: [] + networkPolicy: +- enabled: false ++ enabled: true + ingress: [] +- egress: +- - to: +- - ipBlock: +- cidr: 0.0.0.0/0 ++ egress: [] ++ egressAllowRules: ++ cloudMetadataServer: true ++ dnsPortsCloudMetadataServer: true ++ dnsPortsKubeSystemNamespace: true ++ dnsPortsPrivateIPs: true ++ nonPrivateIPs: true ++ privateIPs: true + interNamespaceAccessLabels: ignore + allowedIngressPorts: [http, https] + pdb: +@@ -257,13 +276,17 @@ proxy: + # traefik relates to the autohttps pod, which is responsible for TLS + # termination when proxy.https.type=letsencrypt. + traefik: ++ revisionHistoryLimit: + containerSecurityContext: + runAsUser: 65534 # nobody user + runAsGroup: 65534 # nobody group + allowPrivilegeEscalation: false + image: + name: traefik +- tag: v2.4.11 # ref: https://hub.docker.com/_/traefik?tab=tags ++ # tag is automatically bumped to new patch versions by the ++ # watch-dependencies.yaml workflow. 
++ # ++ tag: "v2.10.7" # ref: https://hub.docker.com/_/traefik?tab=tags + pullPolicy: + pullSecrets: [] + hsts: +@@ -272,6 +295,7 @@ proxy: + maxAge: 15724800 # About 6 months + resources: {} + labels: {} ++ extraInitContainers: [] + extraEnv: {} + extraVolumes: [] + extraVolumeMounts: [] +@@ -283,10 +307,14 @@ proxy: + networkPolicy: + enabled: true + ingress: [] +- egress: +- - to: +- - ipBlock: +- cidr: 0.0.0.0/0 ++ egress: [] ++ egressAllowRules: ++ cloudMetadataServer: true ++ dnsPortsCloudMetadataServer: true ++ dnsPortsKubeSystemNamespace: true ++ dnsPortsPrivateIPs: true ++ nonPrivateIPs: true ++ privateIPs: true + interNamespaceAccessLabels: ignore + allowedIngressPorts: [http, https] + pdb: +@@ -294,6 +322,8 @@ proxy: + maxUnavailable: + minAvailable: 1 + serviceAccount: ++ create: true ++ name: + annotations: {} + extraPodSpec: {} + secretSync: +@@ -302,8 +332,8 @@ proxy: + runAsGroup: 65534 # nobody group + allowPrivilegeEscalation: false + image: +- name: jupyterhub/k8s-secret-sync +- tag: "1.1.3" ++ name: quay.io/jupyterhub/k8s-secret-sync ++ tag: "3.2.1" + pullPolicy: + pullSecrets: [] + resources: {} +@@ -342,29 +372,27 @@ singleuser: + preferred: [] + networkTools: + image: +- name: jupyterhub/k8s-network-tools +- tag: "1.1.3" ++ name: quay.io/jupyterhub/k8s-network-tools ++ tag: "3.2.1" + pullPolicy: + pullSecrets: [] ++ resources: {} + cloudMetadata: + # block set to true will append a privileged initContainer using the + # iptables to block the sensitive metadata server at the provided ip. +- blockWithIptables: false ++ blockWithIptables: true ++ ip: 169.254.169.254 + networkPolicy: +- enabled: false ++ enabled: true + ingress: [] +- egress: +- # Required egress to communicate with the hub and DNS servers will be +- # augmented to these egress rules. +- # +- # This default rule explicitly allows all outbound traffic from singleuser +- # pods, except to a typical IP used to return metadata that can be used by +- # someone with malicious intent. 
+- - to: +- - ipBlock: +- cidr: 0.0.0.0/0 +- except: +- - 169.254.169.254/32 ++ egress: [] ++ egressAllowRules: ++ cloudMetadataServer: false ++ dnsPortsCloudMetadataServer: true ++ dnsPortsKubeSystemNamespace: true ++ dnsPortsPrivateIPs: true ++ nonPrivateIPs: true ++ privateIPs: false + interNamespaceAccessLabels: ignore + allowedIngressPorts: [] + events: true +@@ -376,6 +404,7 @@ singleuser: + lifecycleHooks: {} + initContainers: [] + extraContainers: [] ++ allowPrivilegeEscalation: false + uid: 1000 + fsGid: 100 + serviceAccountName: +@@ -387,29 +416,29 @@ singleuser: + static: + pvcName: + subPath: "{username}" +- capacity: 10Mi +- homeMountPath: /home/workspace ++ capacity: 10Mi # EDIT: CLOUDHARNESS ++ homeMountPath: /home/workspace # EDIT: CLOUDHARNESS + dynamic: + storageClass: +- pvcNameTemplate: jupyter-{username} +- volumeNameTemplate: jupyter-{username} ++ pvcNameTemplate: jupyter-{username} # EDIT: CLOUDHARNESS ++ volumeNameTemplate: jupyter-{username} # EDIT: CLOUDHARNESS + storageAccessModes: [ReadWriteOnce] + image: +- name: jupyter/base-notebook +- tag: "hub-1.4.2" ++ name: quay.io/jupyterhub/k8s-singleuser-sample ++ tag: "3.2.1" + pullPolicy: + pullSecrets: [] + startTimeout: 300 + cpu: +- limit: 0.4 +- guarantee: 0.05 ++ limit: 0.4 # EDIT: CLOUDHARNESS ++ guarantee: 0.05 # EDIT: CLOUDHARNESS + memory: +- limit: 0.5G +- guarantee: 0.1G ++ limit: 0.5G # EDIT: CLOUDHARNESS ++ guarantee: 0.1G # EDIT: CLOUDHARNESS + extraResource: + limits: {} + guarantees: {} +- cmd: /usr/local/bin/start-singleuser.sh ++ cmd: jupyterhub-singleuser + defaultUrl: + extraPodConfig: {} + profileList: [] +@@ -417,74 +446,146 @@ singleuser: + # scheduling relates to the user-scheduler pods and user-placeholder pods. 
+ scheduling: + userScheduler: +- enabled: false ++ enabled: false # EDIT: CLOUDHARNESS ++ revisionHistoryLimit: + replicas: 2 + logLevel: 4 ++ # plugins are configured on the user-scheduler to make us score how we ++ # schedule user pods in a way to help us schedule on the most busy node. By ++ # doing this, we help scale down more effectively. It isn't obvious how to ++ # enable/disable scoring plugins, and configure them, to accomplish this. ++ # + # plugins ref: https://kubernetes.io/docs/reference/scheduling/config/#scheduling-plugins-1 ++ # migration ref: https://kubernetes.io/docs/reference/scheduling/config/#scheduler-configuration-migrations ++ # + plugins: + score: ++ # These scoring plugins are enabled by default according to ++ # https://kubernetes.io/docs/reference/scheduling/config/#scheduling-plugins ++ # 2022-02-22. ++ # ++ # Enabled with high priority: ++ # - NodeAffinity ++ # - InterPodAffinity ++ # - NodeResourcesFit ++ # - ImageLocality ++ # Remains enabled with low default priority: ++ # - TaintToleration ++ # - PodTopologySpread ++ # - VolumeBinding ++ # Disabled for scoring: ++ # - NodeResourcesBalancedAllocation ++ # + disabled: +- - name: SelectorSpread +- - name: TaintToleration +- - name: PodTopologySpread ++ # We disable these plugins (with regards to scoring) to not interfere ++ # or complicate our use of NodeResourcesFit. + - name: NodeResourcesBalancedAllocation +- - name: NodeResourcesLeastAllocated + # Disable plugins to be allowed to enable them again with a different + # weight and avoid an error. 
+- - name: NodePreferAvoidPods + - name: NodeAffinity + - name: InterPodAffinity ++ - name: NodeResourcesFit + - name: ImageLocality + enabled: +- - name: NodePreferAvoidPods +- weight: 161051 + - name: NodeAffinity + weight: 14631 + - name: InterPodAffinity + weight: 1331 +- - name: NodeResourcesMostAllocated ++ - name: NodeResourcesFit + weight: 121 + - name: ImageLocality + weight: 11 ++ pluginConfig: ++ # Here we declare that we should optimize pods to fit based on a ++ # MostAllocated strategy instead of the default LeastAllocated. ++ - name: NodeResourcesFit ++ args: ++ scoringStrategy: ++ resources: ++ - name: cpu ++ weight: 1 ++ - name: memory ++ weight: 1 ++ type: MostAllocated + containerSecurityContext: + runAsUser: 65534 # nobody user + runAsGroup: 65534 # nobody group + allowPrivilegeEscalation: false + image: + # IMPORTANT: Bumping the minor version of this binary should go hand in +- # hand with an inspection of the user-scheduelrs RBAC resources +- # that we have forked. +- name: k8s.gcr.io/kube-scheduler +- tag: v1.19.13 # ref: https://github.com/kubernetes/website/blob/main/content/en/releases/patch-releases.md ++ # hand with an inspection of the user-scheduelr's RBAC ++ # resources that we have forked in ++ # templates/scheduling/user-scheduler/rbac.yaml. ++ # ++ # Debugging advice: ++ # ++ # - Is configuration of kube-scheduler broken in ++ # templates/scheduling/user-scheduler/configmap.yaml? ++ # ++ # - Is the kube-scheduler binary's compatibility to work ++ # against a k8s api-server that is too new or too old? ++ # ++ # - You can update the GitHub workflow that runs tests to ++ # include "deploy/user-scheduler" in the k8s namespace report ++ # and reduce the user-scheduler deployments replicas to 1 in ++ # dev-config.yaml to get relevant logs from the user-scheduler ++ # pods. Inspect the "Kubernetes namespace report" action! 
++ # ++ # - Typical failures are that kube-scheduler fails to search for ++ # resources via its "informers", and won't start trying to ++ # schedule pods before they succeed which may require ++ # additional RBAC permissions or that the k8s api-server is ++ # aware of the resources. ++ # ++ # - If "successfully acquired lease" can be seen in the logs, it ++ # is a good sign kube-scheduler is ready to schedule pods. ++ # ++ name: registry.k8s.io/kube-scheduler ++ # tag is automatically bumped to new patch versions by the ++ # watch-dependencies.yaml workflow. The minor version is pinned in the ++ # workflow, and should be updated there if a minor version bump is done ++ # here. We aim to stay around 1 minor version behind the latest k8s ++ # version. ++ # ++ tag: "v1.28.6" # ref: https://github.com/kubernetes/kubernetes/tree/master/CHANGELOG + pullPolicy: + pullSecrets: [] + nodeSelector: {} + tolerations: [] ++ labels: {} ++ annotations: {} + pdb: + enabled: true + maxUnavailable: 1 + minAvailable: + resources: {} + serviceAccount: ++ create: true ++ name: + annotations: {} + extraPodSpec: {} + podPriority: + enabled: false + globalDefault: false + defaultPriority: 0 ++ imagePullerPriority: -5 + userPlaceholderPriority: -10 + userPlaceholder: + enabled: true + image: +- name: k8s.gcr.io/pause +- # tag's can be updated by inspecting the output of the command: +- # gcloud container images list-tags k8s.gcr.io/pause --sort-by=~tags ++ name: registry.k8s.io/pause ++ # tag is automatically bumped to new patch versions by the ++ # watch-dependencies.yaml workflow. 
+ # + # If you update this, also update prePuller.pause.image.tag +- tag: "3.5" ++ # ++ tag: "3.9" + pullPolicy: + pullSecrets: [] ++ revisionHistoryLimit: + replicas: 0 ++ labels: {} ++ annotations: {} + containerSecurityContext: + runAsUser: 65534 # nobody user + runAsGroup: 65534 # nobody group +@@ -517,6 +618,8 @@ scheduling: + + # prePuller relates to the hook|continuous-image-puller DaemonsSets + prePuller: ++ revisionHistoryLimit: ++ labels: {} + annotations: {} + resources: {} + containerSecurityContext: +@@ -530,8 +633,8 @@ prePuller: + pullOnlyOnChanges: true + # image and the configuration below relates to the hook-image-awaiter Job + image: +- name: jupyterhub/k8s-image-awaiter +- tag: "1.1.3" ++ name: quay.io/jupyterhub/k8s-image-awaiter ++ tag: "3.2.1" + pullPolicy: + pullSecrets: [] + containerSecurityContext: +@@ -543,6 +646,8 @@ prePuller: + tolerations: [] + resources: {} + serviceAccount: ++ create: true ++ name: + annotations: {} + continuous: + enabled: true +@@ -554,18 +659,20 @@ prePuller: + runAsGroup: 65534 # nobody group + allowPrivilegeEscalation: false + image: +- name: k8s.gcr.io/pause +- # tag's can be updated by inspecting the output of the command: +- # gcloud container images list-tags k8s.gcr.io/pause --sort-by=~tags ++ name: registry.k8s.io/pause ++ # tag is automatically bumped to new patch versions by the ++ # watch-dependencies.yaml workflow. 
+ # + # If you update this, also update scheduling.userPlaceholder.image.tag +- tag: "3.5" ++ # ++ tag: "3.9" + pullPolicy: + pullSecrets: [] + + ingress: + enabled: false + annotations: {} ++ ingressClassName: + hosts: [] + pathSuffix: + pathType: Prefix +@@ -581,7 +688,8 @@ ingress: + cull: + enabled: true + users: false # --cull-users +- removeNamedServers: true # --remove-named-servers ++ adminUsers: true # --cull-admin-users ++ removeNamedServers: true # EDIT: CLOUDHARNESS + timeout: 3600 # --timeout + every: 600 # --cull-every + concurrency: 10 # --concurrency +diff --git a/applications/jupyterhub/zero-to-jupyterhub-k8s b/applications/jupyterhub/zero-to-jupyterhub-k8s +new file mode 160000 +index 0000000..c92c123 +--- /dev/null ++++ b/applications/jupyterhub/zero-to-jupyterhub-k8s +@@ -0,0 +1 @@ ++Subproject commit c92c12374795e84f36f5f16c4e8b8a448ad2f230-dirty diff --git a/applications/jupyterhub/update.sh b/applications/jupyterhub/update.sh new file mode 100644 index 00000000..cddf6899 --- /dev/null +++ b/applications/jupyterhub/update.sh @@ -0,0 +1,28 @@ +git clone -n git@github.com:jupyterhub/zero-to-jupyterhub-k8s.git +git checkout jupyterhub +git checkout chartpress.yaml +pip install chartpress +cd zero-to-jupyterhub-k8s +chartpress -t $1 +cd .. +cp -R zero-to-jupyterhub-k8s/jupyterhub/templates/* deploy/templates +cp zero-to-jupyterhub-k8s/jupyterhub/files/hub/* deploy/resources/hub +cp zero-to-jupyterhub-k8s/jupyterhub/values* deploy +cd deploy + +rm -Rf templates/proxy/autohttps # Proxy is not used as node balancer +rm templates/ingress.yaml # Default cloudharness ingress is used +# Command to replace everything like files/hub/ inside deploy/templates with resources/jupyterhub/hub/ +find templates -type f -exec sed -i 's/files\/hub/resources\/jupyterhub\/hub/g' {} \; + +# replace .Values.hub. 
with .Values.hub.config with .Values.apps.jupyterhub.hub +find templates -type f -exec sed -i 's/.Values./.Values.apps.jupyterhub./g' {} \; + +# replace .Values.apps.jupyterhub.hub.image with .Values.apps.jupyterhub.harness.deployment.image +find templates -type f -exec sed -i 's/{{ .Values.apps.jupyterhub.hub.image.name }}:{{ .Values.apps.jupyterhub.hub.image.tag }}/{{ .Values.apps.jupyterhub.harness.deployment.image }}/g' {} \; + + + +find templates -type f -exec sed -i 's$.Template.BasePath "/hub$.Template.BasePath "/jupyterhub/hub$g' {} \; +find templates -type f -exec sed -i 's$.Template.BasePath "/proxy$.Template.BasePath "/jupyterhub/proxy$g' {} \; +find templates -type f -exec sed -i 's$.Template.BasePath "/scheduling$.Template.BasePath "/jupyterhub/scheduling$g' {} \; diff --git a/deployment/codefresh-test-local.yaml b/deployment/codefresh-test-local.yaml index 612e214b..19a91c83 100644 --- a/deployment/codefresh-test-local.yaml +++ b/deployment/codefresh-test-local.yaml @@ -32,9 +32,8 @@ steps: working_directory: . commands: - bash cloud-harness/install.sh - - harness-deployment . -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} - -d ${{CF_SHORT_REVISION}}.${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} - -e test-local --write-env -N -i samples + - harness-deployment . 
-n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} + -rs ${{REGISTRY_SECRET}} -e test-local --write-env -N -i jupyterhub - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: @@ -72,33 +71,11 @@ steps: == true forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') == false - cloudharness-frontend-build: - type: build - stage: build - dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' - buildkit: true - build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/cloudharness-frontend-build - title: Cloudharness frontend build - working_directory: ./. - tag: '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' - when: - condition: - any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}', - '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', - '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false - build_static_images: - title: Build static images + build_application_images: type: parallel stage: build steps: - cloudharness-flask: + accounts: type: build stage: build dockerfile: Dockerfile @@ -108,23 +85,18 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/cloudharness-flask - title: Cloudharness flask - working_directory: ./infrastructure/common-images/cloudharness-flask - tag: '${{CLOUDHARNESS_FLASK_TAG}}' + image_name: cloudharness/accounts + title: Accounts + working_directory: ./applications/accounts + tag: '${{ACCOUNTS_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', 
'{{CLOUDHARNESS_FLASK_TAG_EXISTS}}') + buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') == false - build_application_images: - type: parallel - stage: build - steps: - nfsserver: + jupyterhub: type: build stage: build dockerfile: Dockerfile @@ -134,18 +106,19 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/nfsserver - title: Nfsserver - working_directory: ./applications/nfsserver - tag: '${{NFSSERVER_TAG}}' + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + image_name: cloudharness/jupyterhub + title: Jupyterhub + working_directory: ./applications/jupyterhub + tag: '${{JUPYTERHUB_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{NFSSERVER_TAG_EXISTS}}', '{{NFSSERVER_TAG_EXISTS}}') + buildDoesNotExist: includes('${{JUPYTERHUB_TAG_EXISTS}}', '{{JUPYTERHUB_TAG_EXISTS}}') == true - forceNoCache: includes('${{NFSSERVER_TAG_FORCE_BUILD}}', '{{NFSSERVER_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{JUPYTERHUB_TAG_FORCE_BUILD}}', '{{JUPYTERHUB_TAG_FORCE_BUILD}}') == false - accounts: + jupyterhub-zero-to-jupyterhub-k8s-images-secret-sync: type: build stage: build dockerfile: Dockerfile @@ -155,18 +128,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/accounts - title: Accounts - working_directory: ./applications/accounts - tag: '${{ACCOUNTS_TAG}}' + image_name: cloudharness/jupyterhub-zero-to-jupyterhub-k8s-images-secret-sync + title: Jupyterhub zero to jupyterhub k8s images secret sync + working_directory: ./applications/jupyterhub/zero-to-jupyterhub-k8s/images/secret-sync + tag: 
'${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SECRET_SYNC_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') + buildDoesNotExist: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SECRET_SYNC_TAG_EXISTS}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SECRET_SYNC_TAG_EXISTS}}') == true - forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SECRET_SYNC_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SECRET_SYNC_TAG_FORCE_BUILD}}') == false - samples: + jupyterhub-zero-to-jupyterhub-k8s-images-image-awaiter: type: build stage: build dockerfile: Dockerfile @@ -176,20 +151,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloudharness/samples - title: Samples - working_directory: ./applications/samples - tag: '${{SAMPLES_TAG}}' + image_name: cloudharness/jupyterhub-zero-to-jupyterhub-k8s-images-image-awaiter + title: Jupyterhub zero to jupyterhub k8s images image awaiter + working_directory: ./applications/jupyterhub/zero-to-jupyterhub-k8s/images/image-awaiter + tag: '${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_IMAGE_AWAITER_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}') + buildDoesNotExist: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_IMAGE_AWAITER_TAG_EXISTS}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_IMAGE_AWAITER_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}') + forceNoCache: 
includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_IMAGE_AWAITER_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_IMAGE_AWAITER_TAG_FORCE_BUILD}}') == false - samples-print-file: + jupyterhub-zero-to-jupyterhub-k8s-images-singleuser-sample: type: build stage: build dockerfile: Dockerfile @@ -199,19 +174,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/samples-print-file - title: Samples print file - working_directory: ./applications/samples/tasks/print-file - tag: '${{SAMPLES_PRINT_FILE_TAG}}' + image_name: cloudharness/jupyterhub-zero-to-jupyterhub-k8s-images-singleuser-sample + title: Jupyterhub zero to jupyterhub k8s images singleuser sample + working_directory: ./applications/jupyterhub/zero-to-jupyterhub-k8s/images/singleuser-sample + tag: '${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SINGLEUSER_SAMPLE_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}') + buildDoesNotExist: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SINGLEUSER_SAMPLE_TAG_EXISTS}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SINGLEUSER_SAMPLE_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SINGLEUSER_SAMPLE_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_SINGLEUSER_SAMPLE_TAG_FORCE_BUILD}}') == false - samples-secret: + jupyterhub-zero-to-jupyterhub-k8s-images-network-tools: type: build stage: build dockerfile: Dockerfile @@ -221,19 +197,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: 
cloudharness/samples-secret - title: Samples secret - working_directory: ./applications/samples/tasks/secret - tag: '${{SAMPLES_SECRET_TAG}}' + image_name: cloudharness/jupyterhub-zero-to-jupyterhub-k8s-images-network-tools + title: Jupyterhub zero to jupyterhub k8s images network tools + working_directory: ./applications/jupyterhub/zero-to-jupyterhub-k8s/images/network-tools + tag: '${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_NETWORK_TOOLS_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}') + buildDoesNotExist: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_NETWORK_TOOLS_TAG_EXISTS}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_NETWORK_TOOLS_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}') + forceNoCache: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_NETWORK_TOOLS_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_NETWORK_TOOLS_TAG_FORCE_BUILD}}') == false - samples-sum: + jupyterhub-zero-to-jupyterhub-k8s-images-hub: type: build stage: build dockerfile: Dockerfile @@ -243,19 +220,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/samples-sum - title: Samples sum - working_directory: ./applications/samples/tasks/sum - tag: '${{SAMPLES_SUM_TAG}}' + image_name: cloudharness/jupyterhub-zero-to-jupyterhub-k8s-images-hub + title: Jupyterhub zero to jupyterhub k8s images hub + working_directory: ./applications/jupyterhub/zero-to-jupyterhub-k8s/images/hub + tag: '${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_HUB_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}') - == true - forceNoCache: includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', 
'{{SAMPLES_SUM_TAG_FORCE_BUILD}}') + buildDoesNotExist: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_HUB_TAG_EXISTS}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_HUB_TAG_EXISTS}}') == + true + forceNoCache: includes('${{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_HUB_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_ZERO_TO_JUPYTERHUB_K8S_IMAGES_HUB_TAG_FORCE_BUILD}}') == false - common: + jupyterhub-jupyterhub: type: build stage: build dockerfile: Dockerfile @@ -265,19 +243,18 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloudharness/common - title: Common - working_directory: ./applications/common/server - tag: '${{COMMON_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub + title: Jupyterhub jupyterhub + working_directory: ./applications/jupyterhub/src/jupyterhub + tag: '${{JUPYTERHUB_JUPYTERHUB_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}') - == true - forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}') - == false - workflows-send-result-event: + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_TAG_FORCE_BUILD}}') == false + jupyterhub-jupyterhub-singleuser: type: build stage: build dockerfile: Dockerfile @@ -287,19 +264,18 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/workflows-send-result-event - title: Workflows send result event - working_directory: ./applications/workflows/tasks/send-result-event - tag: '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}' + image_name: 
cloudharness/jupyterhub-jupyterhub-singleuser + title: Jupyterhub jupyterhub singleuser + working_directory: ./applications/jupyterhub/src/jupyterhub/singleuser + tag: '${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}', - '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false - workflows-extract-download: + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_SINGLEUSER_TAG_FORCE_BUILD}}') == false + jupyterhub-jupyterhub-examples-service-fastapi: type: build stage: build dockerfile: Dockerfile @@ -309,18 +285,20 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/workflows-extract-download - title: Workflows extract download - working_directory: ./applications/workflows/tasks/extract-download - tag: '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub-examples-service-fastapi + title: Jupyterhub jupyterhub examples service fastapi + working_directory: ./applications/jupyterhub/src/jupyterhub/examples/service-fastapi + tag: '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}', - '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false - workflows-notify-queue: + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_EXISTS}}', + 
'{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_EXISTS}}') == + true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_SERVICE_FASTAPI_TAG_FORCE_BUILD}}') + == false + jupyterhub-jupyterhub-examples-postgres-db: type: build stage: build dockerfile: Dockerfile @@ -330,19 +308,19 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/workflows-notify-queue - title: Workflows notify queue - working_directory: ./applications/workflows/tasks/notify-queue - tag: '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub-examples-postgres-db + title: Jupyterhub jupyterhub examples postgres db + working_directory: ./applications/jupyterhub/src/jupyterhub/examples/postgres/db + tag: '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}', - '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}', - '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false - workflows: + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_DB_TAG_FORCE_BUILD}}') + == false + jupyterhub-jupyterhub-examples-postgres-hub: type: build stage: build dockerfile: Dockerfile @@ -352,50 +330,19 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} - image_name: cloudharness/workflows - title: Workflows - 
working_directory: ./applications/workflows/server - tag: '${{WORKFLOWS_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub-examples-postgres-hub + title: Jupyterhub jupyterhub examples postgres hub + working_directory: ./applications/jupyterhub/src/jupyterhub/examples/postgres/hub + tag: '${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}') - == true - forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}') + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_EXAMPLES_POSTGRES_HUB_TAG_FORCE_BUILD}}') == false - tests_unit: - stage: unittest - type: parallel - steps: - samples_ut: - title: Unit tests for samples - commands: - - pytest /usr/src/app/samples/test - image: '${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}' - deployment: - stage: deploy - type: helm - working_directory: ./${{CF_REPO_NAME}} - title: Installing chart - arguments: - helm_version: 3.6.2 - chart_name: deployment/helm - release_name: test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} - kube_context: '${{CLUSTER_NAME}}' - namespace: test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} - chart_version: '${{CF_BUILD_ID}}' - cmd_ps: --timeout 600s --create-namespace - custom_value_files: - - ./deployment/helm/values.yaml - custom_values: - - apps_samples_harness_secrets_asecret=${{ASECRET}} - build_test_images: - title: Build test images - type: parallel - stage: qa - steps: - test-e2e: + jupyterhub-jupyterhub-demo-image: type: build stage: build dockerfile: Dockerfile @@ -405,128 +352,58 @@ steps: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - image_name: cloudharness/test-e2e - 
title: Test e2e - working_directory: ./test/test-e2e - tag: '${{TEST_E2E_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub-demo-image + title: Jupyterhub jupyterhub demo image + working_directory: ./applications/jupyterhub/src/jupyterhub/demo-image + tag: '${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}') - == true - forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') - == false - test-api: + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_DEMO_IMAGE_TAG_FORCE_BUILD}}') == false + jupyterhub-jupyterhub-onbuild: type: build stage: build - dockerfile: test/test-api/Dockerfile + dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} - image_name: cloudharness/test-api - title: Test api - working_directory: ./. 
- tag: '${{TEST_API_TAG}}' + image_name: cloudharness/jupyterhub-jupyterhub-onbuild + title: Jupyterhub jupyterhub onbuild + working_directory: ./applications/jupyterhub/src/jupyterhub/onbuild + tag: '${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG}}' when: condition: any: - buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') - == true - forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') - == false - wait_deployment: - stage: qa - title: Wait deployment to be ready - image: codefresh/kubectl - commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} - - kubectl rollout status deployment/accounts - - kubectl rollout status deployment/samples - - kubectl rollout status deployment/common - - kubectl rollout status deployment/workflows - - sleep 60 - tests_api: - stage: qa - title: Api tests - working_directory: /home/test - image: '${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}' - fail_fast: false - commands: - - echo $APP_NAME - scale: - samples_api_test: - title: samples api test - volumes: - - '${{CF_REPO_NAME}}/applications/samples:/home/test' - - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' - environment: - - APP_URL=https://samples.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api - - USERNAME=sample@testuser.com - - PASSWORD=test - commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://samples.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api -c all --skip-deprecated-operations - --hypothesis-suppress-health-check=too_slow --hypothesis-deadline=180000 - --request-timeout=180000 --hypothesis-max-examples=2 --show-errors-tracebacks - - pytest -v test/api - common_api_test: - title: common api test - volumes: - - '${{CF_REPO_NAME}}/applications/common:/home/test' - - 
'${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' - environment: - - APP_URL=https://common.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api - commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://common.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api -c all - workflows_api_test: - title: workflows api test - volumes: - - '${{CF_REPO_NAME}}/applications/workflows:/home/test' - - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' - environment: - - APP_URL=https://workflows.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api - commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://workflows.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api -c all - hooks: - on_fail: - exec: - image: alpine - commands: - - cf_export FAILED=failed - tests_e2e: - stage: qa - title: End to end tests - working_directory: /home/test - image: '${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}' - fail_fast: false - commands: - - yarn test - scale: - samples_e2e_test: - title: samples e2e test - volumes: - - '${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples' - environment: - - APP_URL=https://samples.${{CF_SHORT_REVISION}}.${{DOMAIN}} - - USERNAME=sample@testuser.com - - PASSWORD=test - hooks: - on_fail: - exec: - image: alpine - commands: - - cf_export FAILED=failed + buildDoesNotExist: includes('${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_EXISTS}}', + '{{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_EXISTS}}') == true + forceNoCache: includes('${{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_FORCE_BUILD}}', + '{{JUPYTERHUB_JUPYTERHUB_ONBUILD_TAG_FORCE_BUILD}}') == false + deployment: + stage: deploy + type: helm + working_directory: ./${{CF_REPO_NAME}} + title: Installing chart + arguments: + helm_version: 3.6.2 + chart_name: deployment/helm + release_name: test-${{NAMESPACE_BASENAME}} + kube_context: '${{CLUSTER_NAME}}' + namespace: test-${{NAMESPACE_BASENAME}} 
+ chart_version: '${{CF_SHORT_REVISION}}' + cmd_ps: --timeout 600s --create-namespace + custom_value_files: + - ./deployment/helm/values.yaml + custom_values: [] approval: type: pending-approval stage: qa - title: Approve with failed tests + title: Approve anyway and delete deployment description: The pipeline will fail after ${{WAIT_ON_FAIL}} minutes timeout: timeUnit: minutes @@ -536,21 +413,11 @@ steps: condition: all: error: '"${{FAILED}}" == "failed"' - wait_on_fail: '${{WAIT_ON_FAIL}}' - dummy_end: - title: Dummy step - description: Without this, the on_finish hook is executed before the approval - step - image: python:3.9.10 - stage: qa - when: - condition: - all: - error: '"${{FAILED}}" == "failed"' - wait_on_fail: '${{WAIT_ON_FAIL}}' -hooks: - on_finish: + delete_deployment: + title: Delete deployment + description: The deployment is deleted at the end of the pipeline image: codefresh/kubectl + stage: qa commands: - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl delete ns test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} + - kubectl delete ns test-${{NAMESPACE_BASENAME}} From f87869b8462be4bebe18b3eb9adf2c63392eaf1f Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 22 Jan 2024 12:38:05 +0100 Subject: [PATCH 004/210] CH-110 jupyterhub update OK; add tests --- .../deploy/resources/hub/jupyterhub_config.py | 6 ++-- .../jupyterhub/deploy/values-test.yaml | 7 +++++ applications/jupyterhub/deploy/values.yaml | 6 ++++ .../harness_jupyter/jupyterhub.py | 2 +- applications/samples/deploy/values-test.yaml | 8 +++++ deployment/codefresh-test.yaml | 30 +++++++++++++++++++ 6 files changed, 55 insertions(+), 4 deletions(-) create mode 100644 applications/jupyterhub/deploy/values-test.yaml diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py index 5ebe20b5..8fdfa8c1 100755 --- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py +++ 
b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py @@ -12,8 +12,6 @@ #CLOUDHARNESS: EDIT START import logging -from kubernetes import client -from jupyterhub.utils import url_path_join try: from harness_jupyter.jupyterhub import harness_hub @@ -492,6 +490,7 @@ def camelCaseify(s): cfg.pop("keys", None) c[app].update(cfg) + # load /usr/local/etc/jupyterhub/jupyterhub_config.d config files config_dir = "/usr/local/etc/jupyterhub/jupyterhub_config.d" if os.path.isdir(config_dir): @@ -560,4 +559,5 @@ def camelCaseify(s): c.registry = get_config('registry') c.domain = get_config('root.domain') c.namespace = get_config('root.namespace') -# CLOUDHARNESS: EDIT END \ No newline at end of file +# CLOUDHARNESS: EDIT END + \ No newline at end of file diff --git a/applications/jupyterhub/deploy/values-test.yaml b/applications/jupyterhub/deploy/values-test.yaml new file mode 100644 index 00000000..3ca312d3 --- /dev/null +++ b/applications/jupyterhub/deploy/values-test.yaml @@ -0,0 +1,7 @@ +harness: + accounts: + users: + - username: samplehub@testuser.com + realmRoles: + - offline_access + diff --git a/applications/jupyterhub/deploy/values.yaml b/applications/jupyterhub/deploy/values.yaml index 41e108d6..b871b33b 100755 --- a/applications/jupyterhub/deploy/values.yaml +++ b/applications/jupyterhub/deploy/values.yaml @@ -25,6 +25,12 @@ harness: # EDIT: CLOUDHARNESS quota-ws-maxmem: 0.5 # sets the storage dedicated to the user data in Gb units (float) quota-storage-max: 1.25 + test: + e2e: + enabled: true + smoketest: true + ignoreRequestErrors: false + ignoreConsoleErrors: false # fullnameOverride and nameOverride distinguishes blank strings, null values, # and non-blank strings. For more details, see the configuration reference. 
diff --git a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py index 220883a8..fc4d0dd0 100644 --- a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py +++ b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py @@ -139,7 +139,7 @@ def change_pod_manifest(self: KubeSpawner): if 'subdomain' in harness and harness['subdomain'] == subdomain: ws_image = getattr(self, "ws_image", None) - logging.info("Subdomain is", subdomain) + logging.info("Subdomain is %s", subdomain) if ws_image: # try getting the image + tag from values.yaml ch_conf = conf.get_configuration() diff --git a/applications/samples/deploy/values-test.yaml b/applications/samples/deploy/values-test.yaml index 3555108f..14274fd6 100644 --- a/applications/samples/deploy/values-test.yaml +++ b/applications/samples/deploy/values-test.yaml @@ -1,4 +1,12 @@ harness: + dependencies: + soft: + - workflows + - events + - accounts + - common + - nfsserver + - jupyterhub accounts: roles: - role1 diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index ff7a88af..7280c8e6 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -165,6 +165,28 @@ steps: == true forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') == false + jupyterhub: + type: build + stage: build + dockerfile: Dockerfile + registry: '${{CODEFRESH_REGISTRY}}' + buildkit: true + build_arguments: + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + image_name: cloudharness/jupyterhub + title: Jupyterhub + working_directory: ./applications/jupyterhub + tag: '${{JUPYTERHUB_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{JUPYTERHUB_TAG_EXISTS}}', '{{JUPYTERHUB_TAG_EXISTS}}') + 
== true + forceNoCache: includes('${{JUPYTERHUB_TAG_FORCE_BUILD}}', '{{JUPYTERHUB_TAG_FORCE_BUILD}}') + == false samples: type: build stage: build @@ -510,6 +532,14 @@ steps: commands: - yarn test scale: + jupyterhub_e2e_test: + title: jupyterhub e2e test + volumes: + - '${{CF_REPO_NAME}}/applications/jupyterhub/test/e2e:/home/test/__tests__/jupyterhub' + environment: + - APP_URL=https://hub.${{DOMAIN}} + - USERNAME=samplehub@testuser.com + - PASSWORD=test samples_e2e_test: title: samples e2e test volumes: From 9c905eb17cc78d39c5fc560bc5f250ab649ec7ba Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 22 Jan 2024 15:13:19 +0100 Subject: [PATCH 005/210] CH-110 fix test --- applications/jupyterhub/deploy/values-test.yaml | 6 ------ deployment/codefresh-test.yaml | 4 +++- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/applications/jupyterhub/deploy/values-test.yaml b/applications/jupyterhub/deploy/values-test.yaml index 3ca312d3..8b137891 100644 --- a/applications/jupyterhub/deploy/values-test.yaml +++ b/applications/jupyterhub/deploy/values-test.yaml @@ -1,7 +1 @@ -harness: - accounts: - users: - - username: samplehub@testuser.com - realmRoles: - - offline_access diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index bbedd788..d15db5cd 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -467,7 +467,9 @@ steps: - kubectl config use-context ${{CLUSTER_NAME}} - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}} - kubectl rollout status deployment/accounts + - kubectl rollout status deployment/argo-server-gk - kubectl rollout status deployment/samples + - kubectl rollout status deployment/samples-gk - kubectl rollout status deployment/common - kubectl rollout status deployment/workflows - sleep 60 @@ -536,7 +538,7 @@ steps: - '${{CF_REPO_NAME}}/applications/jupyterhub/test/e2e:/home/test/__tests__/jupyterhub' environment: - APP_URL=https://hub.${{DOMAIN}} - - 
USERNAME=samplehub@testuser.com + - USERNAME=sample@testuser.com - PASSWORD=test samples_e2e_test: title: samples e2e test From 313b9e47491c5c2c1a8795dab01147893245dc8d Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 22 Jan 2024 16:52:10 +0100 Subject: [PATCH 006/210] CH-110 Disable hub network policy --- applications/jupyterhub/deploy/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/jupyterhub/deploy/values.yaml b/applications/jupyterhub/deploy/values.yaml index b871b33b..9be3ad5d 100755 --- a/applications/jupyterhub/deploy/values.yaml +++ b/applications/jupyterhub/deploy/values.yaml @@ -136,7 +136,7 @@ hub: maxUnavailable: minAvailable: 1 networkPolicy: - enabled: true + enabled: false # EDIT: CLOUDHARNESS -- cannot connect to accounts otherwise ingress: [] egress: [] egressAllowRules: From 24132a5d52e806d21cfabad02345f6140ecfebf6 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Tue, 23 Jan 2024 16:38:26 +0100 Subject: [PATCH 007/210] CH-94 add control on image prepull --- .../image-puller/_helpers-daemonset.tpl | 20 ++++++++++++ .../jupyterhub/deploy/values-test.yaml | 6 +++- docs/jupyterhub.md | 31 ++++++++++++++++++- 3 files changed, 55 insertions(+), 2 deletions(-) diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl index 528345c0..f872a336 100644 --- a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl +++ b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl @@ -199,7 +199,27 @@ spec: securityContext: {{- . 
| toYaml | nindent 12 }} {{- end }} + {{- end }} + {{- /* --- EDIT: CLOUDHARNESS pull images --- */}} + {{- if $.Values.apps.jupyterhub.harness.jupyterhub.prepull -}} + {{- range $k, $v := $.Values.apps.jupyterhub.harness.jupyterhub.prepull }} + - name: image-pull--{{ $v }} + image: {{ get ( get $.Values "task-images" ) $v }} + command: + - /bin/sh + - -c + - echo "Pulling complete" + {{- with $.Values.apps.jupyterhub.prePuller.resources }} + resources: + {{- . | toYaml | nindent 12 }} + {{- end }} + {{- with $.Values.apps.jupyterhub.prePuller.containerSecurityContext }} + securityContext: + {{- . | toYaml | nindent 12 }} + {{- end }} {{- end }} + {{- end }} + {{- /* --- END EDIT: CLOUDHARNESS pull images --- */}} containers: - name: pause image: {{ .Values.apps.jupyterhub.prePuller.pause.image.name }}:{{ .Values.apps.jupyterhub.prePuller.pause.image.tag }} diff --git a/applications/jupyterhub/deploy/values-test.yaml b/applications/jupyterhub/deploy/values-test.yaml index 8b137891..653cae1e 100644 --- a/applications/jupyterhub/deploy/values-test.yaml +++ b/applications/jupyterhub/deploy/values-test.yaml @@ -1 +1,5 @@ - +harness: + jupyterhub: + prepull: + - cloudharness-base + diff --git a/docs/jupyterhub.md b/docs/jupyterhub.md index 709ede5f..3d7046bc 100644 --- a/docs/jupyterhub.md +++ b/docs/jupyterhub.md @@ -35,6 +35,7 @@ Edit the `deploy/values.yaml` file `harness.jupyterhub` section to edit configu - `applicationHook`: change the hook function (advances, see below) - `extraConfig`: allows you to add Python snippets to the jupyterhub_config.py file - `spawnerExtraConfig`: allows you to add values to the spawner object without the need of creating a new hook +- `prepull`: indicate images that will be prepulled from the current build Example: ```yaml @@ -46,6 +47,8 @@ harness: name: proxy-public jupyterhub: args: ["--debug", "--NotebookApp.default_url=/lab"] + prepull: + - cloudharness-base extraConfig: timing: | c.Spawner.port = 8000 @@ -179,4 +182,30 @@ 
Cloudharness JupyterHub is integrated with the accounts service so enabling a sh The spawner is also adapted providing a hook to allow other applications to be based on the hub spawner to run with their own configurations. -Available \ No newline at end of file +Available + +## Prepull configuration +Image prepull can be configured in two ways. + +For static images (tag known), can set `prePuller.extraImages` on `applications/jupyterhub/deploy/values.yaml`, like: + +```yaml +prePuller: + extraImages: + nginx-image: + name: nginx + tag: latest +``` + +For images whose build is managed by CloudHarness the tag is unknown during the configuration; +for this case, can rely on the dynamic configuration through `harness.jupyterhub.prepull` variable, like: + +```yaml +harness: + jupyterhub: + prepull: + - cloudharness-base +``` + +> Note that only built images defined as tasks, base or common can be used here. +> If an image is not included, it might be required to include it also as a build dependency or, better, define task images directly inside your jupyterhub application override.
From 99958b7dbd8ab21ab42addf16909653fc365ec46 Mon Sep 17 00:00:00 2001 From: Zoran Sinnema Date: Mon, 29 Jan 2024 10:57:13 +0100 Subject: [PATCH 008/210] chore(): save jupyterhub profile list in self: --- .../jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py | 1 + 1 file changed, 1 insertion(+) diff --git a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py index fc4d0dd0..ac7dafa6 100644 --- a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py +++ b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py @@ -18,6 +18,7 @@ def custom_options_form(spawner, abc): # let's skip the profile selection form for now # ToDo: for future we can remove this hook + spawner._ch_profile_list = spawner.profile_list spawner.profile_list = [] # ref: https://github.com/jupyterhub/kubespawner/blob/37a80abb0a6c826e5c118a068fa1cf2725738038/kubespawner/spawner.py#L1885-L1935 return spawner._options_form_default() From 7126e57da27642aa5db6148692212443e23ed5cf Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Tue, 30 Jan 2024 13:34:57 +0100 Subject: [PATCH 009/210] CH-110 fix custom prepull issue --- .../deploy/templates/image-puller/_helpers-daemonset.tpl | 4 ++-- applications/jupyterhub/deploy/values.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl index f872a336..04fb18a3 100644 --- a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl +++ b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl @@ -201,8 +201,8 @@ spec: {{- end }} {{- end }} {{- /* --- EDIT: CLOUDHARNESS pull images --- */}} - {{- if $.Values.apps.jupyterhub.harness.jupyterhub.prepull -}} - {{- range $k, $v := 
$.Values.apps.jupyterhub.harness.jupyterhub.prepull }} + {{- if $.Values.apps.jupyterhub.harness.dependencies.prepull -}} + {{- range $k, $v := $.Values.apps.jupyterhub.harness.dependencies.prepull }} - name: image-pull--{{ $v }} image: {{ get ( get $.Values "task-images" ) $v }} command: diff --git a/applications/jupyterhub/deploy/values.yaml b/applications/jupyterhub/deploy/values.yaml index 9be3ad5d..5acc7928 100755 --- a/applications/jupyterhub/deploy/values.yaml +++ b/applications/jupyterhub/deploy/values.yaml @@ -12,6 +12,7 @@ harness: # EDIT: CLOUDHARNESS - accounts build: - cloudharness-base + prepull: [] # additional images to add to the prepuller quotas: # sets the maximum number of (included named) servers open concurrently (int) quota-ws-open: 3 @@ -31,6 +32,7 @@ harness: # EDIT: CLOUDHARNESS smoketest: true ignoreRequestErrors: false ignoreConsoleErrors: false + # fullnameOverride and nameOverride distinguishes blank strings, null values, # and non-blank strings. For more details, see the configuration reference. 
From d25230ff5144cfead4bf98eb343ddd684dfa654a Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 6 Feb 2024 13:23:28 -0600 Subject: [PATCH 010/210] CH-100 Add first code to call a dedicated docker-compose generation --- .../deploy/templates/{ => helm}/argo-sa.yaml | 0 .../templates/{ => helm}/broker-config.yml | 0 .../templates/{ => helm}/configmap.yaml | 0 .../templates/{ => helm}/deployments.yml | 0 .../deploy/templates/{ => helm}/roles.yml | 0 .../deploy/templates/{ => helm}/services.yml | 0 .../templates/{ => helm}/zoo-config.yml | 0 .../{ => helm}/_helpers-auth-rework.tpl | 0 .../templates/{ => helm}/_helpers-names.tpl | 0 .../deploy/templates/{ => helm}/_helpers.tpl | 0 .../{ => helm}/hub/_helpers-passwords.tpl | 0 .../templates/{ => helm}/hub/configmap.yaml | 0 .../templates/{ => helm}/hub/deployment.yaml | 0 .../templates/{ => helm}/hub/netpol.yaml | 0 .../deploy/templates/{ => helm}/hub/pdb.yaml | 0 .../deploy/templates/{ => helm}/hub/pvc.yaml | 0 .../deploy/templates/{ => helm}/hub/rbac.yaml | 0 .../templates/{ => helm}/hub/secret.yaml | 0 .../templates/{ => helm}/hub/service.yaml | 0 .../image-puller/_helpers-daemonset.tpl | 0 .../image-puller/daemonset-continuous.yaml | 0 .../image-puller/daemonset-hook.yaml | 0 .../{ => helm}/image-puller/job.yaml | 0 .../{ => helm}/image-puller/rbac.yaml | 0 .../{ => helm}/proxy/autohttps/_README.txt | 0 .../{ => helm}/proxy/autohttps/configmap.yaml | 0 .../proxy/autohttps/deployment.yaml | 0 .../{ => helm}/proxy/autohttps/rbac.yaml | 0 .../{ => helm}/proxy/autohttps/service.yaml | 0 .../{ => helm}/proxy/deployment.yaml | 0 .../templates/{ => helm}/proxy/netpol.yaml | 0 .../templates/{ => helm}/proxy/pdb.yaml | 0 .../templates/{ => helm}/proxy/secret.yaml | 0 .../templates/{ => helm}/proxy/service.yaml | 0 .../scheduling/_scheduling-helpers.tpl | 0 .../{ => helm}/scheduling/priorityclass.yaml | 0 .../scheduling/user-placeholder/pdb.yaml | 0 .../user-placeholder/priorityclass.yaml | 0 
.../user-placeholder/statefulset.yaml | 0 .../scheduling/user-scheduler/configmap.yaml | 0 .../scheduling/user-scheduler/deployment.yaml | 0 .../scheduling/user-scheduler/pdb.yaml | 0 .../scheduling/user-scheduler/rbac.yaml | 0 .../{ => helm}/singleuser/netpol.yaml | 0 .../deploy/templates/{ => helm}/_helpers.tpl | 0 .../templates/{ => helm}/clusterrole.yaml | 0 .../{ => helm}/clusterrolebinding.yaml | 0 .../templates/{ => helm}/nfs-server.yaml | 0 .../{ => helm}/podsecuritypolicy.yaml | 0 .../deploy/templates/{ => helm}/role.yaml | 0 .../templates/{ => helm}/rolebinding.yaml | 0 .../templates/{ => helm}/serviceaccount.yaml | 0 .../templates/{ => helm}/storageclass.yaml | 0 .../deploy/templates/{ => helm}/redis.yaml | 0 deployment-configuration/compose/.helmignore | 22 + deployment-configuration/compose/Chart.yaml | 10 + deployment-configuration/compose/README.md | 4 + .../compose/templates/auto-compose.yaml | 103 +++ deployment-configuration/compose/values.yaml | 79 ++ .../ch_cli_tools/dockercompose.py | 753 ++++++++++++++++++ .../deployment-cli-tools/ch_cli_tools/helm.py | 10 +- tools/deployment-cli-tools/harness-deployment | 41 +- 62 files changed, 1015 insertions(+), 7 deletions(-) rename applications/argo/deploy/templates/{ => helm}/argo-sa.yaml (100%) rename applications/events/deploy/templates/{ => helm}/broker-config.yml (100%) rename applications/events/deploy/templates/{ => helm}/configmap.yaml (100%) rename applications/events/deploy/templates/{ => helm}/deployments.yml (100%) rename applications/events/deploy/templates/{ => helm}/roles.yml (100%) rename applications/events/deploy/templates/{ => helm}/services.yml (100%) rename applications/events/deploy/templates/{ => helm}/zoo-config.yml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers-auth-rework.tpl (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers-names.tpl (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers.tpl (100%) rename 
applications/jupyterhub/deploy/templates/{ => helm}/hub/_helpers-passwords.tpl (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/netpol.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/pvc.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/secret.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/hub/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/_helpers-daemonset.tpl (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/daemonset-continuous.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/daemonset-hook.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/job.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/_README.txt (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/netpol.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/pdb.yaml (100%) rename 
applications/jupyterhub/deploy/templates/{ => helm}/proxy/secret.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/_scheduling-helpers.tpl (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/priorityclass.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/priorityclass.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/statefulset.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{ => helm}/singleuser/netpol.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/_helpers.tpl (100%) rename applications/nfsserver/deploy/templates/{ => helm}/clusterrole.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/clusterrolebinding.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/nfs-server.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/podsecuritypolicy.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/role.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/rolebinding.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/serviceaccount.yaml (100%) rename applications/nfsserver/deploy/templates/{ => helm}/storageclass.yaml (100%) rename applications/sentry/deploy/templates/{ => 
helm}/redis.yaml (100%) create mode 100644 deployment-configuration/compose/.helmignore create mode 100644 deployment-configuration/compose/Chart.yaml create mode 100644 deployment-configuration/compose/README.md create mode 100644 deployment-configuration/compose/templates/auto-compose.yaml create mode 100644 deployment-configuration/compose/values.yaml create mode 100644 tools/deployment-cli-tools/ch_cli_tools/dockercompose.py diff --git a/applications/argo/deploy/templates/argo-sa.yaml b/applications/argo/deploy/templates/helm/argo-sa.yaml similarity index 100% rename from applications/argo/deploy/templates/argo-sa.yaml rename to applications/argo/deploy/templates/helm/argo-sa.yaml diff --git a/applications/events/deploy/templates/broker-config.yml b/applications/events/deploy/templates/helm/broker-config.yml similarity index 100% rename from applications/events/deploy/templates/broker-config.yml rename to applications/events/deploy/templates/helm/broker-config.yml diff --git a/applications/events/deploy/templates/configmap.yaml b/applications/events/deploy/templates/helm/configmap.yaml similarity index 100% rename from applications/events/deploy/templates/configmap.yaml rename to applications/events/deploy/templates/helm/configmap.yaml diff --git a/applications/events/deploy/templates/deployments.yml b/applications/events/deploy/templates/helm/deployments.yml similarity index 100% rename from applications/events/deploy/templates/deployments.yml rename to applications/events/deploy/templates/helm/deployments.yml diff --git a/applications/events/deploy/templates/roles.yml b/applications/events/deploy/templates/helm/roles.yml similarity index 100% rename from applications/events/deploy/templates/roles.yml rename to applications/events/deploy/templates/helm/roles.yml diff --git a/applications/events/deploy/templates/services.yml b/applications/events/deploy/templates/helm/services.yml similarity index 100% rename from 
applications/events/deploy/templates/services.yml rename to applications/events/deploy/templates/helm/services.yml diff --git a/applications/events/deploy/templates/zoo-config.yml b/applications/events/deploy/templates/helm/zoo-config.yml similarity index 100% rename from applications/events/deploy/templates/zoo-config.yml rename to applications/events/deploy/templates/helm/zoo-config.yml diff --git a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl rename to applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl diff --git a/applications/jupyterhub/deploy/templates/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/_helpers-names.tpl rename to applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl diff --git a/applications/jupyterhub/deploy/templates/_helpers.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/_helpers.tpl rename to applications/jupyterhub/deploy/templates/helm/_helpers.tpl diff --git a/applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl b/applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl rename to applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl diff --git a/applications/jupyterhub/deploy/templates/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/configmap.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml diff --git 
a/applications/jupyterhub/deploy/templates/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/deployment.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/netpol.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/pdb.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/pvc.yaml b/applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/pvc.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/rbac.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/secret.yaml b/applications/jupyterhub/deploy/templates/helm/hub/secret.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/secret.yaml rename to applications/jupyterhub/deploy/templates/helm/hub/secret.yaml diff --git a/applications/jupyterhub/deploy/templates/hub/service.yaml b/applications/jupyterhub/deploy/templates/helm/hub/service.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/hub/service.yaml rename to 
applications/jupyterhub/deploy/templates/helm/hub/service.yaml diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl rename to applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl diff --git a/applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml rename to applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml diff --git a/applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml rename to applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml diff --git a/applications/jupyterhub/deploy/templates/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/image-puller/job.yaml rename to applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml diff --git a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/image-puller/rbac.yaml rename to applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt 
similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/deployment.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml 
b/applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/netpol.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/pdb.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/secret.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/secret.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml diff --git a/applications/jupyterhub/deploy/templates/proxy/service.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/service.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/proxy/service.yaml rename to applications/jupyterhub/deploy/templates/helm/proxy/service.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl b/applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl rename to applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl diff --git a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml 
b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml diff --git 
a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/singleuser/netpol.yaml rename to applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml diff --git a/applications/nfsserver/deploy/templates/_helpers.tpl b/applications/nfsserver/deploy/templates/helm/_helpers.tpl similarity index 100% rename from applications/nfsserver/deploy/templates/_helpers.tpl rename to applications/nfsserver/deploy/templates/helm/_helpers.tpl diff --git a/applications/nfsserver/deploy/templates/clusterrole.yaml b/applications/nfsserver/deploy/templates/helm/clusterrole.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/clusterrole.yaml rename to applications/nfsserver/deploy/templates/helm/clusterrole.yaml diff --git a/applications/nfsserver/deploy/templates/clusterrolebinding.yaml b/applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/clusterrolebinding.yaml rename to applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml diff --git 
a/applications/nfsserver/deploy/templates/nfs-server.yaml b/applications/nfsserver/deploy/templates/helm/nfs-server.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/nfs-server.yaml rename to applications/nfsserver/deploy/templates/helm/nfs-server.yaml diff --git a/applications/nfsserver/deploy/templates/podsecuritypolicy.yaml b/applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/podsecuritypolicy.yaml rename to applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml diff --git a/applications/nfsserver/deploy/templates/role.yaml b/applications/nfsserver/deploy/templates/helm/role.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/role.yaml rename to applications/nfsserver/deploy/templates/helm/role.yaml diff --git a/applications/nfsserver/deploy/templates/rolebinding.yaml b/applications/nfsserver/deploy/templates/helm/rolebinding.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/rolebinding.yaml rename to applications/nfsserver/deploy/templates/helm/rolebinding.yaml diff --git a/applications/nfsserver/deploy/templates/serviceaccount.yaml b/applications/nfsserver/deploy/templates/helm/serviceaccount.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/serviceaccount.yaml rename to applications/nfsserver/deploy/templates/helm/serviceaccount.yaml diff --git a/applications/nfsserver/deploy/templates/storageclass.yaml b/applications/nfsserver/deploy/templates/helm/storageclass.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/storageclass.yaml rename to applications/nfsserver/deploy/templates/helm/storageclass.yaml diff --git a/applications/sentry/deploy/templates/redis.yaml b/applications/sentry/deploy/templates/helm/redis.yaml similarity index 100% rename from applications/sentry/deploy/templates/redis.yaml rename to 
applications/sentry/deploy/templates/helm/redis.yaml diff --git a/deployment-configuration/compose/.helmignore b/deployment-configuration/compose/.helmignore new file mode 100644 index 00000000..50af0317 --- /dev/null +++ b/deployment-configuration/compose/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/deployment-configuration/compose/Chart.yaml b/deployment-configuration/compose/Chart.yaml new file mode 100644 index 00000000..f294c3e7 --- /dev/null +++ b/deployment-configuration/compose/Chart.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +appVersion: "0.0.1" +description: CloudHarness Helm Chart +name: cloudharness +version: 0.0.1 +maintainers: + - name: Filippo Ledda + email: filippo@metacell.us + - name: Zoran Sinnema + email: zoran@metacell.us diff --git a/deployment-configuration/compose/README.md b/deployment-configuration/compose/README.md new file mode 100644 index 00000000..abeab69d --- /dev/null +++ b/deployment-configuration/compose/README.md @@ -0,0 +1,4 @@ +# CloudHarness Helm chart: deploy CloudHarness to k8s + +Helm is used to define the CloudHarness deployment on Kubernetes. For further information about Helm, see https://helm.sh. 
+ diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml new file mode 100644 index 00000000..5b4893ba --- /dev/null +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -0,0 +1,103 @@ +version: '3.7' + +services: +{{- range $app_name, $app_config := .Values.apps }} + {{- if has $app_name (list "argo" "nfsserver" "workflows" "events" ) }} {{- /* We deactivate generation for some services */}} + {{- continue }} + {{- end}} + {{ $deployment := $app_config.harness.deployment }} + {{ $app_name }}: + {{- with $app_config.domain }} + domainname: {{ . }} + {{- end }} + networks: + - ch + {{- with $app_config.image }} + image: {{ . }} + {{- end }} + {{- with $app_config.harness.service.port }} + ports: + - "{{ . }}:{{ $app_config.harness.deployment.port }}" + {{- end}} + deploy: + mode: "replicated" + replicas: {{ $deployment.replicas | default 1 }} + resources: + limits: + cpus: {{ $deployment.resources.limits.cpu | default "50m" }} + memory: {{ trimSuffix "i" $deployment.resources.limits.memory | default "64M" }} + reservations: + cpus: {{ $deployment.resources.requests.cpu | default "25m" }} + memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }} + environment: + - CH_CURRENT_APP_NAME={{ $app_name | quote }} + + {{- range $.Values.env }} + - {{ .name }}={{ .value | quote }} + {{- end }} + {{- /*{{- range $.Values.env }} + - {{ .name }}={{ .value | quote }} + {{- end }} */}} + {{- range $app_config.harness.env }} + - {{ .name }}={{ .value | quote }} + {{- end }} + {{- with $app_config.harness.dependencies.soft }} + # links: + # {{- range . }} + # - {{ . }} + # {{- end }} + {{- end }} + {{- with $app_config.harness.dependencies.hard }} + depends_on: + {{- range . }} + - {{ . 
}} + {{- end }} + {{- end }} + {{- if or $deployment.volume $app_config.harness.resources }} + volumes: + {{- with $deployment.volume }} + - type: volume + source: {{ .name }} + target: {{ .mountpath }} + {{- end}} + {{- with $app_config.harness.resources }} + {{- range .}} + - type: bind + source: compose/resources/{{ $app_name }}/{{.src }} + target: {{ .dst }} + {{- end }} + {{- end}} + {{- end }} +{{- end }} + + traefik: + image: "traefik:v2.2" + container_name: "traefik" + networks: + - ch + command: + - "--log.level=INFO" + - "--api.insecure=true" + - "--providers.docker=true" + - "--providers.docker.exposedbydefault=false" + - "--entrypoints.web.address=:80" + - "--entrypoints.websecure.address=:443" + - "--providers.file.directory=/etc/traefik/dynamic_conf" + ports: + - "80:80" + - "443:443" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + - "./certs/:/certs/:ro" + - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" + +networks: + ch: + name: ch_network + +volumes: # this inclusion needs to be conditional +{{- range $app_name, $app_config := .Values.apps }} + {{- with $app_config.harness.deployment.volume }} + {{ .name }}: + {{- end }} +{{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/values.yaml b/deployment-configuration/compose/values.yaml new file mode 100644 index 00000000..434dcac7 --- /dev/null +++ b/deployment-configuration/compose/values.yaml @@ -0,0 +1,79 @@ +# -- If set to true, local DNS mapping is added to pods. +local: false +# -- Enables/disables Gatekeeper. +secured_gatekeepers: true +# -- The root domain. +domain: ${{DOMAIN}} +# -- The K8s namespace. +namespace: ch +# -- Name of mainapp, routes incoming traffic of root `domain` to this app. +mainapp: accounts +registry: + # -- The docker registry. + name: "localhost:5000" + # -- Optional secret used for pulling from docker registry. + secret: +# -- Docker tag used to pull images. +tag: latest +# -- List of applications.
+# @default -- Will be filled automatically. +apps: {} +env: + # -- Cloud Harness version + - name: CH_VERSION + value: 0.0.1 + # -- Cloud harness chart version + - name: CH_CHART_VERSION + value: 0.0.1 +privenv: + # -- Defines a secret as private environment variable that is injected in containers. + - name: CH_SECRET + value: In God we trust; all others must bring data. ― W. Edwards Deming +ingress: + # -- Flag to enable/disable ingress controller. + enabled: true + # -- K8s Name of ingress. + name: cloudharness-ingress + # -- Enables/disables SSL redirect. + ssl_redirect: true + letsencrypt: + # -- Email for letsencrypt. + email: filippo@metacell.us +backup: + # -- Flag to enable/disable backups. + active: false + # -- Number of days to keep backups. + keep_days: "7" + # -- Number of weeks to keep backups. + keep_weeks: "4" + # -- Number of months to keep backups. + keep_months: "6" + # -- Schedule as cronjob expression. + schedule: "*/5 * * * *" + # -- The file suffix added to backup files. + suffix: ".gz" + # -- The volume size for backups (all backups share the same volume) + volumesize: "2Gi" + # -- Target directory of backups, the mount point of the persistent volume. + dir: "/backups" + resources: + requests: + # -- K8s memory resource definition. + memory: "32Mi" + # -- K8s cpu resource definition. + cpu: "25m" + limits: + # -- K8s memory resource definition. + memory: "64Mi" + # -- K8s cpu resource definition. + cpu: "50m" +proxy: + timeout: + # -- Timeout for proxy connections in seconds. + send: 60 + # -- Timeout for proxy responses in seconds.
+ read: 60 + keepalive: 60 + payload: + # -- Maximum size of payload in MB + max: 250 diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py new file mode 100644 index 00000000..39ff0272 --- /dev/null +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -0,0 +1,753 @@ +""" +Utilities to create a docker compose configuration from a CloudHarness directory structure +""" +import yaml +import os +import shutil +import logging +from hashlib import sha1 +import subprocess +from functools import cache +import tarfile +from docker import from_env as DockerClient +from pathlib import Path + + +from . import HERE, CH_ROOT +from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ + DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH +from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ + get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ + find_dockerfiles_paths + +from .models import HarnessMainConfig + +KEY_HARNESS = 'harness' +KEY_SERVICE = 'service' +KEY_DATABASE = 'database' +KEY_DEPLOYMENT = 'deployment' +KEY_APPS = 'apps' +KEY_TASK_IMAGES = 'task-images' +KEY_TEST_IMAGES = 'test-images' + +DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') + + +def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, + namespace=None, templates_path=HELM_PATH) -> HarnessMainConfig: + if (type(env)) == str: + env = [env] + return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured, +
output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env, + namespace=namespace, templates_path=templates_path).process_values() + + +class CloudHarnessHelm: + def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, + namespace=None, templates_path=HELM_PATH): + assert domain, 'A domain must be specified' + self.root_paths = [Path(r) for r in root_paths] + self.tag = tag + if not registry.endswith('/'): + self.registry = f'{registry}/' + else: + self.registry = registry + self.local = local + self.domain = domain + self.exclude = exclude + self.secured = secured + self.output_path = Path(output_path) + self.include = include + self.registry_secret = registry_secret + self.tls = tls + self.env = env + self.namespace = namespace + + self.templates_path = templates_path + self.dest_deployment_path = self.output_path / templates_path + self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml' + self.__init_deployment() + + self.static_images = set() + self.base_images = {} + self.all_images = {} + + def __init_deployment(self): + """ + Create the base helm chart + """ + if self.dest_deployment_path.exists(): + shutil.rmtree(self.dest_deployment_path) + # Initialize with default + copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path) + + # Override for every cloudharness scaffolding + for root_path in self.root_paths: + copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, + base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path) + collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, + dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path) + + def __adjust_missing_values(self, helm_values): + if 'name' not in helm_values: + 
with open(self.helm_chart_path) as f: + chart_idx_content = yaml.safe_load(f) + helm_values['name'] = chart_idx_content['name'].lower() + + def process_values(self) -> HarnessMainConfig: + """ + Creates values file for the helm chart + """ + helm_values = self.__get_default_helm_values() + + self.__adjust_missing_values(helm_values) + + helm_values = self.__merge_base_helm_values(helm_values) + + helm_values[KEY_APPS] = {} + + base_image_name = helm_values['name'] + + helm_values[KEY_TASK_IMAGES] = {} + + self.__init_base_images(base_image_name) + self.__init_static_images(base_image_name) + helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name) + + self.__process_applications(helm_values, base_image_name) + + # self.create_tls_certificate(helm_values) + + values, include = self.__finish_helm_values(values=helm_values) + + # Adjust dependencies from static (common) images + self.__assign_static_build_dependencies(helm_values) + + for root_path in self.root_paths: + collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, + dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path) + + # Save values file for manual helm chart + merged_values = merge_to_yaml_file(helm_values, self.dest_deployment_path / VALUES_MANUAL_PATH) + if self.namespace: + merge_to_yaml_file({'metadata': {'namespace': self.namespace}, + 'name': helm_values['name']}, self.helm_chart_path) + validate_helm_values(merged_values) + return HarnessMainConfig.from_dict(merged_values) + + def __process_applications(self, helm_values, base_image_name): + for root_path in self.root_paths: + app_values = init_app_values( + root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) + helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + app_values) + + app_base_path = root_path / APPS_PATH + app_values = self.collect_app_values( + f"{app_base_path}", base_image_name=base_image_name) + helm_values[KEY_APPS] = 
dict_merge(helm_values[KEY_APPS], + app_values) + + def collect_app_values(self, app_base_path, base_image_name=None): + values = {} + + for app_path in get_sub_paths(app_base_path): + app_name = app_name_from_path( + os.path.relpath(app_path, app_base_path)) + + if app_name in self.exclude: + continue + app_key = app_name.replace('-', '_') + + app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) + + values[app_key] = dict_merge( + values[app_key], app_values) if app_key in values else app_values + + return values + + def __init_static_images(self, base_image_name): + for static_img_dockerfile in self.static_images: + img_name = image_name_from_dockerfile_path(os.path.basename( + static_img_dockerfile), base_name=base_image_name) + self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( + img_name, build_context_path=static_img_dockerfile) + + def __assign_static_build_dependencies(self, helm_values): + for static_img_dockerfile in self.static_images: + key = os.path.basename(static_img_dockerfile) + if key in helm_values[KEY_TASK_IMAGES]: + dependencies = guess_build_dependencies_from_dockerfile( + static_img_dockerfile) + for dep in dependencies: + if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: + helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] + + for image_name in list(helm_values[KEY_TASK_IMAGES].keys()): + if image_name in self.exclude: + del helm_values[KEY_TASK_IMAGES][image_name] + + def __init_base_images(self, base_image_name): + + for root_path in self.root_paths: + for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): + img_name = image_name_from_dockerfile_path( + os.path.basename(base_img_dockerfile), base_name=base_image_name) + self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=root_path) + + self.static_images.update(find_dockerfiles_paths( + os.path.join(root_path, 
STATIC_IMAGES_PATH))) + return self.base_images + + def __init_test_images(self, base_image_name): + test_images = {} + for root_path in self.root_paths: + for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): + img_name = image_name_from_dockerfile_path( + os.path.basename(base_img_dockerfile), base_name=base_image_name) + test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=base_img_dockerfile) + + return test_images + + + def __find_static_dockerfile_paths(self, root_path): + return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) + + def __merge_base_helm_values(self, helm_values): + # Override for every cloudharness scaffolding + for root_path in self.root_paths: + helm_values = dict_merge( + helm_values, + collect_helm_values(root_path, env=self.env) + ) + + return helm_values + + def __get_default_helm_values(self): + helm_values = get_template(os.path.join( + CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml')) + helm_values = dict_merge(helm_values, + collect_helm_values(CH_ROOT, env=self.env)) + + return helm_values + + def create_tls_certificate(self, helm_values): + if not self.tls: + helm_values['tls'] = None + return + if not self.local: + return + helm_values['tls'] = self.domain.replace(".", "-") + "-tls" + + bootstrap_file = 'bootstrap.sh' + certs_parent_folder_path = self.output_path / 'helm' / 'resources' + certs_folder_path = certs_parent_folder_path / 'certs' + + # if os.path.exists(os.path.join(certs_folder_path)): + if certs_folder_path.exists(): + # don't overwrite the certificate if it exists + return + + try: + client = DockerClient() + client.ping() + except: + raise ConnectionRefusedError( + '\n\nIs docker running? 
Run "eval(minikube docker-env)" if you are using minikube...') + + # Create CA and sign cert for domain + container = client.containers.run(image='frapsoft/openssl', + command=f'sleep 60', + entrypoint="", + detach=True, + environment=[ + f"DOMAIN={self.domain}"], + ) + + container.exec_run('mkdir -p /mnt/vol1') + container.exec_run('mkdir -p /mnt/certs') + + # copy bootstrap file + cur_dir = os.getcwd() + os.chdir(os.path.join(HERE, 'scripts')) + tar = tarfile.open(bootstrap_file + '.tar', mode='w') + try: + tar.add(bootstrap_file) + finally: + tar.close() + data = open(bootstrap_file + '.tar', 'rb').read() + container.put_archive('/mnt/vol1', data) + os.chdir(cur_dir) + container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') + + # exec bootstrap file + container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') + + # retrieve the certs from the container + bits, stat = container.get_archive('/mnt/certs') + if not certs_folder_path.exists(): + certs_folder_path.mkdir(parents=True) + with open(certs_parent_folder_path / 'certs.tar', 'wb') as f: + for chunk in bits: + f.write(chunk) + cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar') + cf.extractall(path=certs_parent_folder_path) + + logs = container.logs() + logging.info(f'openssl container logs: {logs}') + + # stop the container + container.kill() + + logging.info("Created certificates for local deployment") + + def __finish_helm_values(self, values): + """ + Sets default overridden values + """ + if self.registry: + logging.info(f"Registry set: {self.registry}") + if self.local: + values['registry']['secret'] = '' + if self.registry_secret: + logging.info(f"Registry secret set") + values['registry']['name'] = self.registry + values['registry']['secret'] = self.registry_secret + values['tag'] = self.tag + if self.namespace: + values['namespace'] = self.namespace + values['secured_gatekeepers'] = self.secured + values['ingress']['ssl_redirect'] = values['ingress']['ssl_redirect'] and self.tls 
+ values['tls'] = self.tls + if self.domain: + values['domain'] = self.domain + + values['local'] = self.local + if self.local: + try: + values['localIp'] = get_cluster_ip() + except subprocess.TimeoutExpired: + logging.warning("Minikube not available") + except: + logging.warning("Kubectl not available") + + apps = values[KEY_APPS] + + for app_key in apps: + v = apps[app_key] + + values_from_legacy(v) + assert KEY_HARNESS in v, 'Default app value loading is broken' + + app_name = app_key.replace('_', '-') + harness = v[KEY_HARNESS] + harness['name'] = app_name + + if not harness[KEY_SERVICE].get('name', None): + harness[KEY_SERVICE]['name'] = app_name + if not harness[KEY_DEPLOYMENT].get('name', None): + harness[KEY_DEPLOYMENT]['name'] = app_name + + if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None): + harness[KEY_DATABASE]['name'] = app_name.strip() + '-db' + + self.__clear_unused_db_configuration(harness) + values_set_legacy(v) + + if self.include: + self.include = get_included_with_dependencies( + values, set(self.include)) + logging.info('Selecting included applications') + + for v in [v for v in apps]: + if apps[v]['harness']['name'] not in self.include: + del apps[v] + continue + values[KEY_TASK_IMAGES].update(apps[v][KEY_TASK_IMAGES]) + # Create environment variables + else: + for v in [v for v in apps]: + values[KEY_TASK_IMAGES].update(apps[v][KEY_TASK_IMAGES]) + create_env_variables(values) + return values, self.include + + def __clear_unused_db_configuration(self, harness_config): + database_config = harness_config[KEY_DATABASE] + database_type = database_config.get('type', None) + if database_type is None: + del harness_config[KEY_DATABASE] + return + db_specific_keys = [k for k, v in database_config.items() + if isinstance(v, dict) and 'image' in v and 'ports' in v] + for db in db_specific_keys: + if database_type != db: + del database_config[db] + + def image_tag(self, image_name, build_context_path=None, dependencies=()): + tag 
= self.tag + if tag is None and not self.local: + logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") + ignore_path = os.path.join(build_context_path, '.dockerignore') + ignore = set(DEFAULT_IGNORE) + if os.path.exists(ignore_path): + with open(ignore_path) as f: + ignore = ignore.union({line.strip() for line in f}) + logging.info(f"Ignoring {ignore}") + tag = generate_tag_from_content(build_context_path, ignore) + logging.info(f"Content hash: {tag}") + dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) + tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() + logging.info(f"Generated tag: {tag}") + app_name = image_name.split("/")[-1] # the image name can have a prefix + self.all_images[app_name] = tag + return self.registry + image_name + (f':{tag}' if tag else '') + + def create_app_values_spec(self, app_name, app_path, base_image_name=None): + logging.info('Generating values script for ' + app_name) + + specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml') + if os.path.exists(specific_template_path): + logging.info("Specific values template found: " + + specific_template_path) + values = get_template(specific_template_path) + else: + values = {} + + for e in self.env: + specific_template_path = os.path.join( + app_path, 'deploy', f'values-{e}.yaml') + if os.path.exists(specific_template_path): + logging.info( + "Specific environment values template found: " + specific_template_path) + with open(specific_template_path) as f: + values_env_specific = yaml.safe_load(f) + values = dict_merge(values, values_env_specific) + + if KEY_HARNESS in values and 'name' in values[KEY_HARNESS] and values[KEY_HARNESS]['name']: + logging.warning('Name is automatically set in applications: name %s will be ignored', + values[KEY_HARNESS]['name']) + + image_paths = [path for path in find_dockerfiles_paths( + app_path) if 'tasks/' 
not in path and 'subapps' not in path] + if len(image_paths) > 1: + logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name, + image_paths[0]) + if KEY_HARNESS in values and 'dependencies' in values[KEY_HARNESS] and 'build' in values[KEY_HARNESS]['dependencies']: + build_dependencies = values[KEY_HARNESS]['dependencies']['build'] + else: + build_dependencies = [] + + if len(image_paths) > 0: + image_name = image_name_from_dockerfile_path(os.path.relpath( + image_paths[0], os.path.dirname(app_path)), base_image_name) + + values['image'] = self.image_tag( + image_name, build_context_path=app_path, dependencies=build_dependencies) + elif KEY_HARNESS in values and not values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', None) and values[ + KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('auto', False): + raise Exception(f"At least one Dockerfile must be specified on application {app_name}. " + f"Specify harness.deployment.image value if you intend to use a prebuilt image.") + + task_images_paths = [path for path in find_dockerfiles_paths( + app_path) if 'tasks/' in path] + values[KEY_TASK_IMAGES] = values.get(KEY_TASK_IMAGES, {}) + + if build_dependencies: + for build_dependency in values[KEY_HARNESS]['dependencies']['build']: + if build_dependency in self.base_images: + values[KEY_TASK_IMAGES][build_dependency] = self.base_images[build_dependency] + + for task_path in task_images_paths: + task_name = app_name_from_path(os.path.relpath( + task_path, os.path.dirname(app_path))) + img_name = image_name_from_dockerfile_path(task_name, base_image_name) + + values[KEY_TASK_IMAGES][task_name] = self.image_tag( + img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) + + return values + + +def get_included_with_dependencies(values, include): + app_values = values['apps'].values() + directly_included = [app for app in app_values if any( + inc == app[KEY_HARNESS]['name'] for inc in include)] + + dependent = 
set(include) + for app in directly_included: + if app['harness']['dependencies'].get('hard', None): + dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) + if app['harness']['dependencies'].get('soft', None): + dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) + if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: + dependent.add('accounts') + if len(dependent) == len(include): + return dependent + return get_included_with_dependencies(values, dependent) + + +def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): + pass + + +def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None): + """ + Searches recursively for helm templates inside the applications and collects the templates in the destination + + :param search_root: + :param dest_helm_chart_path: collected helm templates destination folder + :param exclude: + :return: + """ + app_base_path = os.path.join(search_root, APPS_PATH) + + for app_path in get_sub_paths(app_base_path): + app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) + if app_name in exclude or (include and not any(inc in app_name for inc in include)): + continue + template_dir = os.path.join(app_path, 'deploy', 'templates', templates_path) + if os.path.exists(template_dir): + dest_dir = os.path.join( + dest_helm_chart_path, 'templates', app_name) + + logging.info( + "Collecting templates for application %s to %s", app_name, dest_dir) + if os.path.exists(dest_dir): + logging.warning( + "Merging/overriding all files in directory %s", dest_dir) + merge_configuration_directories(template_dir, dest_dir) + else: + shutil.copytree(template_dir, dest_dir) + resources_dir = os.path.join(app_path, 'deploy/resources') + if os.path.exists(resources_dir): + dest_dir = os.path.join( + dest_helm_chart_path, 'resources', app_name) + + logging.info( + "Collecting resources for application %s to %s", app_name, dest_dir) + + 
merge_configuration_directories(resources_dir, dest_dir) + + subchart_dir = os.path.join(app_path, 'deploy/charts') + if os.path.exists(subchart_dir): + dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name) + + logging.info( + "Collecting templates for application %s to %s", app_name, dest_dir) + if os.path.exists(dest_dir): + logging.warning( + "Merging/overriding all files in directory %s", dest_dir) + merge_configuration_directories(subchart_dir, dest_dir) + else: + shutil.copytree(subchart_dir, dest_dir) + + +def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): + if not os.path.exists(base_helm_chart): + return + if os.path.exists(dest_helm_chart_path): + logging.info("Merging/overriding all files in directory %s", + dest_helm_chart_path) + merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") + else: + logging.info("Copying base deployment chart from %s to %s", + base_helm_chart, dest_helm_chart_path) + shutil.copytree(base_helm_chart, dest_helm_chart_path) + + +def collect_helm_values(deployment_root, env=()): + """ + Creates helm values from a cloudharness deployment scaffolding + """ + + values_template_path = os.path.join( + deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') + + values = get_template(values_template_path) + + for e in env: + specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, + f'values-template-{e}.yaml') + if os.path.exists(specific_template_path): + logging.info( + "Specific environment values template found: " + specific_template_path) + with open(specific_template_path) as f: + values_env_specific = yaml.safe_load(f) + values = dict_merge(values, values_env_specific) + return values + + +def init_app_values(deployment_root, exclude, values=None): + values = values if values is not None else {} + app_base_path = os.path.join(deployment_root, APPS_PATH) + overridden_template_path = os.path.join( + deployment_root, 
DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + default_values_path = os.path.join( + CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + + for app_path in get_sub_paths(app_base_path): + + app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) + + if app_name in exclude: + continue + app_key = app_name.replace('-', '_') + if app_key not in values: + default_values = get_template(default_values_path) + values[app_key] = default_values + overridden_defaults = get_template(overridden_template_path) + values[app_key] = dict_merge(values[app_key], overridden_defaults) + + return values + + +def values_from_legacy(values): + if KEY_HARNESS not in values: + values[KEY_HARNESS] = {} + harness = values[KEY_HARNESS] + if KEY_SERVICE not in harness: + harness[KEY_SERVICE] = {} + if KEY_DEPLOYMENT not in harness: + harness[KEY_DEPLOYMENT] = {} + if KEY_DATABASE not in harness: + harness[KEY_DATABASE] = {} + + if 'subdomain' in values: + harness['subdomain'] = values['subdomain'] + if 'autodeploy' in values: + harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] + if 'autoservice' in values: + harness[KEY_SERVICE]['auto'] = values['autoservice'] + if 'secureme' in values: + harness['secured'] = values['secureme'] + if 'resources' in values: + harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) + if 'replicas' in values: + harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] + if 'image' in values: + harness[KEY_DEPLOYMENT]['image'] = values['image'] + if 'port' in values: + harness[KEY_DEPLOYMENT]['port'] = values['port'] + harness[KEY_SERVICE]['port'] = values['port'] + + +def values_set_legacy(values): + harness = values[KEY_HARNESS] + if 'image' in harness[KEY_DEPLOYMENT]: + values['image'] = harness[KEY_DEPLOYMENT]['image'] + + values['name'] = harness['name'] + if harness[KEY_DEPLOYMENT].get('port', None): + values['port'] = harness[KEY_DEPLOYMENT]['port'] + if 'resources' in harness[KEY_DEPLOYMENT]: + 
values['resources'] = harness[KEY_DEPLOYMENT]['resources'] + + +def generate_tag_from_content(content_path, ignore=()): + from dirhash import dirhash + return dirhash(content_path, 'sha1', ignore=ignore) + + +def extract_env_variables_from_values(values, envs=tuple(), prefix=''): + if isinstance(values, dict): + newenvs = list(envs) + for key, value in values.items(): + v = extract_env_variables_from_values( + value, envs, f"{prefix}_{key}".replace('-', '_').upper()) + if key in ('name', 'port', 'subdomain'): + newenvs.extend(v) + return newenvs + else: + return [env_variable(prefix, values)] + + +def create_env_variables(values): + for app_name, value in values[KEY_APPS].items(): + if KEY_HARNESS in value: + values['env'].extend(extract_env_variables_from_values( + value[KEY_HARNESS], prefix='CH_' + app_name)) + values['env'].append(env_variable('CH_DOMAIN', values['domain'])) + values['env'].append(env_variable( + 'CH_IMAGE_REGISTRY', values['registry']['name'])) + values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) + + +def hosts_info(values): + domain = values['domain'] + namespace = values['namespace'] + subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if + KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if + KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] + try: + ip = get_cluster_ip() + except: + logging.warning('Cannot get cluster ip') + return + logging.info( + "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' 
+ domain for sd in subdomains)}") + + deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] + for app in values[KEY_APPS].values() if KEY_HARNESS in app) + + logging.info( + "\nTo run locally some apps, also those references may be needed") + for appname in values[KEY_APPS]: + app = values[KEY_APPS][appname]['harness'] + if 'deployment' not in app: + continue + print( + "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( + app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) + + print( + f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") + + +class ValuesValidationException(Exception): + pass + + +def validate_helm_values(values): + validate_dependencies(values) + + +def validate_dependencies(values): + all_apps = {a for a in values["apps"]} + for app in all_apps: + app_values = values["apps"][app] + if 'dependencies' in app_values[KEY_HARNESS]: + soft_dependencies = { + d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} + not_found = {d for d in soft_dependencies if d not in all_apps} + if not_found: + logging.warning( + f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") + hard_dependencies = { + d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} + not_found = {d for d in hard_dependencies if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad application dependencies specified for application {app}: {','.join(not_found)}") + + build_dependencies = { + d for d in app_values[KEY_HARNESS]['dependencies']['build']} + + not_found = { + d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} + not_found = {d for d in not_found if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") + + if 'use_services' in app_values[KEY_HARNESS]: 
+ service_dependencies = {d['name'].replace( + "-", "_") for d in app_values[KEY_HARNESS]['use_services']} + + not_found = {d for d in service_dependencies if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index 4c75a909..9bd43b8c 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -212,7 +212,7 @@ def __init_base_images(self, base_image_name): self.static_images.update(find_dockerfiles_paths( os.path.join(root_path, STATIC_IMAGES_PATH))) return self.base_images - + def __init_test_images(self, base_image_name): test_images = {} for root_path in self.root_paths: @@ -224,7 +224,7 @@ def __init_test_images(self, base_image_name): return test_images - + def __find_static_dockerfile_paths(self, root_path): return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) @@ -417,7 +417,7 @@ def image_tag(self, image_name, build_context_path=None, dependencies=()): app_name = image_name.split("/")[-1] # the image name can have a prefix self.all_images[app_name] = tag return self.registry + image_name + (f':{tag}' if tag else '') - + def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) @@ -456,7 +456,7 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): if len(image_paths) > 0: image_name = image_name_from_dockerfile_path(os.path.relpath( image_paths[0], os.path.dirname(app_path)), base_image_name) - + values['image'] = self.image_tag( image_name, build_context_path=app_path, dependencies=build_dependencies) elif KEY_HARNESS in values and not values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', 
None) and values[ @@ -521,7 +521,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), i app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) if app_name in exclude or (include and not any(inc in app_name for inc in include)): continue - template_dir = os.path.join(app_path, 'deploy/templates') + template_dir = os.path.join(app_path, 'deploy', 'templates', HELM_PATH) if os.path.exists(template_dir): dest_dir = os.path.join( dest_helm_chart_path, 'templates', app_name) diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index a9cecabb..d8aaebda 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ b/tools/deployment-cli-tools/harness-deployment @@ -4,6 +4,7 @@ import logging import sys import os +from ch_cli_tools.dockercompose import create_docker_compose_configuration from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file @@ -61,6 +62,8 @@ if __name__ == "__main__": help=f'Do not generate ci/cd files') parser.add_argument('-we', '--write-env', dest='write_env', action="store_const", default=None, const=True, help=f'Write build env to .env file in {DEPLOYMENT_PATH}') + parser.add_argument('--docker-compose', dest='docker_compose', action="store_true", + help='Generate docker-compose.yaml and dedicated Skaffold configuration') args, unknown = parser.parse_known_args(sys.argv[1:]) @@ -81,7 +84,24 @@ if __name__ == "__main__": merge_app_directories(root_paths, destination=args.merge) root_paths = [args.merge] - helm_values = create_helm_chart( + # helm_values = create_helm_chart( + # root_paths, + # tag=args.tag, + # registry=args.registry, + # domain=args.domain, + # local=args.local, + # secured=not args.unsecured, + # output_path=args.output_path, + # 
exclude=args.exclude, + # include=args.include, + # registry_secret=args.registry_secret, + # tls=not args.no_tls, + # env=envs, + # namespace=args.namespace + # ) + + if not args.docker_compose: + helm_values = create_helm_chart( root_paths, tag=args.tag, registry=args.registry, @@ -96,6 +116,23 @@ if __name__ == "__main__": env=envs, namespace=args.namespace ) + else: + helm_values = create_docker_compose_configuration( + root_paths, + tag=args.tag, + registry=args.registry, + domain=args.domain, + local=args.local, + secured=not args.unsecured, + output_path=args.output_path, + exclude=args.exclude, + include=args.include, + registry_secret=args.registry_secret, + tls=not args.no_tls, + env=envs, + namespace=args.namespace, + templates_path="compose", + ) merged_root_paths = preprocess_build_overrides( root_paths=root_paths, helm_values=helm_values) @@ -108,7 +145,7 @@ if __name__ == "__main__": envs=envs, base_image_name=helm_values['name'], helm_values=helm_values) - + if args.write_env: write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env")) From 6f306a18a2f07a6ba78501946d9be3887244b162 Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 7 Feb 2024 09:24:29 -0600 Subject: [PATCH 011/210] Squashed commit of the following: commit c698bbadf4f5cf41a59818d3738258fb29919249 Author: aranega Date: Wed Feb 7 08:55:45 2024 -0600 CH-100 Add second path using pathlib commit 0422bfe9860f272354c1faadd851d37b4976650a Author: aranega Date: Wed Feb 7 07:33:43 2024 -0600 CH-100 Add first port to pathlib --- .../ch_cli_tools/dockercompose.py | 92 +++++++++---------- tools/deployment-cli-tools/harness-deployment | 1 - 2 files changed, 45 insertions(+), 48 deletions(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 39ff0272..06bf6d23 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ 
-31,16 +31,16 @@ KEY_TEST_IMAGES = 'test-images' DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') - +COMPOSE = 'compose' def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, - namespace=None, templates_path=HELM_PATH) -> HarnessMainConfig: + namespace=None) -> HarnessMainConfig: if (type(env)) == str: env = [env] return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured, output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env, - namespace=namespace, templates_path=templates_path).process_values() + namespace=namespace, templates_path=COMPOSE).process_values() class CloudHarnessHelm: @@ -146,16 +146,15 @@ def __process_applications(self, helm_values, base_image_name): app_base_path = root_path / APPS_PATH app_values = self.collect_app_values( - f"{app_base_path}", base_image_name=base_image_name) + app_base_path, base_image_name=base_image_name) helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], app_values) def collect_app_values(self, app_base_path, base_image_name=None): values = {} - for app_path in get_sub_paths(app_base_path): - app_name = app_name_from_path( - os.path.relpath(app_path, app_base_path)) + for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories + app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}") if app_name in self.exclude: continue @@ -185,7 +184,7 @@ def __assign_static_build_dependencies(self, helm_values): if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] - for image_name in list(helm_values[KEY_TASK_IMAGES].keys()): + for image_name in helm_values[KEY_TASK_IMAGES].keys(): if 
image_name in self.exclude: del helm_values[KEY_TASK_IMAGES][image_name] @@ -228,10 +227,11 @@ def __merge_base_helm_values(self, helm_values): return helm_values def __get_default_helm_values(self): - helm_values = get_template(os.path.join( - CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml')) + ch_root_path = Path(CH_ROOT) + values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml' + helm_values = get_template(values_yaml_path) helm_values = dict_merge(helm_values, - collect_helm_values(CH_ROOT, env=self.env)) + collect_helm_values(ch_root_path, env=self.env)) return helm_values @@ -273,7 +273,7 @@ def create_tls_certificate(self, helm_values): # copy bootstrap file cur_dir = os.getcwd() - os.chdir(os.path.join(HERE, 'scripts')) + os.chdir(Path(HERE) / 'scripts') tar = tarfile.open(bootstrap_file + '.tar', mode='w') try: tar.add(bootstrap_file) @@ -291,10 +291,11 @@ def create_tls_certificate(self, helm_values): bits, stat = container.get_archive('/mnt/certs') if not certs_folder_path.exists(): certs_folder_path.mkdir(parents=True) - with open(certs_parent_folder_path / 'certs.tar', 'wb') as f: + certs_tar = certs_parent_folder_path / 'certs.tar' + with open(certs_tar, 'wb') as f: for chunk in bits: f.write(chunk) - cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar') + cf = tarfile.open(certs_tar) cf.extractall(path=certs_parent_folder_path) logs = container.logs() @@ -409,20 +410,19 @@ def image_tag(self, image_name, build_context_path=None, dependencies=()): def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) - specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml') - if os.path.exists(specific_template_path): - logging.info("Specific values template found: " + - specific_template_path) + deploy_path = app_path / 'deploy' + specific_template_path = deploy_path / 'values.yaml' + if 
specific_template_path.exists(): + logging.info(f"Specific values template found: {specific_template_path}") values = get_template(specific_template_path) else: values = {} for e in self.env: - specific_template_path = os.path.join( - app_path, 'deploy', f'values-{e}.yaml') - if os.path.exists(specific_template_path): + specific_template_path = deploy_path / f'values-{e}.yaml' + if specific_template_path.exists(): logging.info( - "Specific environment values template found: " + specific_template_path) + f"Specific environment values template found: {specific_template_path}") with open(specific_template_path) as f: values_env_specific = yaml.safe_load(f) values = dict_merge(values, values_env_specific) @@ -433,6 +433,8 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): image_paths = [path for path in find_dockerfiles_paths( app_path) if 'tasks/' not in path and 'subapps' not in path] + import ipdb; ipdb.set_trace() # fmt: skip + if len(image_paths) > 1: logging.warning('Multiple Dockerfiles found in application %s. 
Picking the first one: %s', app_name, image_paths[0]) @@ -463,7 +465,7 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): for task_path in task_images_paths: task_name = app_name_from_path(os.path.relpath( - task_path, os.path.dirname(app_path))) + task_path, app_path.parent)) img_name = image_name_from_dockerfile_path(task_name, base_image_name) values[KEY_TASK_IMAGES][task_name] = self.image_tag( @@ -503,53 +505,51 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat :param exclude: :return: """ - app_base_path = os.path.join(search_root, APPS_PATH) + app_base_path = search_root / APPS_PATH - for app_path in get_sub_paths(app_base_path): - app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) + for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories + app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) if app_name in exclude or (include and not any(inc in app_name for inc in include)): continue - template_dir = os.path.join(app_path, 'deploy', 'templates', templates_path) - if os.path.exists(template_dir): - dest_dir = os.path.join( - dest_helm_chart_path, 'templates', app_name) + template_dir = app_path / 'deploy' / 'templates' / templates_path + if template_dir.exists(): + dest_dir = dest_helm_chart_path / 'templates' / app_name logging.info( "Collecting templates for application %s to %s", app_name, dest_dir) - if os.path.exists(dest_dir): + if dest_dir.exists(): logging.warning( "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(template_dir, dest_dir) + merge_configuration_directories(f"{template_dir}", f"{dest_dir}") else: shutil.copytree(template_dir, dest_dir) - resources_dir = os.path.join(app_path, 'deploy/resources') - if os.path.exists(resources_dir): - dest_dir = os.path.join( - dest_helm_chart_path, 'resources', app_name) + resources_dir = app_path / 'deploy' / 'resources' + if 
resources_dir.exists(): + dest_dir = dest_helm_chart_path / 'resources' / app_name logging.info( "Collecting resources for application %s to %s", app_name, dest_dir) - merge_configuration_directories(resources_dir, dest_dir) + merge_configuration_directories(f"{resources_dir}", f"{dest_dir}") - subchart_dir = os.path.join(app_path, 'deploy/charts') - if os.path.exists(subchart_dir): - dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name) + subchart_dir = app_path / 'deploy/charts' + if subchart_dir.exists(): + dest_dir = dest_helm_chart_path / 'charts' / app_name logging.info( "Collecting templates for application %s to %s", app_name, dest_dir) - if os.path.exists(dest_dir): + if dest_dir.exists(): logging.warning( "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(subchart_dir, dest_dir) + merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") else: shutil.copytree(subchart_dir, dest_dir) def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): - if not os.path.exists(base_helm_chart): + if not base_helm_chart.exists(): return - if os.path.exists(dest_helm_chart_path): + if dest_helm_chart_path.exists(): logging.info("Merging/overriding all files in directory %s", dest_helm_chart_path) merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") @@ -563,9 +563,7 @@ def collect_helm_values(deployment_root, env=()): """ Creates helm values from a cloudharness deployment scaffolding """ - - values_template_path = os.path.join( - deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') + values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml' values = get_template(values_template_path) diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index d8aaebda..e5cf49f1 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ 
b/tools/deployment-cli-tools/harness-deployment @@ -131,7 +131,6 @@ if __name__ == "__main__": tls=not args.no_tls, env=envs, namespace=args.namespace, - templates_path="compose", ) merged_root_paths = preprocess_build_overrides( From 6bbae19137873aa6970ae36d77680b2f0d750d3c Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 7 Feb 2024 10:24:48 -0600 Subject: [PATCH 012/210] CH-100 Add first skaffold dedicated generation for docker compose --- .../cloudharness_utils/constants.py | 2 + .../ch_cli_tools/dockercompose.py | 47 +++- .../ch_cli_tools/skaffoldcompose.py | 251 ++++++++++++++++++ tools/deployment-cli-tools/harness-deployment | 6 +- 4 files changed, 300 insertions(+), 6 deletions(-) create mode 100644 tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py index 4b42761a..168b7811 100644 --- a/libraries/cloudharness-utils/cloudharness_utils/constants.py +++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py @@ -10,6 +10,8 @@ HELM_PATH = "helm" HELM_CHART_PATH = HELM_PATH +COMPOSE = 'compose' + INFRASTRUCTURE_PATH = 'infrastructure' STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images') BASE_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'base-images') diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 06bf6d23..2cf768a4 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -15,10 +15,10 @@ from . 
import HERE, CH_ROOT from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ - DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH + DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ - find_dockerfiles_paths + find_dockerfiles_paths, find_file_paths from .models import HarnessMainConfig @@ -31,7 +31,6 @@ KEY_TEST_IMAGES = 'test-images' DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') -COMPOSE = 'compose' def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, @@ -50,7 +49,7 @@ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=Non assert domain, 'A domain must be specified' self.root_paths = [Path(r) for r in root_paths] self.tag = tag - if not registry.endswith('/'): + if registry and not registry.endswith('/'): self.registry = f'{registry}/' else: self.registry = registry @@ -433,7 +432,10 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): image_paths = [path for path in find_dockerfiles_paths( app_path) if 'tasks/' not in path and 'subapps' not in path] - import ipdb; ipdb.set_trace() # fmt: skip + + # Inject entry points commands + for image_path in image_paths: + self.inject_entry_points_commands(values, image_path, app_path) if len(image_paths) > 1: logging.warning('Multiple Dockerfiles found in application %s. 
Picking the first one: %s', app_name, @@ -474,6 +476,18 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): return values + def inject_entry_points_commands(self, helm_values, image_path, app_path): + context_path = os.path.relpath(image_path, '.') + + mains_candidates = find_file_paths(context_path, '__main__.py') + + task_main_file = identify_unicorn_based_main(mains_candidates, app_path) + + if task_main_file: + helm_values[KEY_HARNESS]['deployment']['command'] = ['python'] + helm_values[KEY_HARNESS]['deployment']['args'] = [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'] + + def get_included_with_dependencies(values, include): app_values = values['apps'].values() directly_included = [app for app in app_values if any( @@ -749,3 +763,26 @@ def validate_dependencies(values): if not_found: raise ValuesValidationException( f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") + + +def identify_unicorn_based_main(candidates, app_path): + import re + gunicorn_pattern = re.compile(r"gunicorn") + # sort candidates, shortest path first + for candidate in sorted(candidates,key=lambda x: len(x.split("/"))): + dockerfile_path = f"{candidate}/.." + while not os.path.exists(f"{dockerfile_path}/Dockerfile") and os.path.abspath(dockerfile_path) != os.path.abspath(app_path): + dockerfile_path += "/.." 
+ dockerfile = f"{dockerfile_path}/Dockerfile" + if not os.path.exists(dockerfile): + continue + with open(dockerfile, 'r') as file: + if re.search(gunicorn_pattern, file.read()): + return candidate + requirements = f"{candidate}/../requirements.txt" + if not os.path.exists(requirements): + continue + with open(requirements, 'r') as file: + if re.search(gunicorn_pattern, file.read()): + return candidate + return None \ No newline at end of file diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py new file mode 100644 index 00000000..27a4701a --- /dev/null +++ b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py @@ -0,0 +1,251 @@ +import os +import logging +import json +import time + +from os.path import join, relpath, basename, exists, abspath +from cloudharness_model import ApplicationTestConfig, HarnessMainConfig + +from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \ + BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE, HELM_PATH +from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES +from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \ + find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name + +def relpath_if(p1, p2): + if os.path.isabs(p1): + return p1 + return relpath(p1, p2) + +def create_skaffold_compose_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True): + skaffold_conf = get_template('skaffold-template.yaml', True) + apps = helm_values.apps + base_image_name = (helm_values.registry.name or "") + helm_values.name + artifacts = {} + overrides = {} + + def remove_tag(image_name): + return image_name.split(":")[0] + + def get_image_tag(name): + return f"{get_image_name(name, base_image_name)}" + + builds = {} + + def build_artifact(image_name, context_path, requirements=None, 
dockerfile_path=''): + artifact_spec = { + 'image': image_name, + 'context': context_path, + 'docker': { + 'dockerfile': join(dockerfile_path, 'Dockerfile'), + 'buildArgs': { + 'REGISTRY': helm_values.registry.name, + 'TAG': helm_values.tag, + 'NOCACHE': str(time.time()) + }, + 'ssh': 'default' + } + } + if requirements: + artifact_spec['requires'] = [{'image': get_image_tag(req), 'alias': req.replace('-', '_').upper()} for req + in requirements] + return artifact_spec + + + base_images = set() + + def process_build_dockerfile(dockerfile_path, root_path, global_context=False, requirements=None, app_name=None): + if app_name is None: + app_name = app_name_from_path(basename(dockerfile_path)) + if app_name in helm_values[KEY_TASK_IMAGES] or app_name.replace("-", "_") in helm_values.apps: + context_path = relpath_if(root_path, output_path) if global_context else relpath_if(dockerfile_path, output_path) + + builds[app_name] = context_path + base_images.add(get_image_name(app_name)) + artifacts[app_name] = build_artifact( + get_image_tag(app_name), + context_path, + dockerfile_path=relpath(dockerfile_path, output_path), + requirements=requirements or guess_build_dependencies_from_dockerfile(dockerfile_path) + ) + + for root_path in root_paths: + skaffold_conf = dict_merge(skaffold_conf, get_template( + join(root_path, DEPLOYMENT_CONFIGURATION_PATH, 'skaffold-template.yaml'))) + + base_dockerfiles = find_dockerfiles_paths( + join(root_path, BASE_IMAGES_PATH)) + + for dockerfile_path in base_dockerfiles: + process_build_dockerfile(dockerfile_path, root_path, global_context=True) + + release_config = skaffold_conf['deploy']['helm']['releases'][0] + release_config['name'] = helm_values.namespace + release_config['namespace'] = helm_values.namespace + release_config['artifactOverrides'][KEY_APPS] = {} + + static_images = set() + for root_path in root_paths: + static_dockerfiles = find_dockerfiles_paths( + join(root_path, STATIC_IMAGES_PATH)) + + for dockerfile_path in 
static_dockerfiles: + process_build_dockerfile(dockerfile_path, root_path) + + + for root_path in root_paths: + apps_path = join(root_path, APPS_PATH) + app_dockerfiles = find_dockerfiles_paths(apps_path) + + release_config['artifactOverrides'][KEY_TASK_IMAGES] = { + task_image: remove_tag(helm_values[KEY_TASK_IMAGES][task_image]) + for task_image in helm_values[KEY_TASK_IMAGES] + } + for dockerfile_path in app_dockerfiles: + app_relative_to_skaffold = os.path.relpath( + dockerfile_path, output_path) + context_path = os.path.relpath(dockerfile_path, '.') + app_relative_to_base = os.path.relpath(dockerfile_path, apps_path) + app_name = app_name_from_path(app_relative_to_base) + app_key = app_name.replace('-', '_') + if app_key not in apps: + if 'tasks' in app_relative_to_base and manage_task_images: + parent_app_name = app_name_from_path( + app_relative_to_base.split('/tasks')[0]) + parent_app_key = parent_app_name.replace('-', '_') + + if parent_app_key in apps: + artifacts[app_key] = build_artifact(get_image_tag(app_name), app_relative_to_skaffold, + base_images.union(static_images)) + + continue + + build_requirements = apps[app_key][KEY_HARNESS].dependencies.build + # app_image_tag = remove_tag( + # apps[app_key][KEY_HARNESS][KEY_DEPLOYMENT]['image']) + # artifacts[app_key] = build_artifact( + # app_image_tag, app_relative_to_skaffold, build_requirements) + process_build_dockerfile(dockerfile_path, root_path, requirements=build_requirements, app_name=app_name) + app = apps[app_key] + if app[KEY_HARNESS][KEY_DEPLOYMENT]['image']: + release_config['artifactOverrides']['apps'][app_key] = \ + { + KEY_HARNESS: { + KEY_DEPLOYMENT: { + 'image': remove_tag(app[KEY_HARNESS][KEY_DEPLOYMENT]['image']) + } + } + } + + mains_candidates = find_file_paths(context_path, '__main__.py') + + def identify_unicorn_based_main(candidates): + import re + gunicorn_pattern = re.compile(r"gunicorn") + # sort candidates, shortest path first + for candidate in sorted(candidates,key=lambda 
x: len(x.split("/"))): + dockerfile_path = f"{candidate}/.." + while not exists(f"{dockerfile_path}/Dockerfile") and abspath(dockerfile_path) != abspath(root_path): + dockerfile_path += "/.." + dockerfile = f"{dockerfile_path}/Dockerfile" + if not exists(dockerfile): + continue + with open(dockerfile, 'r') as file: + if re.search(gunicorn_pattern, file.read()): + return candidate + requirements = f"{candidate}/../requirements.txt" + if not exists(requirements): + continue + with open(requirements, 'r') as file: + if re.search(gunicorn_pattern, file.read()): + return candidate + return None + + task_main_file = identify_unicorn_based_main(mains_candidates) + + if task_main_file: + release_config['overrides']['apps'][app_key] = \ + { + 'harness': { + 'deployment': { + 'command': ['python'], + 'args': [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'] + } + } + } + + test_config: ApplicationTestConfig = helm_values.apps[app_key].harness.test + if test_config.unit.enabled and test_config.unit.commands: + + skaffold_conf['test'].append(dict( + image=get_image_tag(app_name), + custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands] + )) + + + del skaffold_conf['deploy'] + skaffold_conf['deploy'] = { + 'docker': { + 'useCompose': True, + 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']] + } + } + + skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()] + import ipdb; ipdb.set_trace() # fmt: skip + + merge_to_yaml_file(skaffold_conf, os.path.join( + output_path, 'skaffold.yaml')) + + return skaffold_conf + + +def create_vscode_debug_configuration(root_paths, helm_values): + logging.info( + "Creating VS code cloud build configuration.\nCloud build extension is needed to debug.") + + vscode_launch_path = '.vscode/launch.json' + + vs_conf = get_json_template(vscode_launch_path, True) + base_image_name = helm_values.name + debug_conf = get_json_template('vscode-debug-template.json', 
True) + + def get_image_tag(name): + return f"{get_image_name(name, base_image_name)}" + + if helm_values.registry.name: + base_image_name = helm_values.registry.name + helm_values.name + for i in range(len(vs_conf['configurations'])): + conf = vs_conf['configurations'][i] + if conf['name'] == debug_conf['name']: + del vs_conf['configurations'][i] + break + vs_conf['configurations'].append(debug_conf) + + apps = helm_values.apps + + for root_path in root_paths: + apps_path = os.path.join(root_path, 'applications') + + src_root_paths = find_file_paths(apps_path, 'setup.py') + + for path in src_root_paths: + app_relative_to_base = os.path.relpath(path, apps_path) + app_relative_to_root = os.path.relpath(path, '.') + app_name = app_name_from_path(app_relative_to_base.split('/')[0]) + app_key = app_name.replace('-', '_') + if app_key in apps.keys(): + debug_conf["debug"].append({ + "image": get_image_tag(app_name), + "sourceFileMap": { + "justMyCode": False, + f"${{workspaceFolder}}/{app_relative_to_root}": apps[app_key].harness.get('sourceRoot', + "/usr/src/app"), + } + }) + + + if not os.path.exists(os.path.dirname(vscode_launch_path)): + os.makedirs(os.path.dirname(vscode_launch_path)) + with open(vscode_launch_path, 'w') as f: + json.dump(vs_conf, f, indent=2, sort_keys=True) \ No newline at end of file diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index e5cf49f1..97897516 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ b/tools/deployment-cli-tools/harness-deployment @@ -7,6 +7,7 @@ import os from ch_cli_tools.dockercompose import create_docker_compose_configuration from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration +from ch_cli_tools.skaffoldcompose import create_skaffold_compose_configuration from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, 
write_env_file from ch_cli_tools.preprocessing import preprocess_build_overrides from ch_cli_tools.utils import merge_app_directories @@ -148,7 +149,10 @@ if __name__ == "__main__": if args.write_env: write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env")) - create_skaffold_configuration(merged_root_paths, helm_values) + if not args.docker_compose: + create_skaffold_configuration(merged_root_paths, helm_values) + else: + create_skaffold_compose_configuration(merged_root_paths, helm_values) create_vscode_debug_configuration(root_paths, helm_values) hosts_info(helm_values) From 528754579c3a33e993f623cb9e1a4fe9d86748fa Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 7 Feb 2024 10:33:07 -0600 Subject: [PATCH 013/210] CH-100 Make skaffold script a little bit more generic (ugly) --- .../cloudharness_utils/constants.py | 2 + .../ch_cli_tools/skaffold.py | 19 +- .../ch_cli_tools/skaffoldcompose.py | 251 ------------------ tools/deployment-cli-tools/harness-deployment | 21 +- 4 files changed, 18 insertions(+), 275 deletions(-) delete mode 100644 tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py index 168b7811..a5163f2b 100644 --- a/libraries/cloudharness-utils/cloudharness_utils/constants.py +++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py @@ -9,8 +9,10 @@ HELM_PATH = "helm" HELM_CHART_PATH = HELM_PATH +HELM_ENGINE = HELM_PATH COMPOSE = 'compose' +COMPOSE_ENGINE = 'docker-compose' INFRASTRUCTURE_PATH = 'infrastructure' STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images') diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py index c0de5764..bc66d616 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py +++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py @@ -7,7 +7,7 @@ from 
cloudharness_model import ApplicationTestConfig, HarnessMainConfig from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \ - BASE_IMAGES_PATH, STATIC_IMAGES_PATH + BASE_IMAGES_PATH, STATIC_IMAGES_PATH, HELM_ENGINE, COMPOSE_ENGINE from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \ find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name @@ -17,12 +17,13 @@ def relpath_if(p1, p2): return p1 return relpath(p1, p2) -def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True): +def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True, backend_deploy=HELM_ENGINE): skaffold_conf = get_template('skaffold-template.yaml', True) apps = helm_values.apps base_image_name = (helm_values.registry.name or "") + helm_values.name artifacts = {} overrides = {} + backend = backend_deploy or HELM_ENGINE def remove_tag(image_name): return image_name.split(":")[0] @@ -183,10 +184,18 @@ def identify_unicorn_based_main(candidates): custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands] )) + if backend == COMPOSE_ENGINE: + del skaffold_conf['deploy'] + skaffold_conf['deploy'] = { + 'docker': { + 'useCompose': True, + 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']] + } + } - skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()] - merge_to_yaml_file(skaffold_conf, os.path.join( - output_path, 'skaffold.yaml')) + skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()] + merge_to_yaml_file(skaffold_conf, os.path.join( + output_path, 'skaffold.yaml')) return skaffold_conf diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py 
b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py deleted file mode 100644 index 27a4701a..00000000 --- a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py +++ /dev/null @@ -1,251 +0,0 @@ -import os -import logging -import json -import time - -from os.path import join, relpath, basename, exists, abspath -from cloudharness_model import ApplicationTestConfig, HarnessMainConfig - -from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \ - BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE, HELM_PATH -from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES -from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \ - find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name - -def relpath_if(p1, p2): - if os.path.isabs(p1): - return p1 - return relpath(p1, p2) - -def create_skaffold_compose_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True): - skaffold_conf = get_template('skaffold-template.yaml', True) - apps = helm_values.apps - base_image_name = (helm_values.registry.name or "") + helm_values.name - artifacts = {} - overrides = {} - - def remove_tag(image_name): - return image_name.split(":")[0] - - def get_image_tag(name): - return f"{get_image_name(name, base_image_name)}" - - builds = {} - - def build_artifact(image_name, context_path, requirements=None, dockerfile_path=''): - artifact_spec = { - 'image': image_name, - 'context': context_path, - 'docker': { - 'dockerfile': join(dockerfile_path, 'Dockerfile'), - 'buildArgs': { - 'REGISTRY': helm_values.registry.name, - 'TAG': helm_values.tag, - 'NOCACHE': str(time.time()) - }, - 'ssh': 'default' - } - } - if requirements: - artifact_spec['requires'] = [{'image': get_image_tag(req), 'alias': req.replace('-', '_').upper()} for req - in requirements] - return artifact_spec - - - base_images = set() - - def 
process_build_dockerfile(dockerfile_path, root_path, global_context=False, requirements=None, app_name=None): - if app_name is None: - app_name = app_name_from_path(basename(dockerfile_path)) - if app_name in helm_values[KEY_TASK_IMAGES] or app_name.replace("-", "_") in helm_values.apps: - context_path = relpath_if(root_path, output_path) if global_context else relpath_if(dockerfile_path, output_path) - - builds[app_name] = context_path - base_images.add(get_image_name(app_name)) - artifacts[app_name] = build_artifact( - get_image_tag(app_name), - context_path, - dockerfile_path=relpath(dockerfile_path, output_path), - requirements=requirements or guess_build_dependencies_from_dockerfile(dockerfile_path) - ) - - for root_path in root_paths: - skaffold_conf = dict_merge(skaffold_conf, get_template( - join(root_path, DEPLOYMENT_CONFIGURATION_PATH, 'skaffold-template.yaml'))) - - base_dockerfiles = find_dockerfiles_paths( - join(root_path, BASE_IMAGES_PATH)) - - for dockerfile_path in base_dockerfiles: - process_build_dockerfile(dockerfile_path, root_path, global_context=True) - - release_config = skaffold_conf['deploy']['helm']['releases'][0] - release_config['name'] = helm_values.namespace - release_config['namespace'] = helm_values.namespace - release_config['artifactOverrides'][KEY_APPS] = {} - - static_images = set() - for root_path in root_paths: - static_dockerfiles = find_dockerfiles_paths( - join(root_path, STATIC_IMAGES_PATH)) - - for dockerfile_path in static_dockerfiles: - process_build_dockerfile(dockerfile_path, root_path) - - - for root_path in root_paths: - apps_path = join(root_path, APPS_PATH) - app_dockerfiles = find_dockerfiles_paths(apps_path) - - release_config['artifactOverrides'][KEY_TASK_IMAGES] = { - task_image: remove_tag(helm_values[KEY_TASK_IMAGES][task_image]) - for task_image in helm_values[KEY_TASK_IMAGES] - } - for dockerfile_path in app_dockerfiles: - app_relative_to_skaffold = os.path.relpath( - dockerfile_path, output_path) - 
context_path = os.path.relpath(dockerfile_path, '.') - app_relative_to_base = os.path.relpath(dockerfile_path, apps_path) - app_name = app_name_from_path(app_relative_to_base) - app_key = app_name.replace('-', '_') - if app_key not in apps: - if 'tasks' in app_relative_to_base and manage_task_images: - parent_app_name = app_name_from_path( - app_relative_to_base.split('/tasks')[0]) - parent_app_key = parent_app_name.replace('-', '_') - - if parent_app_key in apps: - artifacts[app_key] = build_artifact(get_image_tag(app_name), app_relative_to_skaffold, - base_images.union(static_images)) - - continue - - build_requirements = apps[app_key][KEY_HARNESS].dependencies.build - # app_image_tag = remove_tag( - # apps[app_key][KEY_HARNESS][KEY_DEPLOYMENT]['image']) - # artifacts[app_key] = build_artifact( - # app_image_tag, app_relative_to_skaffold, build_requirements) - process_build_dockerfile(dockerfile_path, root_path, requirements=build_requirements, app_name=app_name) - app = apps[app_key] - if app[KEY_HARNESS][KEY_DEPLOYMENT]['image']: - release_config['artifactOverrides']['apps'][app_key] = \ - { - KEY_HARNESS: { - KEY_DEPLOYMENT: { - 'image': remove_tag(app[KEY_HARNESS][KEY_DEPLOYMENT]['image']) - } - } - } - - mains_candidates = find_file_paths(context_path, '__main__.py') - - def identify_unicorn_based_main(candidates): - import re - gunicorn_pattern = re.compile(r"gunicorn") - # sort candidates, shortest path first - for candidate in sorted(candidates,key=lambda x: len(x.split("/"))): - dockerfile_path = f"{candidate}/.." - while not exists(f"{dockerfile_path}/Dockerfile") and abspath(dockerfile_path) != abspath(root_path): - dockerfile_path += "/.." 
- dockerfile = f"{dockerfile_path}/Dockerfile" - if not exists(dockerfile): - continue - with open(dockerfile, 'r') as file: - if re.search(gunicorn_pattern, file.read()): - return candidate - requirements = f"{candidate}/../requirements.txt" - if not exists(requirements): - continue - with open(requirements, 'r') as file: - if re.search(gunicorn_pattern, file.read()): - return candidate - return None - - task_main_file = identify_unicorn_based_main(mains_candidates) - - if task_main_file: - release_config['overrides']['apps'][app_key] = \ - { - 'harness': { - 'deployment': { - 'command': ['python'], - 'args': [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'] - } - } - } - - test_config: ApplicationTestConfig = helm_values.apps[app_key].harness.test - if test_config.unit.enabled and test_config.unit.commands: - - skaffold_conf['test'].append(dict( - image=get_image_tag(app_name), - custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands] - )) - - - del skaffold_conf['deploy'] - skaffold_conf['deploy'] = { - 'docker': { - 'useCompose': True, - 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']] - } - } - - skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()] - import ipdb; ipdb.set_trace() # fmt: skip - - merge_to_yaml_file(skaffold_conf, os.path.join( - output_path, 'skaffold.yaml')) - - return skaffold_conf - - -def create_vscode_debug_configuration(root_paths, helm_values): - logging.info( - "Creating VS code cloud build configuration.\nCloud build extension is needed to debug.") - - vscode_launch_path = '.vscode/launch.json' - - vs_conf = get_json_template(vscode_launch_path, True) - base_image_name = helm_values.name - debug_conf = get_json_template('vscode-debug-template.json', True) - - def get_image_tag(name): - return f"{get_image_name(name, base_image_name)}" - - if helm_values.registry.name: - base_image_name = helm_values.registry.name + helm_values.name - for 
i in range(len(vs_conf['configurations'])): - conf = vs_conf['configurations'][i] - if conf['name'] == debug_conf['name']: - del vs_conf['configurations'][i] - break - vs_conf['configurations'].append(debug_conf) - - apps = helm_values.apps - - for root_path in root_paths: - apps_path = os.path.join(root_path, 'applications') - - src_root_paths = find_file_paths(apps_path, 'setup.py') - - for path in src_root_paths: - app_relative_to_base = os.path.relpath(path, apps_path) - app_relative_to_root = os.path.relpath(path, '.') - app_name = app_name_from_path(app_relative_to_base.split('/')[0]) - app_key = app_name.replace('-', '_') - if app_key in apps.keys(): - debug_conf["debug"].append({ - "image": get_image_tag(app_name), - "sourceFileMap": { - "justMyCode": False, - f"${{workspaceFolder}}/{app_relative_to_root}": apps[app_key].harness.get('sourceRoot', - "/usr/src/app"), - } - }) - - - if not os.path.exists(os.path.dirname(vscode_launch_path)): - os.makedirs(os.path.dirname(vscode_launch_path)) - with open(vscode_launch_path, 'w') as f: - json.dump(vs_conf, f, indent=2, sort_keys=True) \ No newline at end of file diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index 97897516..9a5cc78c 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ b/tools/deployment-cli-tools/harness-deployment @@ -7,11 +7,10 @@ import os from ch_cli_tools.dockercompose import create_docker_compose_configuration from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration -from ch_cli_tools.skaffoldcompose import create_skaffold_compose_configuration from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file from ch_cli_tools.preprocessing import preprocess_build_overrides from ch_cli_tools.utils import merge_app_directories -from cloudharness_utils.constants import DEPLOYMENT_PATH +from 
cloudharness_utils.constants import DEPLOYMENT_PATH, COMPOSE_ENGINE HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/') ROOT = os.path.dirname(os.path.dirname(HERE)).replace(os.path.sep, '/') @@ -85,22 +84,6 @@ if __name__ == "__main__": merge_app_directories(root_paths, destination=args.merge) root_paths = [args.merge] - # helm_values = create_helm_chart( - # root_paths, - # tag=args.tag, - # registry=args.registry, - # domain=args.domain, - # local=args.local, - # secured=not args.unsecured, - # output_path=args.output_path, - # exclude=args.exclude, - # include=args.include, - # registry_secret=args.registry_secret, - # tls=not args.no_tls, - # env=envs, - # namespace=args.namespace - # ) - if not args.docker_compose: helm_values = create_helm_chart( root_paths, @@ -152,7 +135,7 @@ if __name__ == "__main__": if not args.docker_compose: create_skaffold_configuration(merged_root_paths, helm_values) else: - create_skaffold_compose_configuration(merged_root_paths, helm_values) + create_skaffold_configuration(merged_root_paths, helm_values, backend_deploy=COMPOSE_ENGINE) create_vscode_debug_configuration(root_paths, helm_values) hosts_info(helm_values) From 9f75c9c109f0591ff845c20d370aba21e93fc74e Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 7 Feb 2024 11:30:18 -0600 Subject: [PATCH 014/210] CH-100 Fix issue with entrypoint --- deployment-configuration/compose/templates/auto-compose.yaml | 3 +++ tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 5b4893ba..43bd8401 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -29,6 +29,9 @@ services: reservations: cpus: {{ $deployment.resources.requests.cpu | default "25m" }} memory: {{ trimSuffix "i" 
$deployment.resources.requests.memory | default "32M" }} + {{- with $deployment.command }} + entrypoint: {{ cat . $deployment.args }} + {{- end }} environment: - CH_CURRENT_APP_NAME={{ $app_name | quote }} diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 2cf768a4..a935899e 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -484,8 +484,8 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path): task_main_file = identify_unicorn_based_main(mains_candidates, app_path) if task_main_file: - helm_values[KEY_HARNESS]['deployment']['command'] = ['python'] - helm_values[KEY_HARNESS]['deployment']['args'] = [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'] + helm_values[KEY_HARNESS]['deployment']['command'] = 'python' + helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py' def get_included_with_dependencies(values, include): From c159a4f0d266592d4269e4911969065bd7acb764 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 8 Feb 2024 08:18:56 -0600 Subject: [PATCH 015/210] CH-100 Remove generation of chart files for docker-compose --- .../ch_cli_tools/dockercompose.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index a935899e..9abcd565 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -546,18 +546,18 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat merge_configuration_directories(f"{resources_dir}", f"{dest_dir}") - subchart_dir = app_path / 'deploy/charts' - if subchart_dir.exists(): - dest_dir = dest_helm_chart_path / 'charts' / app_name - - 
logging.info( - "Collecting templates for application %s to %s", app_name, dest_dir) - if dest_dir.exists(): - logging.warning( - "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") - else: - shutil.copytree(subchart_dir, dest_dir) + # subchart_dir = app_path / 'deploy/charts' + # if subchart_dir.exists(): + # dest_dir = dest_helm_chart_path / 'charts' / app_name + + # logging.info( + # "Collecting templates for application %s to %s", app_name, dest_dir) + # if dest_dir.exists(): + # logging.warning( + # "Merging/overriding all files in directory %s", dest_dir) + # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") + # else: + # shutil.copytree(subchart_dir, dest_dir) def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): From 02bd318b84667e8cfc4a3a94fc0e2020c2ff79ac Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 9 Feb 2024 10:34:44 -0600 Subject: [PATCH 016/210] CH-100 Rollback default per-app helm template directory --- .../argo/deploy/templates/{helm => }/argo-sa.yaml | 0 .../deploy/templates/{helm => }/broker-config.yml | 0 .../events/deploy/templates/{helm => }/configmap.yaml | 0 .../deploy/templates/{helm => }/deployments.yml | 0 .../events/deploy/templates/{helm => }/roles.yml | 0 .../events/deploy/templates/{helm => }/services.yml | 0 .../events/deploy/templates/{helm => }/zoo-config.yml | 0 .../templates/{helm => }/_helpers-auth-rework.tpl | 0 .../deploy/templates/{helm => }/_helpers-names.tpl | 0 .../deploy/templates/{helm => }/_helpers.tpl | 0 .../templates/{helm => }/hub/_helpers-passwords.tpl | 0 .../deploy/templates/{helm => }/hub/configmap.yaml | 0 .../deploy/templates/{helm => }/hub/deployment.yaml | 0 .../deploy/templates/{helm => }/hub/netpol.yaml | 0 .../deploy/templates/{helm => }/hub/pdb.yaml | 0 .../deploy/templates/{helm => }/hub/pvc.yaml | 0 .../deploy/templates/{helm => }/hub/rbac.yaml | 0 .../deploy/templates/{helm => 
}/hub/secret.yaml | 0 .../deploy/templates/{helm => }/hub/service.yaml | 0 .../{helm => }/image-puller/_helpers-daemonset.tpl | 0 .../{helm => }/image-puller/daemonset-continuous.yaml | 0 .../{helm => }/image-puller/daemonset-hook.yaml | 0 .../deploy/templates/{helm => }/image-puller/job.yaml | 0 .../templates/{helm => }/image-puller/rbac.yaml | 0 .../templates/{helm => }/proxy/autohttps/_README.txt | 0 .../{helm => }/proxy/autohttps/configmap.yaml | 0 .../{helm => }/proxy/autohttps/deployment.yaml | 0 .../templates/{helm => }/proxy/autohttps/rbac.yaml | 0 .../templates/{helm => }/proxy/autohttps/service.yaml | 0 .../deploy/templates/{helm => }/proxy/deployment.yaml | 0 .../deploy/templates/{helm => }/proxy/netpol.yaml | 0 .../deploy/templates/{helm => }/proxy/pdb.yaml | 0 .../deploy/templates/{helm => }/proxy/secret.yaml | 0 .../deploy/templates/{helm => }/proxy/service.yaml | 0 .../{helm => }/scheduling/_scheduling-helpers.tpl | 0 .../{helm => }/scheduling/priorityclass.yaml | 0 .../{helm => }/scheduling/user-placeholder/pdb.yaml | 0 .../scheduling/user-placeholder/priorityclass.yaml | 0 .../scheduling/user-placeholder/statefulset.yaml | 0 .../scheduling/user-scheduler/configmap.yaml | 0 .../scheduling/user-scheduler/deployment.yaml | 0 .../{helm => }/scheduling/user-scheduler/pdb.yaml | 0 .../{helm => }/scheduling/user-scheduler/rbac.yaml | 0 .../templates/{helm => }/singleuser/netpol.yaml | 0 .../deploy/templates/{helm => }/_helpers.tpl | 0 .../deploy/templates/{helm => }/clusterrole.yaml | 0 .../templates/{helm => }/clusterrolebinding.yaml | 0 .../deploy/templates/{helm => }/nfs-server.yaml | 0 .../templates/{helm => }/podsecuritypolicy.yaml | 0 .../nfsserver/deploy/templates/{helm => }/role.yaml | 0 .../deploy/templates/{helm => }/rolebinding.yaml | 0 .../deploy/templates/{helm => }/serviceaccount.yaml | 0 .../deploy/templates/{helm => }/storageclass.yaml | 0 .../sentry/deploy/templates/{helm => }/redis.yaml | 0 .../compose/templates/auto-compose.yaml | 11 
+++++++---- .../ch_cli_tools/dockercompose.py | 2 +- tools/deployment-cli-tools/ch_cli_tools/helm.py | 2 +- 57 files changed, 9 insertions(+), 6 deletions(-) rename applications/argo/deploy/templates/{helm => }/argo-sa.yaml (100%) rename applications/events/deploy/templates/{helm => }/broker-config.yml (100%) rename applications/events/deploy/templates/{helm => }/configmap.yaml (100%) rename applications/events/deploy/templates/{helm => }/deployments.yml (100%) rename applications/events/deploy/templates/{helm => }/roles.yml (100%) rename applications/events/deploy/templates/{helm => }/services.yml (100%) rename applications/events/deploy/templates/{helm => }/zoo-config.yml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/_helpers-auth-rework.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/_helpers-names.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/_helpers.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/_helpers-passwords.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/netpol.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/pvc.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/secret.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/hub/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/_helpers-daemonset.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/daemonset-continuous.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/daemonset-hook.yaml (100%) 
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/job.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/_README.txt (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/netpol.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/secret.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/proxy/service.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/_scheduling-helpers.tpl (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/priorityclass.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/priorityclass.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/statefulset.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/configmap.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/deployment.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/pdb.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => 
}/scheduling/user-scheduler/rbac.yaml (100%) rename applications/jupyterhub/deploy/templates/{helm => }/singleuser/netpol.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/_helpers.tpl (100%) rename applications/nfsserver/deploy/templates/{helm => }/clusterrole.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/clusterrolebinding.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/nfs-server.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/podsecuritypolicy.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/role.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/rolebinding.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/serviceaccount.yaml (100%) rename applications/nfsserver/deploy/templates/{helm => }/storageclass.yaml (100%) rename applications/sentry/deploy/templates/{helm => }/redis.yaml (100%) diff --git a/applications/argo/deploy/templates/helm/argo-sa.yaml b/applications/argo/deploy/templates/argo-sa.yaml similarity index 100% rename from applications/argo/deploy/templates/helm/argo-sa.yaml rename to applications/argo/deploy/templates/argo-sa.yaml diff --git a/applications/events/deploy/templates/helm/broker-config.yml b/applications/events/deploy/templates/broker-config.yml similarity index 100% rename from applications/events/deploy/templates/helm/broker-config.yml rename to applications/events/deploy/templates/broker-config.yml diff --git a/applications/events/deploy/templates/helm/configmap.yaml b/applications/events/deploy/templates/configmap.yaml similarity index 100% rename from applications/events/deploy/templates/helm/configmap.yaml rename to applications/events/deploy/templates/configmap.yaml diff --git a/applications/events/deploy/templates/helm/deployments.yml b/applications/events/deploy/templates/deployments.yml similarity index 100% rename from applications/events/deploy/templates/helm/deployments.yml 
rename to applications/events/deploy/templates/deployments.yml diff --git a/applications/events/deploy/templates/helm/roles.yml b/applications/events/deploy/templates/roles.yml similarity index 100% rename from applications/events/deploy/templates/helm/roles.yml rename to applications/events/deploy/templates/roles.yml diff --git a/applications/events/deploy/templates/helm/services.yml b/applications/events/deploy/templates/services.yml similarity index 100% rename from applications/events/deploy/templates/helm/services.yml rename to applications/events/deploy/templates/services.yml diff --git a/applications/events/deploy/templates/helm/zoo-config.yml b/applications/events/deploy/templates/zoo-config.yml similarity index 100% rename from applications/events/deploy/templates/helm/zoo-config.yml rename to applications/events/deploy/templates/zoo-config.yml diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl rename to applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/_helpers-names.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl rename to applications/jupyterhub/deploy/templates/_helpers-names.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers.tpl b/applications/jupyterhub/deploy/templates/_helpers.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/_helpers.tpl rename to applications/jupyterhub/deploy/templates/_helpers.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl b/applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl similarity index 100% rename from 
applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl rename to applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/hub/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml rename to applications/jupyterhub/deploy/templates/hub/configmap.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/hub/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml rename to applications/jupyterhub/deploy/templates/hub/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/hub/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml rename to applications/jupyterhub/deploy/templates/hub/netpol.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/hub/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml rename to applications/jupyterhub/deploy/templates/hub/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml b/applications/jupyterhub/deploy/templates/hub/pvc.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml rename to applications/jupyterhub/deploy/templates/hub/pvc.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/hub/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml rename to applications/jupyterhub/deploy/templates/hub/rbac.yaml diff --git 
a/applications/jupyterhub/deploy/templates/helm/hub/secret.yaml b/applications/jupyterhub/deploy/templates/hub/secret.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/secret.yaml rename to applications/jupyterhub/deploy/templates/hub/secret.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/hub/service.yaml b/applications/jupyterhub/deploy/templates/hub/service.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/hub/service.yaml rename to applications/jupyterhub/deploy/templates/hub/service.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl rename to applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml b/applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml rename to applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml b/applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml rename to applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/image-puller/job.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml rename to 
applications/jupyterhub/deploy/templates/image-puller/job.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml rename to applications/jupyterhub/deploy/templates/image-puller/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt rename to applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml rename to applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml rename to applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml rename to applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml similarity index 100% rename from 
applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml rename to applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml rename to applications/jupyterhub/deploy/templates/proxy/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml rename to applications/jupyterhub/deploy/templates/proxy/netpol.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml rename to applications/jupyterhub/deploy/templates/proxy/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml b/applications/jupyterhub/deploy/templates/proxy/secret.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml rename to applications/jupyterhub/deploy/templates/proxy/secret.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/service.yaml b/applications/jupyterhub/deploy/templates/proxy/service.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/proxy/service.yaml rename to applications/jupyterhub/deploy/templates/proxy/service.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl b/applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl rename to 
applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml rename to applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml rename to 
applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml diff --git a/applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml similarity index 100% rename from applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml rename to applications/jupyterhub/deploy/templates/singleuser/netpol.yaml diff --git a/applications/nfsserver/deploy/templates/helm/_helpers.tpl b/applications/nfsserver/deploy/templates/_helpers.tpl similarity index 100% rename from applications/nfsserver/deploy/templates/helm/_helpers.tpl rename to applications/nfsserver/deploy/templates/_helpers.tpl diff --git a/applications/nfsserver/deploy/templates/helm/clusterrole.yaml b/applications/nfsserver/deploy/templates/clusterrole.yaml similarity index 100% 
rename from applications/nfsserver/deploy/templates/helm/clusterrole.yaml rename to applications/nfsserver/deploy/templates/clusterrole.yaml diff --git a/applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml b/applications/nfsserver/deploy/templates/clusterrolebinding.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml rename to applications/nfsserver/deploy/templates/clusterrolebinding.yaml diff --git a/applications/nfsserver/deploy/templates/helm/nfs-server.yaml b/applications/nfsserver/deploy/templates/nfs-server.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/nfs-server.yaml rename to applications/nfsserver/deploy/templates/nfs-server.yaml diff --git a/applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml b/applications/nfsserver/deploy/templates/podsecuritypolicy.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml rename to applications/nfsserver/deploy/templates/podsecuritypolicy.yaml diff --git a/applications/nfsserver/deploy/templates/helm/role.yaml b/applications/nfsserver/deploy/templates/role.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/role.yaml rename to applications/nfsserver/deploy/templates/role.yaml diff --git a/applications/nfsserver/deploy/templates/helm/rolebinding.yaml b/applications/nfsserver/deploy/templates/rolebinding.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/rolebinding.yaml rename to applications/nfsserver/deploy/templates/rolebinding.yaml diff --git a/applications/nfsserver/deploy/templates/helm/serviceaccount.yaml b/applications/nfsserver/deploy/templates/serviceaccount.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/serviceaccount.yaml rename to applications/nfsserver/deploy/templates/serviceaccount.yaml diff --git 
a/applications/nfsserver/deploy/templates/helm/storageclass.yaml b/applications/nfsserver/deploy/templates/storageclass.yaml similarity index 100% rename from applications/nfsserver/deploy/templates/helm/storageclass.yaml rename to applications/nfsserver/deploy/templates/storageclass.yaml diff --git a/applications/sentry/deploy/templates/helm/redis.yaml b/applications/sentry/deploy/templates/redis.yaml similarity index 100% rename from applications/sentry/deploy/templates/helm/redis.yaml rename to applications/sentry/deploy/templates/redis.yaml diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 43bd8401..cdf8dddd 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -45,10 +45,13 @@ services: - {{ .name }}={{ .value | quote }} {{- end }} {{- with $app_config.harness.dependencies.soft }} - # links: - # {{- range . }} - # - {{ . }} - # {{- end }} + links: + {{- range . }} + - {{ . }} + {{- with $app_config.harness.domain }} + :{{- . 
}} + {{- end }} + {{- end }} {{- end }} {{- with $app_config.harness.dependencies.hard }} depends_on: diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 9abcd565..2c2a2c35 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -525,7 +525,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) if app_name in exclude or (include and not any(inc in app_name for inc in include)): continue - template_dir = app_path / 'deploy' / 'templates' / templates_path + template_dir = app_path / 'deploy' / f'templates-{templates_path}' if template_dir.exists(): dest_dir = dest_helm_chart_path / 'templates' / app_name diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index 9bd43b8c..64683197 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -521,7 +521,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), i app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) if app_name in exclude or (include and not any(inc in app_name for inc in include)): continue - template_dir = os.path.join(app_path, 'deploy', 'templates', HELM_PATH) + template_dir = os.path.join(app_path, 'deploy', 'templates') if os.path.exists(template_dir): dest_dir = os.path.join( dest_helm_chart_path, 'templates', app_name) From 1a570647182d3f89dd057cbe7a8be770fa39b52a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 9 Feb 2024 10:35:41 -0600 Subject: [PATCH 017/210] CH-100 Add subdomain configuration --- deployment-configuration/compose/templates/auto-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index cdf8dddd..9ba6a3ef 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -48,8 +48,8 @@ services: links: {{- range . }} - {{ . }} - {{- with $app_config.harness.domain }} - :{{- . }} + {{- with $app_config.harness.subdomain }} + {{- ":" }}{{ . }}.{{ $.Values.domain }} {{- end }} {{- end }} {{- end }} From b97c19c827e3dbd4a51ff2df273b0e63e810be1a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 9 Feb 2024 12:20:31 -0600 Subject: [PATCH 018/210] CH-100 Fix bad "links" generation --- .../compose/templates/auto-compose.yaml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 9ba6a3ef..b15d32cd 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -47,10 +47,7 @@ services: {{- with $app_config.harness.dependencies.soft }} links: {{- range . }} - - {{ . }} - {{- with $app_config.harness.subdomain }} - {{- ":" }}{{ . }}.{{ $.Values.domain }} - {{- end }} + - {{ . }}:{{ . 
}}.{{ $.Values.domain }} {{- end }} {{- end }} {{- with $app_config.harness.dependencies.hard }} @@ -77,7 +74,7 @@ services: {{- end }} traefik: - image: "traefik:v2.2" + image: "traefik:v2.10" container_name: "traefik" networks: - ch From 85dcfd93cb131bd29097479ee6bcdd08ee966dbc Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 12 Feb 2024 12:54:00 -0600 Subject: [PATCH 019/210] CH-100 Add support for aliases and service links --- .../compose/templates/auto-compose.yaml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index b15d32cd..512ba0db 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -2,16 +2,22 @@ version: '3.7' services: {{- range $app_name, $app_config := .Values.apps }} - {{- if has $app_name (list "argo" "nfsserver" "workflows" "events" ) }} {{- /* We deactivate generation for some services */}} + {{ $deployment := $app_config.harness.deployment }} + {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} {{- end}} - {{ $deployment := $app_config.harness.deployment }} {{ $app_name }}: {{- with $app_config.domain }} domainname: {{ . }} {{- end }} networks: - - ch + {{- if ne $app_config.harness.service.name $app_name}} + ch: + aliases: + - {{ $app_config.harness.service.name }} + {{- else }} + - ch + {{- end}} {{- with $app_config.image }} image: {{ . }} {{- end }} @@ -47,7 +53,8 @@ services: {{- with $app_config.harness.dependencies.soft }} links: {{- range . }} - - {{ . }}:{{ . }}.{{ $.Values.domain }} + {{- $service_name := (get $.Values.apps .).harness.service.name }} + - {{ . 
}}:{{ $service_name }}.{{ $.Values.domain }} {{- end }} {{- end }} {{- with $app_config.harness.dependencies.hard }} From 14292113f978415b7cff3a69e937785602449622 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Tue, 13 Feb 2024 18:38:03 +0100 Subject: [PATCH 020/210] CH-118 update node version --- .../base-images/cloudharness-frontend-build/Dockerfile | 2 +- test/test-e2e/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/base-images/cloudharness-frontend-build/Dockerfile b/infrastructure/base-images/cloudharness-frontend-build/Dockerfile index ec4c5990..412125e6 100644 --- a/infrastructure/base-images/cloudharness-frontend-build/Dockerfile +++ b/infrastructure/base-images/cloudharness-frontend-build/Dockerfile @@ -1,3 +1,3 @@ -FROM node:15.5 +FROM node:20 diff --git a/test/test-e2e/Dockerfile b/test/test-e2e/Dockerfile index 48c8f855..ef17adee 100644 --- a/test/test-e2e/Dockerfile +++ b/test/test-e2e/Dockerfile @@ -1,4 +1,4 @@ -FROM node:lts-slim +FROM node:20 # Install latest chrome dev package and fonts to support major charsets (Chinese, Japanese, Arabic, Hebrew, Thai and a few others) # Note: this installs the necessary libs to make the bundled version of Chromium that Puppeteer From 0f81d23699d2a8d99597ae8275b0a317c6647d7e Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Tue, 13 Feb 2024 18:39:08 +0100 Subject: [PATCH 021/210] CH-118 clean unused constant --- docs/dev.md | 1 - libraries/cloudharness-utils/cloudharness_utils/constants.py | 4 ---- 2 files changed, 5 deletions(-) diff --git a/docs/dev.md b/docs/dev.md index 113e0c86..996e6350 100644 --- a/docs/dev.md +++ b/docs/dev.md @@ -93,7 +93,6 @@ This file is part of the CloudHarness runtime. Other constants are located there as shown in the following code extract. ```python -NODE_BUILD_IMAGE = 'node:8.16.1-alpine' APPLICATION_TEMPLATE_PATH = 'application-templates' # ... 
APPS_PATH = 'applications' diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py index e2a6a48b..53282691 100644 --- a/libraries/cloudharness-utils/cloudharness_utils/constants.py +++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py @@ -1,9 +1,5 @@ import os -NODE_BUILD_IMAGE = 'node:8.16.1-alpine' - - - APPLICATION_TEMPLATE_PATH = 'application-templates' DEFAULT_MERGE_PATH = ".overrides" From 1ac82264e80fa1d19730d85036878ed4c75c6b81 Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 14 Feb 2024 10:34:54 -0600 Subject: [PATCH 022/210] CH-100 Add first support for auto databases --- .../compose/templates/auto-compose.yaml | 23 ++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 512ba0db..80f4845b 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -78,8 +78,29 @@ services: {{- end }} {{- end}} {{- end }} + {{- with $app_config.harness.database }} + {{- if not .auto }} + {{- continue}} + {{- end }} + # Database for {{ $app_name }}, type {{ .type }} named {{ .name }} + {{ .name }}: + {{- $db_infos := (get . 
.type) }} + image: {{ $db_infos.image }} + expose: + {{- range $port := $db_infos.ports }} + - {{ $port.port | quote }} + {{- end }} + {{- with .resources }} + resources: + limits: + cpus: {{ .limits.cpu | default "1000m" }} + memory: {{ trimSuffix "i" .limits.memory | default "2G" }} + reservations: + cpus: {{ .requests.cpu | default "100m" }} + memory: {{ trimSuffix "i" .requests.memory | default "512M" }} + {{- end }} + {{- end}} {{- end }} - traefik: image: "traefik:v2.10" container_name: "traefik" From 56e22b7b883a57cc0eb9c672c5b00f43cc8172a0 Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 14 Feb 2024 10:41:32 -0600 Subject: [PATCH 023/210] CH-100 Add finer grain port handling --- .../compose/templates/auto-compose.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 80f4845b..c432d522 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -24,6 +24,10 @@ services: {{- with $app_config.harness.service.port }} ports: - "{{ . }}:{{ $app_config.harness.deployment.port }}" + {{- end }} + {{- with $app_config.harness.deployment.port }} + expose: + - {{ . | quote }} {{- end}} deploy: mode: "replicated" @@ -85,6 +89,8 @@ services: # Database for {{ $app_name }}, type {{ .type }} named {{ .name }} {{ .name }}: {{- $db_infos := (get . 
.type) }} + networks: + ch: image: {{ $db_infos.image }} expose: {{- range $port := $db_infos.ports }} From 336b5585f7fa059b282cede4fcedd6023bcf563a Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 08:11:42 -0600 Subject: [PATCH 024/210] CH-100 Change way port is exposed to outside world --- .../compose/templates/auto-compose.yaml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index c432d522..894814f6 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -97,13 +97,14 @@ services: - {{ $port.port | quote }} {{- end }} {{- with .resources }} - resources: - limits: - cpus: {{ .limits.cpu | default "1000m" }} - memory: {{ trimSuffix "i" .limits.memory | default "2G" }} - reservations: - cpus: {{ .requests.cpu | default "100m" }} - memory: {{ trimSuffix "i" .requests.memory | default "512M" }} + deploy: + resources: + limits: + cpus: {{ .limits.cpu | default "1000m" }} + memory: {{ trimSuffix "i" .limits.memory | default "2G" }} + reservations: + cpus: {{ .requests.cpu | default "100m" }} + memory: {{ trimSuffix "i" .requests.memory | default "512M" }} {{- end }} {{- end}} {{- end }} From a004ffb80583dee5be24181789e8ca13cc8f508a Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 11:02:12 -0600 Subject: [PATCH 025/210] CH-100 Fix issue with env var quoting --- .../compose/templates/auto-compose.yaml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 894814f6..138826e3 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -21,10 +21,12 @@ services: {{- 
with $app_config.image }} image: {{ . }} {{- end }} + {{- if eq $.Values.mainapp $app_name }} {{- with $app_config.harness.service.port }} ports: - "{{ . }}:{{ $app_config.harness.deployment.port }}" {{- end }} + {{- end }} {{- with $app_config.harness.deployment.port }} expose: - {{ . | quote }} @@ -40,19 +42,20 @@ services: cpus: {{ $deployment.resources.requests.cpu | default "25m" }} memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }} {{- with $deployment.command }} - entrypoint: {{ cat . $deployment.args }} + # entrypoint: {{ cat . $deployment.args }} {{- end }} environment: - - CH_CURRENT_APP_NAME={{ $app_name | quote }} + - CH_CURRENT_APP_NAME={{ $app_name }} + - CH_VALUES_PATH=/opt/cloudharness/resources/allvalues.yaml {{- range $.Values.env }} - - {{ .name }}={{ .value | quote }} + - {{ .name }}={{ .value }} {{- end }} {{- /*{{- range $.Values.env }} - - {{ .name }}={{ .value | quote }} + - {{ .name }}={{ .value }} {{- end }} */}} {{- range $app_config.harness.env }} - - {{ .name }}={{ .value | quote }} + - {{ .name }}={{ .value }} {{- end }} {{- with $app_config.harness.dependencies.soft }} links: @@ -67,8 +70,9 @@ services: - {{ . 
}} {{- end }} {{- end }} - {{- if or $deployment.volume $app_config.harness.resources }} volumes: + - ./compose/values.yaml:/opt/cloudharness/resources/allvalues.yaml:ro + {{- if or $deployment.volume $app_config.harness.resources }} {{- with $deployment.volume }} - type: volume source: {{ .name }} From f73108c2fa976d7654c76798cde9f2697e2ac23c Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 11:20:41 -0600 Subject: [PATCH 026/210] CH-100 Add special behavior to produce allvalues.yaml --- .../compose/templates/auto-compose.yaml | 2 +- .../ch_cli_tools/dockercompose.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 138826e3..3896ae13 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -71,7 +71,7 @@ services: {{- end }} {{- end }} volumes: - - ./compose/values.yaml:/opt/cloudharness/resources/allvalues.yaml:ro + - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro {{- if or $deployment.volume $app_config.harness.resources }} {{- with $deployment.volume }} - type: volume diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 2c2a2c35..1c51abd6 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -11,6 +11,7 @@ import tarfile from docker import from_env as DockerClient from pathlib import Path +import copy from . 
import HERE, CH_ROOT @@ -134,6 +135,12 @@ def process_values(self) -> HarnessMainConfig: merge_to_yaml_file({'metadata': {'namespace': self.namespace}, 'name': helm_values['name']}, self.helm_chart_path) validate_helm_values(merged_values) + + # All values save + all_values = self.__get_default_helm_values_with_secrets(merged_values) + + merge_to_yaml_file(all_values, self.dest_deployment_path / 'allvalues.yaml') + return HarnessMainConfig.from_dict(merged_values) def __process_applications(self, helm_values, base_image_name): @@ -234,6 +241,18 @@ def __get_default_helm_values(self): return helm_values + def __get_default_helm_values_with_secrets(self, helm_values): + helm_values = copy.deepcopy(helm_values) + # {{- $values_copy := deepCopy .Values }} + # {{- range $key, $val := .Values.apps }} + # {{- $new_secrets := dict "apps" (dict $key (dict "harness" (dict "secrets"))) }} + # {{- $tmp := mergeOverwrite $values_copy $new_secrets }} + # {{- end }} + # {{ $values_copy | toYaml | indent 4 }} + for key, val in helm_values['apps'].items(): + helm_values['apps'][key]['harness']['secrets'] = {} + return helm_values + def create_tls_certificate(self, helm_values): if not self.tls: helm_values['tls'] = None From 2370b20c646e2cb4c2c87221e746d23ab1ad5ec3 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 12:08:01 -0600 Subject: [PATCH 027/210] CH-100 Add actual docker-compose.yaml generation in harness-deployment --- .../compose/templates/auto-compose.yaml | 1 - .../ch_cli_tools/dockercompose.py | 12 ++++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 3896ae13..4999b46f 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -46,7 +46,6 @@ services: {{- end }} environment: - CH_CURRENT_APP_NAME={{ $app_name }} - - 
CH_VALUES_PATH=/opt/cloudharness/resources/allvalues.yaml {{- range $.Values.env }} - {{ .name }}={{ .value }} diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 1c51abd6..dfe0bf5b 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -141,8 +141,20 @@ def process_values(self) -> HarnessMainConfig: merge_to_yaml_file(all_values, self.dest_deployment_path / 'allvalues.yaml') + self.generate_docker_compose_yaml() + return HarnessMainConfig.from_dict(merged_values) + def generate_docker_compose_yaml(self): + compose_templates = self.dest_deployment_path + dest_compose_yaml = self.dest_deployment_path.parent / "docker-compose.yaml" + + logging.info(f'Generate docker compose configuration in: {dest_compose_yaml}, using templates from {compose_templates}') + command = f"helm template {compose_templates} > {dest_compose_yaml}" + + subprocess.call(command, shell=True) + + def __process_applications(self, helm_values, base_image_name): for root_path in self.root_paths: app_values = init_app_values( From 5ed630d8dcb9a12dbd7238dad51ad6ed03e3bbcd Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 12:40:31 -0600 Subject: [PATCH 028/210] CH-100 Add first dedicated templates for postgres --- .../compose/templates/auto-compose.yaml | 7 ++++--- .../compose/templates/auto-database-postgres.yaml | 7 +++++++ 2 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 deployment-configuration/compose/templates/auto-database-postgres.yaml diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 4999b46f..22f8c5a2 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -86,9 +86,9 @@ services: {{- end}} {{- end }} {{- with 
$app_config.harness.database }} - {{- if not .auto }} - {{- continue}} - {{- end }} + {{- if not .auto }} + {{- continue}} + {{- end }} # Database for {{ $app_name }}, type {{ .type }} named {{ .name }} {{ .name }}: {{- $db_infos := (get . .type) }} @@ -109,6 +109,7 @@ services: cpus: {{ .requests.cpu | default "100m" }} memory: {{ trimSuffix "i" .requests.memory | default "512M" }} {{- end }} + {{- include "deploy_utils.database.postgres" . }} {{- end}} {{- end }} traefik: diff --git a/deployment-configuration/compose/templates/auto-database-postgres.yaml b/deployment-configuration/compose/templates/auto-database-postgres.yaml new file mode 100644 index 00000000..d832193f --- /dev/null +++ b/deployment-configuration/compose/templates/auto-database-postgres.yaml @@ -0,0 +1,7 @@ +{{- define "deploy_utils.database.postgres" }} + environment: + - POSTGRES_DB={{ .postgres.initialdb | quote }} + - POSTGRES_USER={{ .user | quote }} + - POSTGRES_PASSWORD={{ .pass | quote }} + - PGDATA=/data/db/pgdata +{{- end }} \ No newline at end of file From 111a4f3e5be5d7655faa783009106a0dafebfd6f Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 15 Feb 2024 13:09:12 -0600 Subject: [PATCH 029/210] CH-100 Add volumes for db --- .../compose/templates/auto-compose.yaml | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 22f8c5a2..f14c7e11 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -109,7 +109,16 @@ services: cpus: {{ .requests.cpu | default "100m" }} memory: {{ trimSuffix "i" .requests.memory | default "512M" }} {{- end }} - {{- include "deploy_utils.database.postgres" . 
}} + volumes: + - type: volume + source: {{ .name }} + target: /data/db + {{- if eq .type "postgres" }} + - type: volume + source: dshm + target: /dev/shm + {{- include "deploy_utils.database.postgres" . }} + {{- end }} {{- end}} {{- end }} traefik: @@ -142,4 +151,10 @@ volumes: # this inclusion needs to be conditional {{- with $app_config.harness.deployment.volume }} {{ .name }}: {{- end }} + {{- with $app_config.harness.database }} + {{ .name }}: + {{- if eq .type "postgres" }} + dshm: + {{- end }} + {{- end }} {{- end }} \ No newline at end of file From ce4596ab1b62110c56c0bb1ef339cf9cc7cbdc91 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 16 Feb 2024 06:46:34 -0600 Subject: [PATCH 030/210] CH-100 Add first template for events --- .../templates-compose/events-deployment.yaml | 13 +++++ .../compose/templates/auto-compose.yaml | 47 +++++++++++-------- 2 files changed, 40 insertions(+), 20 deletions(-) create mode 100644 applications/events/deploy/templates-compose/events-deployment.yaml diff --git a/applications/events/deploy/templates-compose/events-deployment.yaml b/applications/events/deploy/templates-compose/events-deployment.yaml new file mode 100644 index 00000000..f16e7e38 --- /dev/null +++ b/applications/events/deploy/templates-compose/events-deployment.yaml @@ -0,0 +1,13 @@ +{{- define "events.deployment" }} +{{- $nfs := .apps.nfsserver}} + +{{ $nfs.name }}: + image: {{ $nfs.harness.deployment.image }} + environment: + # NFS useDNS? 
{{ $nfs.nfs.useDNS }} + {{- if $nfs.nfs.useDNS }} + - NFS_SERVER={{ printf "nfs-server.%s.svc.cluster.local" .namespace }} + {{- end }} + - NFS_PATH={{ $nfs.nfs.path }} + - PROVISIONER_NAME={{ printf "%s-nfs-provisioner" .namespace }} +{{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index f14c7e11..b51aa02e 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -1,8 +1,32 @@ version: '3.7' services: + traefik: + image: "traefik:v2.10" + container_name: "traefik" + networks: + - ch + command: + - "--log.level=INFO" + - "--api.insecure=true" + - "--providers.docker=true" + - "--providers.docker.exposedbydefault=false" + - "--entrypoints.web.address=:80" + - "--entrypoints.websecure.address=:443" + - "--providers.file.directory=/etc/traefik/dynamic_conf" + ports: + - "80:80" + - "443:443" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + - "./certs/:/certs/:ro" + - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" + {{- range $app_name, $app_config := .Values.apps }} {{ $deployment := $app_config.harness.deployment }} + {{- if eq $app_name "nfsserver" }} + {{- include "events.deployment" $.Values}} + {{- end }} {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} {{- end}} @@ -120,28 +144,11 @@ services: {{- include "deploy_utils.database.postgres" . 
}} {{- end }} {{- end}} + {{- end }} - traefik: - image: "traefik:v2.10" - container_name: "traefik" - networks: - - ch - command: - - "--log.level=INFO" - - "--api.insecure=true" - - "--providers.docker=true" - - "--providers.docker.exposedbydefault=false" - - "--entrypoints.web.address=:80" - - "--entrypoints.websecure.address=:443" - - "--providers.file.directory=/etc/traefik/dynamic_conf" - ports: - - "80:80" - - "443:443" - volumes: - - "/var/run/docker.sock:/var/run/docker.sock:ro" - - "./certs/:/certs/:ro" - - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" + +# Network definition networks: ch: name: ch_network From 3868b56b476c97d0b3f2d84ea44c96f12339341a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 16 Feb 2024 07:04:48 -0600 Subject: [PATCH 031/210] CH-100 Refactor templates for the db --- .../compose/templates/auto-compose.yaml | 35 ++----------------- .../templates/auto-database-postgres.yaml | 10 +++--- .../compose/templates/auto-database.yaml | 32 +++++++++++++++++ 3 files changed, 39 insertions(+), 38 deletions(-) create mode 100644 deployment-configuration/compose/templates/auto-database.yaml diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index b51aa02e..0ac27f32 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -25,7 +25,7 @@ services: {{- range $app_name, $app_config := .Values.apps }} {{ $deployment := $app_config.harness.deployment }} {{- if eq $app_name "nfsserver" }} - {{- include "events.deployment" $.Values}} + {{- include "events.deployment" $.Values | indent 2 }} {{- end }} {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} @@ -113,41 +113,10 @@ services: {{- if not .auto }} {{- continue}} {{- end }} - # Database for {{ $app_name }}, type {{ .type }} named {{ .name }} - {{ .name }}: - {{- $db_infos := (get . 
.type) }} - networks: - ch: - image: {{ $db_infos.image }} - expose: - {{- range $port := $db_infos.ports }} - - {{ $port.port | quote }} - {{- end }} - {{- with .resources }} - deploy: - resources: - limits: - cpus: {{ .limits.cpu | default "1000m" }} - memory: {{ trimSuffix "i" .limits.memory | default "2G" }} - reservations: - cpus: {{ .requests.cpu | default "100m" }} - memory: {{ trimSuffix "i" .requests.memory | default "512M" }} - {{- end }} - volumes: - - type: volume - source: {{ .name }} - target: /data/db - {{- if eq .type "postgres" }} - - type: volume - source: dshm - target: /dev/shm - {{- include "deploy_utils.database.postgres" . }} - {{- end }} + {{- include "db.deployment" . | indent 2}} {{- end}} - {{- end }} - # Network definition networks: ch: diff --git a/deployment-configuration/compose/templates/auto-database-postgres.yaml b/deployment-configuration/compose/templates/auto-database-postgres.yaml index d832193f..b6db420c 100644 --- a/deployment-configuration/compose/templates/auto-database-postgres.yaml +++ b/deployment-configuration/compose/templates/auto-database-postgres.yaml @@ -1,7 +1,7 @@ {{- define "deploy_utils.database.postgres" }} - environment: - - POSTGRES_DB={{ .postgres.initialdb | quote }} - - POSTGRES_USER={{ .user | quote }} - - POSTGRES_PASSWORD={{ .pass | quote }} - - PGDATA=/data/db/pgdata + environment: + - POSTGRES_DB={{ .postgres.initialdb }} + - POSTGRES_USER={{ .user }} + - POSTGRES_PASSWORD={{ .pass }} + - PGDATA=/data/db/pgdata {{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml new file mode 100644 index 00000000..70bda63a --- /dev/null +++ b/deployment-configuration/compose/templates/auto-database.yaml @@ -0,0 +1,32 @@ +{{- define "db.deployment" }} +# Database type {{ .type }} named {{ .name }} +{{ .name }}: + {{- $db_infos := (get . 
.type) }} + networks: + ch: + image: {{ $db_infos.image }} + expose: + {{- range $port := $db_infos.ports }} + - {{ $port.port | quote }} + {{- end }} + {{- with .resources }} + deploy: + resources: + limits: + cpus: {{ .limits.cpu | default "1000m" }} + memory: {{ trimSuffix "i" .limits.memory | default "2G" }} + reservations: + cpus: {{ .requests.cpu | default "100m" }} + memory: {{ trimSuffix "i" .requests.memory | default "512M" }} + {{- end }} + volumes: + - type: volume + source: {{ .name }} + target: /data/db + {{- if eq .type "postgres" }} + - type: volume + source: dshm + target: /dev/shm + {{- include "deploy_utils.database.postgres" . }} + {{- end }} +{{- end }} \ No newline at end of file From a0476d6cd29401a486e96d8654490e784bd0bcac Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 16 Feb 2024 07:13:09 -0600 Subject: [PATCH 032/210] CH-100 Add /etc/hosts generation --- .../compose/templates/auto-compose.yaml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 0ac27f32..04db3327 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -104,11 +104,22 @@ services: {{- with $app_config.harness.resources }} {{- range .}} - type: bind - source: compose/resources/{{ $app_name }}/{{.src }} + source: compose/resources/{{ $app_name }}/{{ .src }} target: {{ .dst }} {{- end }} {{- end}} {{- end }} + {{- if $.Values.local }} + {{- $domain := $.Values.domain }} + {{- $ip := $.Values.localIp }} + extra_hosts: + - "{{ $.Values.domain }}:{{ $ip }}" + {{- range $app := $.Values.apps }} + {{- with $app.harness.subdomain}} + - "{{ . 
}}.{{ $domain }}:{{ $ip }}" + {{- end }} + {{- end }} + {{- end }} {{- with $app_config.harness.database }} {{- if not .auto }} {{- continue}} From bd3e423a08b34ece388d2cf175b9b6b270d4747a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 16 Feb 2024 07:27:20 -0600 Subject: [PATCH 033/210] CH-100 Add specific varenvs by container --- .../compose/templates/auto-compose.yaml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 04db3327..e05934b9 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -74,9 +74,13 @@ services: {{- range $.Values.env }} - {{ .name }}={{ .value }} {{- end }} - {{- /*{{- range $.Values.env }} - - {{ .name }}={{ .value }} - {{- end }} */}} + {{- with $.Values.apps.accounts }} + - CH_ACCOUNTS_CLIENT_SECRET={{ .client.secret }} + - CH_ACCOUNTS_REALM={{ $.Values.namespace }} + - CH_ACCOUNTS_AUTH_DOMAIN={{ printf "%s.%s" .harness.subdomain $.Values.domain }} + - CH_ACCOUNTS_CLIENT_ID={{ .client.id }} + - DOMAIN={{ $.Values.domain }} + {{- end}} {{- range $app_config.harness.env }} - {{ .name }}={{ .value }} {{- end }} @@ -110,6 +114,7 @@ services: {{- end}} {{- end }} {{- if $.Values.local }} + # Extra /etc/hosts list {{- $domain := $.Values.domain }} {{- $ip := $.Values.localIp }} extra_hosts: From 8851c48ef3777aef08144bcda1e309680cc878ca Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 20 Feb 2024 11:06:17 -0600 Subject: [PATCH 034/210] CH-100 Rename some templates --- .../nfsserver-deployment.yaml} | 5 +- .../compose/templates/auto-compose.yaml | 24 +++++----- .../ch_cli_tools/dockercompose.py | 46 ++++++++++++++++++- 3 files changed, 60 insertions(+), 15 deletions(-) rename applications/{events/deploy/templates-compose/events-deployment.yaml => 
nfsserver/deploy/templates-compose/nfsserver-deployment.yaml} (74%) diff --git a/applications/events/deploy/templates-compose/events-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml similarity index 74% rename from applications/events/deploy/templates-compose/events-deployment.yaml rename to applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml index f16e7e38..2dd8509b 100644 --- a/applications/events/deploy/templates-compose/events-deployment.yaml +++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml @@ -1,7 +1,10 @@ -{{- define "events.deployment" }} +{{- define "nfsserver.deployment" }} {{- $nfs := .apps.nfsserver}} {{ $nfs.name }}: + build: + context: {{ $nfs.build.context }} + dockerfile: {{ $nfs.build.dockerfile }} image: {{ $nfs.harness.deployment.image }} environment: # NFS useDNS? {{ $nfs.nfs.useDNS }} diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index e05934b9..f174d943 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -25,7 +25,7 @@ services: {{- range $app_name, $app_config := .Values.apps }} {{ $deployment := $app_config.harness.deployment }} {{- if eq $app_name "nfsserver" }} - {{- include "events.deployment" $.Values | indent 2 }} + {{- include "nfsserver.deployment" $.Values | indent 2 }} {{- end }} {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} @@ -114,16 +114,16 @@ services: {{- end}} {{- end }} {{- if $.Values.local }} - # Extra /etc/hosts list - {{- $domain := $.Values.domain }} - {{- $ip := $.Values.localIp }} - extra_hosts: - - "{{ $.Values.domain }}:{{ $ip }}" - {{- range $app := $.Values.apps }} - {{- with $app.harness.subdomain}} - - "{{ . 
}}.{{ $domain }}:{{ $ip }}" - {{- end }} - {{- end }} + # Extra /etc/hosts list + {{- $domain := $.Values.domain }} + {{- $ip := $.Values.localIp }} + extra_hosts: + - "{{ $.Values.domain }}={{ $ip }}" + {{- range $app := $.Values.apps }} + {{- with $app.harness.subdomain}} + - "{{ . }}.{{ $domain }}={{ $ip }}" + {{- end }} + {{- end }} {{- end }} {{- with $app_config.harness.database }} {{- if not .auto }} @@ -149,4 +149,4 @@ volumes: # this inclusion needs to be conditional dshm: {{- end }} {{- end }} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index dfe0bf5b..6563eac0 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -29,6 +29,7 @@ KEY_DEPLOYMENT = 'deployment' KEY_APPS = 'apps' KEY_TASK_IMAGES = 'task-images' +# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build" KEY_TEST_IMAGES = 'test-images' DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') @@ -180,6 +181,30 @@ def collect_app_values(self, app_base_path, base_image_name=None): app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) + # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None) + # # for dockerfile_path in app_path.rglob('**/Dockerfile'): + # # parent_name = dockerfile_path.parent.name.replace("-", "_") + # # if parent_name == app_key: + # # app_values['build'] = { + # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # # 'dockerfile': "Dockerfile", + # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # } + # # elif "tasks/" in f"{dockerfile_path}": + # # parent_name = parent_name.upper() + # # values.setdefault("task-images-build", {})[parent_name] = { + # # 'dockerfile': "Dockerfile", + # # 'context': 
os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # } + # # import ipdb; ipdb.set_trace() # fmt: skip + + # if dockerfile_path: + # app_values['build'] = { + # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # 'dockerfile': "Dockerfile", + # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # } + values[app_key] = dict_merge( values[app_key], app_values) if app_key in values else app_values @@ -201,10 +226,15 @@ def __assign_static_build_dependencies(self, helm_values): for dep in dependencies: if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] + # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = { + # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent), + # 'dockerfile': 'Dockerfile', + # } for image_name in helm_values[KEY_TASK_IMAGES].keys(): if image_name in self.exclude: del helm_values[KEY_TASK_IMAGES][image_name] + # del helm_values[KEY_TASK_IMAGES_BUILD][image_name] def __init_base_images(self, base_image_name): @@ -501,8 +531,20 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): task_path, app_path.parent)) img_name = image_name_from_dockerfile_path(task_name, base_image_name) - values[KEY_TASK_IMAGES][task_name] = self.image_tag( - img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) + # import ipdb; ipdb.set_trace() # fmt: skip + + # values[KEY_TASK_IMAGES][task_name] = self.image_tag( + # img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) + # values.setdefault(KEY_TASK_IMAGES_BUILD, {})[task_name] = { + # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent), + # 'dockerfile': 'Dockerfile', + # } + + values[KEY_TASK_IMAGES][task_name] = { + 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()), 
+ # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent), + # 'dockerfile': 'Dockerfile', + } return values From ced7cd2b189c2e40ab3f77e95fb3437eb5fa909a Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 20 Feb 2024 11:28:00 -0600 Subject: [PATCH 035/210] CH-100 Add first traeffik configuration --- .../templates-compose/nfsserver-deployment.yaml | 3 --- .../compose/templates/auto-compose.yaml | 9 +++++++++ .../ch_cli_tools/dockercompose.py | 12 +++++++----- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml index 2dd8509b..7e9b6819 100644 --- a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml +++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml @@ -2,9 +2,6 @@ {{- $nfs := .apps.nfsserver}} {{ $nfs.name }}: - build: - context: {{ $nfs.build.context }} - dockerfile: {{ $nfs.build.dockerfile }} image: {{ $nfs.harness.deployment.image }} environment: # NFS useDNS? {{ $nfs.nfs.useDNS }} diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index f174d943..11996c91 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -125,6 +125,15 @@ services: {{- end }} {{- end }} {{- end }} + labels: + - "traefik.enable=true" + {{- with $app_config.harness.service.port }} + - "traefik.http.services.{{ $app_name }}.loadbalancer.server.port={{ . 
}}" + {{- end }} + # - "traefik.http.middlewares.redirect-middleware.redirectscheme.scheme=https" + # - "traefik.http.routers.{{ .app_name }}.middlewares=redirect-middleware" + - "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)" + - "traefik.http.routers.{{ $app_name }}.entrypoints=web" {{- with $app_config.harness.database }} {{- if not .auto }} {{- continue}} diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 6563eac0..83e04024 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -540,11 +540,13 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): # 'dockerfile': 'Dockerfile', # } - values[KEY_TASK_IMAGES][task_name] = { - 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()), - # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent), - # 'dockerfile': 'Dockerfile', - } + # values[KEY_TASK_IMAGES][task_name] = { + # 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()), + # # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent), + # # 'dockerfile': 'Dockerfile', + # } + + values[KEY_TASK_IMAGES][task_name] = self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) return values From f7ebeb03606302e9ffc56bf010ad49ad36cf428b Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 20 Feb 2024 11:40:27 -0600 Subject: [PATCH 036/210] CH-100 Fix bad indentation in docker-compose.yaml --- deployment-configuration/compose/templates/auto-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 
11996c91..b175f2b5 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -125,7 +125,7 @@ services: {{- end }} {{- end }} {{- end }} - labels: + labels: - "traefik.enable=true" {{- with $app_config.harness.service.port }} - "traefik.http.services.{{ $app_name }}.loadbalancer.server.port={{ . }}" From 8fdcba6e840340b4a347eb6ec00c61c0547ee645 Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 20 Feb 2024 11:41:41 -0600 Subject: [PATCH 037/210] CH-100 Remove exposition of ports --- .../compose/templates/auto-compose.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index b175f2b5..f25f558e 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -46,10 +46,10 @@ services: image: {{ . }} {{- end }} {{- if eq $.Values.mainapp $app_name }} - {{- with $app_config.harness.service.port }} - ports: - - "{{ . }}:{{ $app_config.harness.deployment.port }}" - {{- end }} + # {{- with $app_config.harness.service.port }} + # ports: + # - "{{ . 
}}:{{ $app_config.harness.deployment.port }}" + # {{- end }} {{- end }} {{- with $app_config.harness.deployment.port }} expose: From 428a6191b0ec0fd5a09a59082b2d376785967328 Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 21 Feb 2024 06:48:39 -0600 Subject: [PATCH 038/210] CH-100 Add post-process mechanism to generate files --- .../ch_cli_tools/dockercompose.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 83e04024..c088dc44 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -155,6 +155,32 @@ def generate_docker_compose_yaml(self): subprocess.call(command, shell=True) + self.__post_process_multiple_document_docker_compose(dest_compose_yaml) + + def __post_process_multiple_document_docker_compose(self, yaml_document): + if not yaml_document.exists(): + logging.warning("Something went wrong during the docker-compose.yaml generation, cannot post-process it") + return + + with open(yaml_document, "r") as f: + documents = yaml.safe_load_all(f) + + for document in documents: + if "cloudharness-metadata" in document: + document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"] + logging.info("Post-process docker-compose.yaml, creating %s", document_path) + document_path.write_text(document["data"]) + else: + with open(yaml_document, "w") as f: + yaml.dump(document, f) + + # cloudharness-metadata: + # path: secrets.yaml + + # data: | + # sdfmsldkf + # sdfmlskdfmslkdfs + # sdmlksdf def __process_applications(self, helm_values, base_image_name): for root_path in self.root_paths: From 9d6c8b2f015c5269cb44dd27c9ae96038a39b46e Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 21 Feb 2024 08:12:00 -0600 Subject: [PATCH 039/210] CH-100 Add new templates --- .../deploy/templates-compose/deployments.yaml | 97 
+++++++++++++++++++ .../compose/templates/allvalues-template.yaml | 20 ++++ 2 files changed, 117 insertions(+) create mode 100644 applications/events/deploy/templates-compose/deployments.yaml create mode 100644 deployment-configuration/compose/templates/allvalues-template.yaml diff --git a/applications/events/deploy/templates-compose/deployments.yaml b/applications/events/deploy/templates-compose/deployments.yaml new file mode 100644 index 00000000..0001fde4 --- /dev/null +++ b/applications/events/deploy/templates-compose/deployments.yaml @@ -0,0 +1,97 @@ +{{- define "events.deployment" }} +events: + networks: + - ch + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + ports: + - "9094:9092" + expose: + - 5555 + - 9094 + - 9092 + environment: + - CLASSPATH=/opt/kafka/libs/extensions/* + - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties + - JMX_PORT=5555 + command: + - "./bin/kafka-server-start.sh" + - "/etc/kafka/server.properties" + - "--override" + - "default.replication.factor=1" + - "--override" + - "min.insync.replicas=1" + - "--override" + - "offsets.topic.replication.factor=1" + - "--override" + - "offsets.topic.num.partitions=1" + depends_on: + events-kafka-init: + condition: service_completed_successfully + +events-kafka-init: + networks: + - ch + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + command: + - "/bin/bash" + - "/etc/kafka-configmap/init.sh" + environment: + +pzoo: + networks: + - ch + expose: + - 2181 + - 2888 + - 3888 + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + command: + - "./bin/zookeeper-server-start.sh" + - "/etc/kafka/zookeeper.properties" + environment: + - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties + depends_on: + events-pzoo-init: + condition: service_completed_successfully + +events-pzoo-init: + networks: + - ch + 
expose: + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + command: + - "/bin/bash" + - "/etc/kafka-configmap/init.sh" + environment: + - PZOO_REPLICAS=1 + - ZOO_REPLICAS=0 + +zoo: + networks: + - ch + expose: + - 2181 + - 2888 + - 3888 + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + command: + - "./bin/zookeeper-server-start.sh" + - "/etc/kafka/zookeeper.properties" + environment: + - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties + depends_on: + events-zoo-init: + condition: service_completed_successfully + +events-zoo-init: + networks: + - ch + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + command: + - "/bin/bash" + - "/etc/kafka-configmap/init.sh" + environment: + - PZOO_REPLICAS=1 + - ZOO_REPLICAS=0 + - ID_OFFSET=2 +{{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/templates/allvalues-template.yaml b/deployment-configuration/compose/templates/allvalues-template.yaml new file mode 100644 index 00000000..d69538aa --- /dev/null +++ b/deployment-configuration/compose/templates/allvalues-template.yaml @@ -0,0 +1,20 @@ +{{- /* +to replace the secrets values we create a dict with the structure: + app: + : + harness: + secrets: + +thus with an empty secrets node +and then it's mergeOverwrite the copy of the .Values we created +resulting in a copy of the .Values with all secrets being "" +*/ -}} +cloudharness-metadata: + path: allvalues2.yaml +data: | +{{- $values_copy := deepCopy .Values }} +{{- range $key, $val := .Values.apps }} + {{- $new_secrets := dict "apps" (dict $key (dict "harness" (dict "secrets"))) }} + {{- $tmp := mergeOverwrite $values_copy $new_secrets }} +{{- end }} +{{ $values_copy | toYaml | indent 4 }} From ae7cf1918e0d2d370eabdf2ceb5b7c1db3de43cf Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 21 Feb 2024 10:33:20 -0600 
Subject: [PATCH 040/210] CH-100 Add new templates (not modified yet) --- .../compose/templates/auto-gatekeepers.yaml | 174 ++++++++++++++++++ .../compose/templates/auto-secrets.yaml | 50 +++++ .../ch_cli_tools/dockercompose.py | 19 +- 3 files changed, 233 insertions(+), 10 deletions(-) create mode 100644 deployment-configuration/compose/templates/auto-gatekeepers.yaml create mode 100644 deployment-configuration/compose/templates/auto-secrets.yaml diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml new file mode 100644 index 00000000..898995cd --- /dev/null +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -0,0 +1,174 @@ +{{/* Secured Services/Deployments */}} +{{- define "deploy_utils.securedservice" }} +{{- $tls := not (not .root.Values.tls) }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: "{{ .app.harness.service.name }}-gk" + labels: + app: "{{ .app.harness.service.name }}-gk" +data: + proxy.yml: |- + verbose: {{ .root.Values.debug }} + discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }} + client-id: {{ .root.Values.apps.accounts.webclient.id | quote }} + client-secret: {{ .root.Values.apps.accounts.webclient.secret }} + secure-cookie: {{ $tls }} + forbidden-page: /templates/access-denied.html.tmpl + enable-default-deny: {{ eq (.app.harness.secured | toString) "true" }} + listen: 0.0.0.0:8080 + enable-refresh-tokens: true + server-write-timeout: {{ .app.harness.proxy.timeout.send | default .root.Values.proxy.timeout.send | default 180 }}s + upstream-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s + upstream-response-header-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s + upstream-expect-continue-timeout: {{ 
.app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s + server-read-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s + upstream-keepalive-timeout: {{ .app.harness.proxy.timeout.keepalive | default .root.Values.proxy.timeout.keepalive | default 180 }}s + http-only-cookie: false + tls-cert: + tls-private-key: + redirection-url: {{ ternary "https" "http" $tls }}://{{ .app.harness.subdomain }}.{{ .root.Values.domain }} + encryption-key: AgXa7xRcoClDEU0ZDSH4X0XhL5Qy2Z2j + upstream-url: http://{{ .app.harness.service.name }}.{{ .app.namespace | default .root.Release.Namespace }}:{{ .app.harness.service.port | default 80}} + {{ if .app.harness.secured }} + {{ with .app.harness.uri_role_mapping }} + resources: + {{. | toYaml | nindent 4 }} + {{- end }} + {{- end }} + {{ if or .root.Values.local (not $tls) }} + skip-openid-provider-tls-verify: true + skip-upstream-tls-verify: true + {{- end }} + cacert.crt: {{ .files.Get "resources/certs/cacert.crt" | quote }} + access-denied.html.tmpl: |- + + + + + 403 - Access Forbidden + + + + + + +
+
+
+
+

Oops!

+

403 Permission Denied

+
+ Sorry, you do not have access to this page, please contact your administrator. + If you have been assigned new authorizations, try to refresh the page or to login again. +
+
+
+
+
+ + + +--- +apiVersion: v1 +kind: Service +metadata: + name: "{{ .app.harness.service.name }}-gk" + labels: + app: "{{ .app.harness.service.name }}-gk" +spec: + ports: + - name: http + port: 8080 + selector: + app: "{{ .app.harness.service.name }}-gk" + type: ClusterIP +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "{{ .app.harness.service.name }}-gk" + labels: + app: "{{ .app.harness.service.name }}-gk" + +spec: + replicas: 1 + selector: + matchLabels: + app: "{{ .app.harness.service.name }}-gk" + template: + metadata: + annotations: + checksum/config: {{ .app.harness.uri_role_mapping | toString | sha256sum }} + labels: + app: "{{ .app.harness.service.name }}-gk" + spec: +{{ include "deploy_utils.etcHosts" .root | indent 6 }} + containers: + - name: {{ .app.harness.service.name | quote }} + image: "quay.io/gogatekeeper/gatekeeper:1.3.8" + imagePullPolicy: IfNotPresent + {{ if .root.Values.local }} + securityContext: + allowPrivilegeEscalation: false + runAsUser: 0 + {{- end }} + env: + - name: PROXY_CONFIG_FILE + value: /opt/proxy.yml + volumeMounts: + - name: "{{ .app.harness.service.name }}-gk-proxy-config" + mountPath: /opt/proxy.yml + subPath: proxy.yml + - name: "{{ .app.harness.service.name }}-gk-proxy-config" + mountPath: /etc/pki/ca-trust/source/anchors/cacert.crt + subPath: cacert.crt + - name: "{{ .app.harness.service.name }}-gk-proxy-config" + mountPath: /templates/access-denied.html.tmpl + subPath: access-denied.html.tmpl + ports: + - name: http + containerPort: 8080 + - name: https + containerPort: 8443 + resources: + requests: + memory: "32Mi" + cpu: "50m" + limits: + memory: "64Mi" + cpu: "100m" + volumes: + - name: "{{ .app.harness.service.name }}-gk-proxy-config" + configMap: + name: "{{ .app.harness.service.name }}-gk" +--- +{{- end }} +{{- if .Values.secured_gatekeepers }} +{{ $files := .Files }} +{{- range $app := .Values.apps }} + {{- if and (hasKey $app "port") ($app.harness.secured) }} +--- + {{ include 
"deploy_utils.securedservice" (dict "root" $ "app" $app "files" $files) }} + {{- end }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }} + {{- if $subapp.harness.secured }} +--- + {{ include "deploy_utils.securedservice" (dict "root" $ "app" $subapp "files" $files) }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml new file mode 100644 index 00000000..a0a37a2f --- /dev/null +++ b/deployment-configuration/compose/templates/auto-secrets.yaml @@ -0,0 +1,50 @@ +{{- define "deploy_utils.secret" }} +{{- if .app.harness.secrets }} +{{- $secret_name := printf "%s" .app.harness.deployment.name }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ $secret_name }} + namespace: {{ .root.Values.namespace }} + labels: + app: {{ .app.harness.deployment.name }} +type: Opaque + {{- $secret := (lookup "v1" "Secret" .root.Values.namespace $secret_name) }} + {{- if $secret }} +# secret already exists + {{- if not (compact (values .app.harness.secrets)) }} +# secret values are null, copy from the existing secret +data: + {{- range $k, $v := $secret.data }} + {{ $k }}: {{ $v }} + {{- end }} + {{- else }} +# there are non default values in values.yaml, use these +stringData: + {{- range $k, $v := .app.harness.secrets }} + {{ $k }}: {{ $v | default (randAlphaNum 20) }} + {{- end }} + {{- end }} + {{- else }} +# secret doesn't exist +stringData: + {{- range $k, $v := .app.harness.secrets }} + {{ $k }}: {{ $v | default (randAlphaNum 20) }} + {{- end }} + {{- end }} +{{- end }} +--- +{{- end }} +--- +{{- range $app := .Values.apps }} +--- + {{- include "deploy_utils.secret" (dict "root" $ "app" $app) }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if hasKey $subapp "harness" }} +--- + 
{{- include "deploy_utils.secret" (dict "root" $ "app" $subapp) }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index c088dc44..d5d51a6f 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -166,21 +166,20 @@ def __post_process_multiple_document_docker_compose(self, yaml_document): documents = yaml.safe_load_all(f) for document in documents: + if not document: + continue if "cloudharness-metadata" in document: document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"] logging.info("Post-process docker-compose.yaml, creating %s", document_path) - document_path.write_text(document["data"]) + data = document["data"] + # if document_path.suffix == ".yaml": + # with open(document_path, "w") as f: + # yaml.dump(yaml.safe_load(data), f, default_flow_style=True) + # else: + document_path.write_text(data) else: with open(yaml_document, "w") as f: - yaml.dump(document, f) - - # cloudharness-metadata: - # path: secrets.yaml - - # data: | - # sdfmsldkf - # sdfmlskdfmslkdfs - # sdmlksdf + yaml.dump(document, f, default_flow_style=False) def __process_applications(self, helm_values, base_image_name): for root_path in self.root_paths: From 7acc7f5eade36af4f859f8b6686444b0d9ed65d6 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 22 Feb 2024 08:21:00 -0600 Subject: [PATCH 041/210] CH-100 Add generation of resources files --- .../compose/templates/auto-compose.yaml | 2 +- .../compose/templates/auto-resources.yaml | 18 ++++++++++++++++++ .../compose/templates/auto-secrets.yaml | 3 ++- .../ch_cli_tools/dockercompose.py | 12 ++++++++++-- 4 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 deployment-configuration/compose/templates/auto-resources.yaml diff --git 
a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index f25f558e..9a0f4fd9 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -108,7 +108,7 @@ services: {{- with $app_config.harness.resources }} {{- range .}} - type: bind - source: compose/resources/{{ $app_name }}/{{ .src }} + source: compose/resources/generated/{{ $app_name }}/{{ .src }} target: {{ .dst }} {{- end }} {{- end}} diff --git a/deployment-configuration/compose/templates/auto-resources.yaml b/deployment-configuration/compose/templates/auto-resources.yaml new file mode 100644 index 00000000..8d1e1e73 --- /dev/null +++ b/deployment-configuration/compose/templates/auto-resources.yaml @@ -0,0 +1,18 @@ +{{- define "deploy_utils.resource"}} +{{ $service_name := .app.harness.deployment.name }} +--- +# {{ $service_name }}-{{ .resource.name }} +cloudharness-metadata: + path: resources/generated/{{ $service_name }}/{{ base .resource.src }} +data: | +{{ tpl (.root.Files.Get (print "resources/" $service_name "/" .resource.src)) .root | trim | indent 2 }} +{{- end}} + +{{- range $app := .Values.apps }} + {{- if and (hasKey $app "port") $app.harness.deployment.auto | default false }} +--- + {{- range $resource := $app.harness.resources }} + {{- include "deploy_utils.resource" (dict "app" $app "resource" $resource "root" $) }} + {{- end }} + {{- end }} + {{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml index a0a37a2f..9635d33a 100644 --- a/deployment-configuration/compose/templates/auto-secrets.yaml +++ b/deployment-configuration/compose/templates/auto-secrets.yaml @@ -1,5 +1,6 @@ {{- define "deploy_utils.secret" }} -{{- if .app.harness.secrets }} +{{- if and .app.harness.secrets false }} {{/* TODO */}} + {{- $secret_name := 
printf "%s" .app.harness.deployment.name }} apiVersion: v1 kind: Secret diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index d5d51a6f..c28eb2b7 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -165,21 +165,29 @@ def __post_process_multiple_document_docker_compose(self, yaml_document): with open(yaml_document, "r") as f: documents = yaml.safe_load_all(f) + main_document = None for document in documents: if not document: continue if "cloudharness-metadata" in document: document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"] logging.info("Post-process docker-compose.yaml, creating %s", document_path) + document_path.parent.mkdir(parents=True, exist_ok=True) data = document["data"] # if document_path.suffix == ".yaml": # with open(document_path, "w") as f: # yaml.dump(yaml.safe_load(data), f, default_flow_style=True) # else: + document_path.write_text(data) else: - with open(yaml_document, "w") as f: - yaml.dump(document, f, default_flow_style=False) + # We need to save the main document later + # "safe_load_all" returns a generator over the file, + # so if we modify it while looping on "documents" + # the output will be affected (probably truncated for some outputs) + main_document = document # we need to save the main document later, + with open(yaml_document, "w") as f: + yaml.dump(main_document, f, default_flow_style=False) def __process_applications(self, helm_values, base_image_name): for root_path in self.root_paths: From 571c2ab494bd2207f500ae78a5738f77ec49b719 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 22 Feb 2024 09:12:20 -0600 Subject: [PATCH 042/210] CH-100 Add dependency between service and db --- deployment-configuration/compose/templates/auto-compose.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 9a0f4fd9..abfdb2c6 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -135,6 +135,8 @@ services: - "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)" - "traefik.http.routers.{{ $app_name }}.entrypoints=web" {{- with $app_config.harness.database }} + depends_on: + - {{ .name }} {{- if not .auto }} {{- continue}} {{- end }} From 9c8c19ee7b7b79080e1ceb5c87979c363855f5ba Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 06:00:47 -0600 Subject: [PATCH 043/210] CH-100 Add secret handling --- .../compose/templates/auto-compose.yaml | 3 ++ .../compose/templates/auto-secrets.yaml | 32 +++++++++---------- deployment/sc.yaml | 7 ---- 3 files changed, 18 insertions(+), 24 deletions(-) delete mode 100644 deployment/sc.yaml diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index abfdb2c6..120b8fa9 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -99,6 +99,9 @@ services: {{- end }} volumes: - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro + {{- range $file_name, $_ := $app_config.harness.secrets }} + - ./compose/resources/generated/auth/{{ $file_name }}:/opt/cloudharness/resources/auth/{{ $file_name }} + {{- end }} {{- if or $deployment.volume $app_config.harness.resources }} {{- with $deployment.volume }} - type: volume diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml index 9635d33a..ed9345d2 100644 --- a/deployment-configuration/compose/templates/auto-secrets.yaml +++ 
b/deployment-configuration/compose/templates/auto-secrets.yaml @@ -1,49 +1,47 @@ {{- define "deploy_utils.secret" }} -{{- if and .app.harness.secrets false }} {{/* TODO */}} +{{- if .app.harness.secrets }} {{- $secret_name := printf "%s" .app.harness.deployment.name }} -apiVersion: v1 -kind: Secret -metadata: - name: {{ $secret_name }} - namespace: {{ .root.Values.namespace }} - labels: - app: {{ .app.harness.deployment.name }} -type: Opaque {{- $secret := (lookup "v1" "Secret" .root.Values.namespace $secret_name) }} {{- if $secret }} # secret already exists {{- if not (compact (values .app.harness.secrets)) }} # secret values are null, copy from the existing secret -data: {{- range $k, $v := $secret.data }} - {{ $k }}: {{ $v }} +cloudharness-metadata: + path: resources/generated/auth/{{ $k }} + +data: {{ $v }} +--- {{- end }} {{- else }} # there are non default values in values.yaml, use these stringData: {{- range $k, $v := .app.harness.secrets }} - {{ $k }}: {{ $v | default (randAlphaNum 20) }} +cloudharness-metadata: + path: resources/generated/auth/{{ $k }} + +data: {{ $v | default (randAlphaNum 20) }} +--- {{- end }} {{- end }} {{- else }} # secret doesn't exist stringData: {{- range $k, $v := .app.harness.secrets }} - {{ $k }}: {{ $v | default (randAlphaNum 20) }} +cloudharness-metadata: + path: resources/generated/auth/{{ $k }} +data: {{ $v | default (randAlphaNum 20) }} +--- {{- end }} {{- end }} {{- end }} ---- {{- end }} ---- {{- range $app := .Values.apps }} ---- {{- include "deploy_utils.secret" (dict "root" $ "app" $app) }} {{- range $subapp := $app }} {{- if contains "map" (typeOf $subapp) }} {{- if hasKey $subapp "harness" }} ---- {{- include "deploy_utils.secret" (dict "root" $ "app" $subapp) }} {{- end }} {{- end }} diff --git a/deployment/sc.yaml b/deployment/sc.yaml deleted file mode 100644 index 69c99d99..00000000 --- a/deployment/sc.yaml +++ /dev/null @@ -1,7 +0,0 @@ -apiVersion: storage.k8s.io/v1 -kind: StorageClass -metadata: - name: 
standard -provisioner: docker.io/hostpath -reclaimPolicy: Delete -volumeBindingMode: Immediate \ No newline at end of file From f6d0c6763b2378ff5cfe331d019dfa735bf61f2a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 06:25:39 -0600 Subject: [PATCH 044/210] CH-100 Remove argo from dependencies --- .../compose/templates/auto-compose.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 120b8fa9..b28fbc88 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -91,7 +91,10 @@ services: - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} {{- end }} {{- end }} - {{- with $app_config.harness.dependencies.hard }} + {{/* Takes the hard deps, removes argo and adds the db if there is one */}} + {{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}} + {{/* "compact" in the beginning is to remove empty values */}} + {{- with compact (append (without $app_config.harness.dependencies.hard "argo") (dig "database" "name" "" $app_config.harness) ) }} depends_on: {{- range . }} - {{ . 
}} @@ -138,8 +141,6 @@ services: - "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)" - "traefik.http.routers.{{ $app_name }}.entrypoints=web" {{- with $app_config.harness.database }} - depends_on: - - {{ .name }} {{- if not .auto }} {{- continue}} {{- end }} From 4fd6deeb480add6f99fbc30223d1af8353a9288a Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 06:41:41 -0600 Subject: [PATCH 045/210] CH-100 Change from pyyaml to ruamel The dependency is already gathered from a third-party lib --- .../ch_cli_tools/dockercompose.py | 45 +++++++++---------- 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index c28eb2b7..bafe5a00 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -2,6 +2,7 @@ Utilities to create a helm chart from a CloudHarness directory structure """ import yaml +from ruamel.yaml import YAML import os import shutil import logging @@ -162,32 +163,26 @@ def __post_process_multiple_document_docker_compose(self, yaml_document): logging.warning("Something went wrong during the docker-compose.yaml generation, cannot post-process it") return - with open(yaml_document, "r") as f: - documents = yaml.safe_load_all(f) + yaml_handler = YAML() + documents = yaml_handler.load_all(yaml_document) - main_document = None - for document in documents: - if not document: - continue - if "cloudharness-metadata" in document: - document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"] - logging.info("Post-process docker-compose.yaml, creating %s", document_path) - document_path.parent.mkdir(parents=True, exist_ok=True) - data = document["data"] - # if document_path.suffix == ".yaml": - # with open(document_path, "w") as f: - # yaml.dump(yaml.safe_load(data), f, 
default_flow_style=True) - # else: - - document_path.write_text(data) - else: - # We need to save the main document later - # "safe_load_all" returns a generator over the file, - # so if we modify it while looping on "documents" - # the output will be affected (probably truncated for some outputs) - main_document = document # we need to save the main document later, - with open(yaml_document, "w") as f: - yaml.dump(main_document, f, default_flow_style=False) + main_document = None + for document in documents: + if not document: + continue + if "cloudharness-metadata" in document: + document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"] + logging.info("Post-process docker-compose.yaml, creating %s", document_path) + document_path.parent.mkdir(parents=True, exist_ok=True) + data = document["data"] + document_path.write_text(data) + else: + # We need to save the main document later + # "load_all" returns a generator over the file, + # so if we modify it while looping on "documents" + # the output will be affected (probably truncated for some outputs) + main_document = document # we need to save the main document later + yaml_handler.dump(main_document, yaml_document) def __process_applications(self, helm_values, base_image_name): for root_path in self.root_paths: From 86c0e4f51c4c4a56a4b59e148f1c1454675286ec Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 06:42:24 -0600 Subject: [PATCH 046/210] CH-100 Remove tmp "events" from deps --- deployment-configuration/compose/templates/auto-compose.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index b28fbc88..8fc62e83 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -88,7 +88,11 @@ services: links: {{- range . 
}} {{- $service_name := (get $.Values.apps .).harness.service.name }} + {{- if eq . "events"}} +# - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} + {{- else }} - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} + {{- end }} {{- end }} {{- end }} {{/* Takes the hard deps, removes argo and adds the db if there is one */}} From a5909cea41d1af911c025c29711adb73fb299ed4 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 06:46:25 -0600 Subject: [PATCH 047/210] CH-100 Add back sc.yaml --- deployment/sc.yaml | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 deployment/sc.yaml diff --git a/deployment/sc.yaml b/deployment/sc.yaml new file mode 100644 index 00000000..69c99d99 --- /dev/null +++ b/deployment/sc.yaml @@ -0,0 +1,7 @@ +apiVersion: storage.k8s.io/v1 +kind: StorageClass +metadata: + name: standard +provisioner: docker.io/hostpath +reclaimPolicy: Delete +volumeBindingMode: Immediate \ No newline at end of file From 4379252dd0bea6e6b4ad4d66f671e075fca598f9 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 23 Feb 2024 07:46:38 -0600 Subject: [PATCH 048/210] CH-100 Add first handling of NFS volume --- .../nfsserver-deployment.yaml | 18 ++++++++++++------ .../compose/templates/auto-compose.yaml | 7 +++++++ 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml index 7e9b6819..50dc08ed 100644 --- a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml +++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml @@ -1,13 +1,19 @@ {{- define "nfsserver.deployment" }} -{{- $nfs := .apps.nfsserver}} +{{- with .apps.nfsserver}} -{{ $nfs.name }}: - image: {{ $nfs.harness.deployment.image }} +{{ .name }}: + image: {{ .harness.deployment.image }} environment: - # NFS useDNS? {{ $nfs.nfs.useDNS }} - {{- if $nfs.nfs.useDNS }} + # NFS useDNS? 
{{ .nfs.useDNS }} + {{- if .nfs.useDNS }} - NFS_SERVER={{ printf "nfs-server.%s.svc.cluster.local" .namespace }} {{- end }} - - NFS_PATH={{ $nfs.nfs.path }} + - NFS_PATH={{ .nfs.path }} - PROVISIONER_NAME={{ printf "%s-nfs-provisioner" .namespace }} + + volumes: + - type: volume + source: {{ .nfs.volumeName }} + target: {{ .nfs.path }} +{{- end }} {{- end }} \ No newline at end of file diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 8fc62e83..a8ac40cf 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -168,4 +168,11 @@ volumes: # this inclusion needs to be conditional dshm: {{- end }} {{- end }} + {{- if eq $app_name "nfsserver" }} + {{ $app_config.nfs.volumeName }}: + # driver_opts: + # type: "nfs" + # o: "{{ join "," $app_config.nfs.mountOptions }}" + # device: ":{{ $app_config.nfs.path }}" + {{- end }} {{- end }} From 46ee1408a87baf8f4f387a45ba4173af77b6a0e4 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 26 Feb 2024 11:36:48 -0600 Subject: [PATCH 049/210] CH-100 Add volumes conditionnally --- .../compose/templates/auto-compose.yaml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index a8ac40cf..c2aad014 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -157,7 +157,20 @@ networks: ch: name: ch_network -volumes: # this inclusion needs to be conditional +{{- range $app_name, $app_config := .Values.apps }} + {{- with $app_config.harness.deployment.volume }} +volumes: + {{- break }} + {{- end }} + {{- with $app_config.harness.database }} +volumes: + {{- break }} + {{- end }} + {{- if eq $app_name "nfsserver" }} +volumes: + {{- break 
}} + {{- end }} +{{- end }} {{- range $app_name, $app_config := .Values.apps }} {{- with $app_config.harness.deployment.volume }} {{ .name }}: From e135c6a6dbef827854d742e188f4866703da2b00 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 26 Feb 2024 12:27:56 -0600 Subject: [PATCH 050/210] CH-100 Add first tutorial about docker compose --- ...clock-application-with-docker-compose.adoc | 328 ++++++++++++++++++ 1 file changed, 328 insertions(+) create mode 100644 docs/tutorials/simple-date-clock-application-with-docker-compose.adoc diff --git a/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc b/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc new file mode 100644 index 00000000..70e8f1e2 --- /dev/null +++ b/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc @@ -0,0 +1,328 @@ +:repo_url: ../../../../../ +:ch: CloudHarness +:dc: Docker Compose +:dc-: docker compose +:repo_fragment: MetaCell/cloud-harness + + += A simple date-clock application: tutorial {ch} targeting {dc} + +In this small tutorial, we will see different aspects of the development of applications with {ch} through the development from scratch of a small webapp that fetches information from a server on a regular basis. +This tutorial will show you how to generate the `{dc-}` configuration and how to build and deploy this simple application. + +{ch} generates the initial files and folders for your project depending on some templates tackling different aspects of your app depending on your requirements, __e.g.__, for a webapp project, it generates the frontend initial files for ReactJS and the initial Flask files for the backend. +For the API part, {ch} relies on OpenAPI 3 to deal with the endpoints/model description. + +The different aspects that will be covered here are: + +* how to bootstrap a new app, build it, and deploy it on {dc}; +* how to modify/update the app, build it and run it again.
+ +== The tools you need to deploy/build your application + +The following tools, beside python, are not required to work with {ch}. +Before installing everything, please be sure you have the following tools installed on your machine: + +* `python` +* `yarn` +* `{ch}` -- if not installed, please check other documentation and tutorials +* `helm` -- to deal with the generation of the {dc} +* `skaffold` -- to build the different images that will run on {dc} +* `{dc-}` -- to actually run the built application + + +== Creating a very simple webapp + +Now that we know how to configure/run/deploy apps on our local cluster, we will create a very simple webapp. +In this first time, we will only generate the project's artifacts using the `harness-application`, then, we will build/run/deploy it. +In a second time, we will modify the API to add new endpoints and deal with the frontend accordingly. + +=== Creating a new webapp and building the frontend + +The webapp that we will create will be a useless webapp that will fetch the current date and time when a button is pressed. +Nothing fancy, just a way to see how to interact with the generated sources and get everything running on your local cluster. + +The first step is to generate the projects files. +In our case, we want to develop a webapp, meaning that we want a frontend and a backend. +We use `harness-application` to generate the first files with a specific templates: `webapp` and `flask-server`. +We first place ourself in the parent directory of where you cloned the `cloud-harness` repository. + +[NOTE] +We could place ourself anywhere, we would just have to remember the path towards the `cloud-harness` repository. + +.Generating the first project's file +[source,bash] +---- +harness-application clockdate -t webapp -t flask-server +---- + +The name of the application is `clockdate` and we use the `webapp` and `flask-server` template. 
+There are various existing templates with different purposes: for DB interaction, backend, frontend, ... + +We observe now that a new directory has been created in an `applications` folder named `clockdate`. +The folder is organized with many sub-folders, all playing a different role in the app. + +.Ensuring that the backend is considered as a webapp +We will now make a small modification, or ensure that the code of the backend includes its activation as "webapp". +Open the file generated in `clockdate/backend/clockdate/__main__.py` and check that the following line has the keyword parameter `webapp` set to `True`. + +[source,python] +---- +app = init_flask(title="clockdate", init_app_fn=None, webapp=True) +---- + +This option ensures the registration of some specific endpoints by {ch}. +In this case, it ensures that the `/` endpoint will be mapped to the `index.html` produced for the frontend. + +.Building the frontend + +In this tutorial, before generating the configuration files for {dc} by {ch}, we will build the frontend using `yarn`. +Enter the `clockdate/frontend` folder and just type + +[source, bash] +---- +yarn install +---- + +This will generate a `yarn.lock` which is required later for the build of the Docker images. + +[NOTE] +This step could have been done later, but it has to be done *before* the build of the different Docker images using `skaffold`. + + +.Generating the `{dc-}` configuration files for our `clockdate` app +[source,bash] +---- +# run in the directory that contains the cloud-harness repository +harness-deployment cloud-harness . -u -dtls -l -d azathoth.local -e local -n azathoth -i clockdate --docker-compose +---- + +The key here is to add the `--docker-compose` option that will trigger the generation of a set of files in the `deployment` folder, +as well as a slightly modified version of the `skaffold.yaml` file.
+ +As a result, in the `deployment` folder, we should have something that looks like this: + +[source] +---- ++- CURRENT_DIRECTORY + [...] + + deployment/ + + compose/ -> the template files and some generated files dedicated to docker compose + `- docker-compose.yaml -> the main file used by {dc} to deploy everything + `- skaffold.yaml -> used by skaffold to build the Docker images +---- + +Now you can build/deploy/run it using `skaffold`. + +[source,bash] +---- +skaffold build +---- + +.Deploying your app on {dc} + +To deploy the application on {dc}, you only need to position yourself in the directory where the `docker-compose.yaml` file was generated, so in the `deployment` folder. + +[source,bash] +---- +cd deployment +docker compose up +---- + +This command will download the necessary images and reuses the ones built by `skaffold` to deploy everything. + +Now, to be sure to access properly the app, a small addition to your `/etc/hosts` file is required as such: + +[source] +---- +127.0.0.1 clockdate.azathoth.local +---- + +Now you can open your browser to `http://clockdate.azathoth.local` and see that everything is running properly. +You can also go to `http://clockdate.azathoth.local/api/ping` and check that you have a message. + + +=== Modifying your webapp, adding behavior + +We are currently capable of generating/running applications, but we did not add our own behavior. +We need to modify the generated sources to do so. 
+If we take a deeper look to the folder generated by `harness-application`, we observe three folders that are the one we will modify on a normal usage/base: + +.Generated directory organization +[source] +---- ++- api -> owns the OpenAPI definition of the endpoints/resources handled by the API ++- backend + `- clockdate -> the project backend files + |- controllers -> the controller definition + `- models -> the resources exposed by the API ++- frontend -> the webpage files +---- + +In a first time, we will modify the backend to add a new endpoint that will answer in a string the current date and time. +The process is the following: + +. we add the new endpoint in the `openapi` folder, modifying the `openapi.yaml` file, +. we regenerate the code of the application using `harness-generate` +. we code the behavior of the endpoint in the dedicated method generated in the `backend/clockdate/controllers` folder. +. we build/deploy/run the code to see it running (this step can be changed with a pure python run of the backend for a quicker dev loop). + +==== Adding the new endpoint to the openapi specification + +We will add a new endpoint named `currentdate` that will answer a string when `GET`. +To do so, we add a special path in the `path` section. + +.Modifying the `api/openapi.yaml` file +[source,yaml] +---- +paths: + /currentdate: + get: + operationId: currentdate + responses: + "200": + content: + application/json: + schema: + type: string + description: Current date and time + "500": + description: System cannot give the current time + summary: Gets the current date and time + tags: [datetime] +---- + +[NOTE] +The name of the controller in which the function related to the endpoint will be generated depends on the `tags` value in defined in the `api/openapi.yaml` file. + +We validate that our openAPI specification is correct. 
+ +[source] +---- +$ openapi-spec-validator applications/clockdate/api/openapi.yaml +OK +---- + +Now we generate again the code the application using `harness-application` another time. + +.Regenerating the code of our modified app +[source,bash] +---- +harness-application clockdate -t flask-server -t webapp +---- + +This will add a new `datetime_controller.py` in the `backend/clockdate/controllers` package. + +[IMPORTANT] +You need to notice that all the controllers files (and all the files) are overridden in the `backend` directory. +To prevent files of being overridden, you need to edit the `.openapi-generator-ignore` file, that acts like a `.gitignore` file (in a way), by marking the files/directories that needs to be ignored by the generation. + +When we open this file, we get the following controller method: + +[source,python] +---- +def currentdate(): # noqa: E501 + """Gets the current date and time + + # noqa: E501 + + + :rtype: str + """ + return 'do some magic!' +---- + +This is the moment to add the behavior we want: + +[source,python] +---- +def currentdate(): # noqa: E501 + """Gets the current date and time + + # noqa: E501 + + + :rtype: str + """ + from datetime import datetime + return f'{datetime.now()}' +---- + +We simply import the `datetime` module and type, and we ask for the current date and time. +Here a string interpolation is used only to force the result to be considered and formatted as a string. +It's not mandatory. + +Now that our new endpoint is coded, we can build/deploy/run it on our local cluster using `skaffold build` then `{dc-} up`. +Once the deployment is done, we can navigate to: http://clockdate.azathoth.local/api/currentdate to appreciate the result. + + +=== A quick and dirty frontend to test our endpoint + + +Now that we have the "backend" running, we will modify the frontend to get a label and a button that will fetch the information about date and time from the new endpoint we defined. 
+If we look in the frontend source code generated, we see a `src/rest/api.ts` file. +The generated code targets ReactJS as framework. +This module provides clients for the API generated from the `api/openapi.yaml` specification. +Exactly, it provides one client by `tag` defined in the openAPI specification. +In our case, we defined a tag `datetime`, so we find in `api.ts` a class `DatetimeApi`. +This is the class we will instantiate and use to deal with the call to the API and the endpoint we defined in the previous section. + +First, we are going to code a new React component that will provide a header with the current date and time and a button to ask for a "fetch" of the current date and time from the server. + +We call this component `DateTime` inside of a `DateTime.tsx` file that is placed in the `src/components` directory. + +.Code of the `frontend/src/component/DateTime.tsx` component +[source,javascript] +---- +import React, { useState, useEffect, useCallback } from 'react'; +import { DatetimeApi } from '../rest/api' + +const api = new DatetimeApi() <1> + +const DateTime = () => { + const [datetime, setDatetime] = useState('unavailable'); + useEffect(() => updateDate(), []); + + const updateDate = useCallback(() => { + api.currentdate().then(r => setDatetime(r.data)); <2> + }, []); + + return ( +
+

{datetime}

+ +
+ ) +} + +export default DateTime; +---- + +<1> The `DatetimeApi` class is instantiated, this is now the instance we will use everytime we need to perform a request toward an API endpoint. +<2> is where is actually perform the call. The `currentdate` method is generated by {ch}. + +Now that we have our dedicated component, we will integrate it in the current page. +To do that, we need to modify the `App.tsx` component. +This component is located in `frontend/src/App.tsx`. +We modify the content of this file this way: + +.Code of the `frontend/src/App.tsx` component +[source,javascript] +---- +import React from 'react'; +import './styles/style.less'; +import DateTime from './components/DateTime'; + +const Main = () => ( + <> +

Ask for date and time

+ +

See api documentation here

+ +); + +export default Main; +---- + +Once this is done, we can build/deploy/run again our webapp on our local cluster using `skaffold buld` then `{dc-} up`. +That's it! From 0f701461b25885eec04a25c2d02d8085204967c0 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 29 Feb 2024 12:35:18 -0600 Subject: [PATCH 051/210] CH-100 Add first real config for gatekeeper --- .../compose/templates/auto-compose.yaml | 18 +++ .../compose/templates/auto-gatekeepers.yaml | 134 ++++++------------ 2 files changed, 63 insertions(+), 89 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index c2aad014..67154da4 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -23,10 +23,28 @@ services: - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" {{- range $app_name, $app_config := .Values.apps }} + {{- if eq $app_name "argo" -}} + {{- continue -}} + {{- end -}} {{ $deployment := $app_config.harness.deployment }} {{- if eq $app_name "nfsserver" }} {{- include "nfsserver.deployment" $.Values | indent 2 }} {{- end }} + {{- if $.Values.secured_gatekeepers }} + {{ if and (hasKey $app_config "port") $app_config.harness.secured }} + # Gatekeeper for {{ $app_config.harness.service.name }}-gk +{{- include "securedservice.deploy" (dict "root" $ "app" $app_config) | indent 2 }} + {{- end }} + {{- range $subapp := $app_config }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }} + {{- if $subapp.harness.secured }} +{{ include "securedservice.deploy" (dict "root" $ "app" $subapp) | indent 2 }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} {{- end}} diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml 
b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index 898995cd..f0d68c7f 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -1,14 +1,35 @@ {{/* Secured Services/Deployments */}} -{{- define "deploy_utils.securedservice" }} +{{- define "securedservice.deploy" }} {{- $tls := not (not .root.Values.tls) }} -apiVersion: v1 -kind: ConfigMap -metadata: - name: "{{ .app.harness.service.name }}-gk" - labels: - app: "{{ .app.harness.service.name }}-gk" -data: - proxy.yml: |- +{{ .app.harness.service.name }}-gk: + image: quay.io/gogatekeeper/gatekeeper:1.3.8 + expose: + - '8080' + - '8443' + deploy: + mode: replicated + replicas: 1 + resources: + limits: + cpus: 100m + memory: 64M + reservations: + cpus: 50m + memory: 32M + environment: + - PROXY_CONFIG_FILE=/opt/proxy.yml + volumes: + - compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml + - compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt + - compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl +{{- end }} + +{{- define "securedservice.deploy.resources" }} +{{- $tls := not (not .root.Values.tls) }} +cloudharness-metadata: + path: resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml + +data: |- verbose: {{ .root.Values.debug }} discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }} client-id: {{ .root.Values.apps.accounts.webclient.id | quote }} @@ -40,8 +61,17 @@ data: skip-openid-provider-tls-verify: true skip-upstream-tls-verify: true {{- end }} - cacert.crt: {{ .files.Get "resources/certs/cacert.crt" | quote }} - access-denied.html.tmpl: |- +--- +cloudharness-metadata: + path: resources/generated/{{ 
.app.harness.service.name }}-gk/cacert.crt + +data: |- +{{ .files.Get "resources/certs/cacert.crt" | indent 2 }} +--- +cloudharness-metadata: + path: resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl + +data: |- @@ -79,93 +109,19 @@ data: --- -apiVersion: v1 -kind: Service -metadata: - name: "{{ .app.harness.service.name }}-gk" - labels: - app: "{{ .app.harness.service.name }}-gk" -spec: - ports: - - name: http - port: 8080 - selector: - app: "{{ .app.harness.service.name }}-gk" - type: ClusterIP ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: "{{ .app.harness.service.name }}-gk" - labels: - app: "{{ .app.harness.service.name }}-gk" - -spec: - replicas: 1 - selector: - matchLabels: - app: "{{ .app.harness.service.name }}-gk" - template: - metadata: - annotations: - checksum/config: {{ .app.harness.uri_role_mapping | toString | sha256sum }} - labels: - app: "{{ .app.harness.service.name }}-gk" - spec: -{{ include "deploy_utils.etcHosts" .root | indent 6 }} - containers: - - name: {{ .app.harness.service.name | quote }} - image: "quay.io/gogatekeeper/gatekeeper:1.3.8" - imagePullPolicy: IfNotPresent - {{ if .root.Values.local }} - securityContext: - allowPrivilegeEscalation: false - runAsUser: 0 - {{- end }} - env: - - name: PROXY_CONFIG_FILE - value: /opt/proxy.yml - volumeMounts: - - name: "{{ .app.harness.service.name }}-gk-proxy-config" - mountPath: /opt/proxy.yml - subPath: proxy.yml - - name: "{{ .app.harness.service.name }}-gk-proxy-config" - mountPath: /etc/pki/ca-trust/source/anchors/cacert.crt - subPath: cacert.crt - - name: "{{ .app.harness.service.name }}-gk-proxy-config" - mountPath: /templates/access-denied.html.tmpl - subPath: access-denied.html.tmpl - ports: - - name: http - containerPort: 8080 - - name: https - containerPort: 8443 - resources: - requests: - memory: "32Mi" - cpu: "50m" - limits: - memory: "64Mi" - cpu: "100m" - volumes: - - name: "{{ .app.harness.service.name }}-gk-proxy-config" - configMap: 
- name: "{{ .app.harness.service.name }}-gk" ---- {{- end }} + {{- if .Values.secured_gatekeepers }} {{ $files := .Files }} {{- range $app := .Values.apps }} {{- if and (hasKey $app "port") ($app.harness.secured) }} ---- - {{ include "deploy_utils.securedservice" (dict "root" $ "app" $app "files" $files) }} - {{- end }} + {{ include "securedservice.deploy.resources" (dict "root" $ "app" $app "files" $files) }} + {{- end }} {{- range $subapp := $app }} {{- if contains "map" (typeOf $subapp) }} {{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }} {{- if $subapp.harness.secured }} ---- - {{ include "deploy_utils.securedservice" (dict "root" $ "app" $subapp "files" $files) }} + {{ include "securedservice.deploy.resources" (dict "root" $ "app" $subapp "files" $files) }} {{- end }} {{- end }} {{- end }} From 6bbef64e633cecebc14d0c85cfc18393f843122d Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 29 Feb 2024 12:55:39 -0600 Subject: [PATCH 052/210] CH-100 Add reverse proxy config for gatekeeper --- .../compose/templates/auto-compose.yaml | 25 ++++++++++++------- .../compose/templates/auto-gatekeepers.yaml | 5 ++++ 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 67154da4..9379c57d 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -30,8 +30,9 @@ services: {{- if eq $app_name "nfsserver" }} {{- include "nfsserver.deployment" $.Values | indent 2 }} {{- end }} - {{- if $.Values.secured_gatekeepers }} - {{ if and (hasKey $app_config "port") $app_config.harness.secured }} + {{- $isSecured := (and $.Values.secured_gatekeepers ( and (hasKey $app_config "port") $app_config.harness.secured )) -}} + {{ if $isSecured }} + # Gatekeeper for {{ $app_config.harness.service.name }}-gk {{- include "securedservice.deploy" (dict 
"root" $ "app" $app_config) | indent 2 }} {{- end }} @@ -44,7 +45,6 @@ services: {{- end }} {{- end }} {{- end }} - {{- end }} {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }} {{- continue }} {{- end}} @@ -104,19 +104,24 @@ services: {{- end }} {{- with $app_config.harness.dependencies.soft }} links: - {{- range . }} - {{- $service_name := (get $.Values.apps .).harness.service.name }} - {{- if eq . "events"}} + {{- range . -}} + {{- $service_name := (get $.Values.apps .).harness.service.name -}} + {{- if eq . "events" }} # - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} {{- else }} - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} {{- end }} - {{- end }} - {{- end }} + {{- end -}} + {{- end -}} {{/* Takes the hard deps, removes argo and adds the db if there is one */}} {{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}} {{/* "compact" in the beginning is to remove empty values */}} - {{- with compact (append (without $app_config.harness.dependencies.hard "argo") (dig "database" "name" "" $app_config.harness) ) }} + {{- with compact + (append + (append + (without $app_config.harness.dependencies.hard "argo") + (dig "database" "name" "" $app_config.harness)) + (ternary (printf "%s-gk" $app_config.harness.service.name) "" $isSecured)) -}} depends_on: {{- range . }} - {{ . 
}} @@ -153,6 +158,7 @@ services: {{- end }} {{- end }} {{- end }} + {{- if not $isSecured }} labels: - "traefik.enable=true" {{- with $app_config.harness.service.port }} @@ -162,6 +168,7 @@ services: # - "traefik.http.routers.{{ .app_name }}.middlewares=redirect-middleware" - "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)" - "traefik.http.routers.{{ $app_name }}.entrypoints=web" + {{- end }} {{- with $app_config.harness.database }} {{- if not .auto }} {{- continue}} diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index f0d68c7f..d27a6f50 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -22,6 +22,11 @@ - compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml - compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt - compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl + labels: + - "traefik.enable=true" + - "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}" + - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)" + - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web" {{- end }} {{- define "securedservice.deploy.resources" }} From 3b437cf018a01085e13d462f168e7796568376d7 Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 29 Feb 2024 13:27:30 -0600 Subject: [PATCH 053/210] CH-100 Fix path to generated resources --- .../compose/templates/auto-compose.yaml | 2 +- .../compose/templates/auto-gatekeepers.yaml | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git 
a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 9379c57d..ba7dd79e 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -141,7 +141,7 @@ services: {{- with $app_config.harness.resources }} {{- range .}} - type: bind - source: compose/resources/generated/{{ $app_name }}/{{ .src }} + source: ./compose/resources/generated/{{ $app_name }}/{{ .src }} target: {{ .dst }} {{- end }} {{- end}} diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index d27a6f50..fc27efd0 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -19,14 +19,20 @@ environment: - PROXY_CONFIG_FILE=/opt/proxy.yml volumes: - - compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml - - compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt - - compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl + - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml + - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt + - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl labels: - "traefik.enable=true" - "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}" - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)" - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web" 
+ depends_on: + - accounts + links: + - accounts:accounts.{{ .root.Values.domain }} + extra_hosts: + - "accounts.{{ .root.Values.domain }}=127.0.0.11" {{- end }} {{- define "securedservice.deploy.resources" }} From 1135b992db0b533997b8866ee0fa5becff7543b5 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 1 Mar 2024 07:25:07 -0600 Subject: [PATCH 054/210] CH-100 Fix bad GK configuration --- .../compose/templates/auto-compose.yaml | 13 ++++++++++--- .../compose/templates/auto-gatekeepers.yaml | 10 ++++++---- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index ba7dd79e..7ef77386 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -86,6 +86,13 @@ services: {{- with $deployment.command }} # entrypoint: {{ cat . $deployment.args }} {{- end }} + {{- if eq $app_name "accounts" }} + healthcheck: + test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/azathoth/account"] + interval: 1s + timeout: 3s + retries: 30 + {{- end }} environment: - CH_CURRENT_APP_NAME={{ $app_name }} @@ -117,11 +124,9 @@ services: {{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}} {{/* "compact" in the beginning is to remove empty values */}} {{- with compact - (append (append (without $app_config.harness.dependencies.hard "argo") - (dig "database" "name" "" $app_config.harness)) - (ternary (printf "%s-gk" $app_config.harness.service.name) "" $isSecured)) -}} + (dig "database" "name" "" $app_config.harness)) -}} depends_on: {{- range . }} - {{ . 
}} @@ -146,6 +151,7 @@ services: {{- end }} {{- end}} {{- end }} + {{/* {{- if $.Values.local }} # Extra /etc/hosts list {{- $domain := $.Values.domain }} @@ -158,6 +164,7 @@ services: {{- end }} {{- end }} {{- end }} + */}} {{- if not $isSecured }} labels: - "traefik.enable=true" diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index fc27efd0..a91bd96a 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -2,6 +2,9 @@ {{- define "securedservice.deploy" }} {{- $tls := not (not .root.Values.tls) }} {{ .app.harness.service.name }}-gk: + networks: + - ch + restart: always image: quay.io/gogatekeeper/gatekeeper:1.3.8 expose: - '8080' @@ -28,11 +31,10 @@ - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)" - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web" depends_on: - - accounts + accounts: + condition: service_healthy links: - accounts:accounts.{{ .root.Values.domain }} - extra_hosts: - - "accounts.{{ .root.Values.domain }}=127.0.0.11" {{- end }} {{- define "securedservice.deploy.resources" }} @@ -42,7 +44,7 @@ cloudharness-metadata: data: |- verbose: {{ .root.Values.debug }} - discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }} + discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}:8080/auth/realms/{{ .root.Values.namespace }} client-id: {{ .root.Values.apps.accounts.webclient.id | quote }} client-secret: {{ .root.Values.apps.accounts.webclient.secret }} secure-cookie: {{ $tls }} From b23f03b87319d4dce2c1b1288834ad00b95b4755 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 1 Mar 2024 
08:04:40 -0600 Subject: [PATCH 055/210] CH-100 Adapt gk configuration --- .../compose/templates/auto-compose.yaml | 10 +++++----- .../compose/templates/auto-gatekeepers.yaml | 17 ++++++++++------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 7ef77386..ff6145cb 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -64,14 +64,14 @@ services: image: {{ . }} {{- end }} {{- if eq $.Values.mainapp $app_name }} - # {{- with $app_config.harness.service.port }} - # ports: - # - "{{ . }}:{{ $app_config.harness.deployment.port }}" - # {{- end }} + {{- with $app_config.harness.service.port }} + ports: + - "{{ . }}:{{ $app_config.harness.deployment.port }}" + {{- end }} {{- end }} {{- with $app_config.harness.deployment.port }} expose: - - {{ . | quote }} + - {{ . }} {{- end}} deploy: mode: "replicated" diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index a91bd96a..b9e6f8cb 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -1,7 +1,8 @@ {{/* Secured Services/Deployments */}} {{- define "securedservice.deploy" }} {{- $tls := not (not .root.Values.tls) }} -{{ .app.harness.service.name }}-gk: +{{- $gk_name := printf "%s-gk" .app.harness.service.name }} +{{ $gk_name }}: networks: - ch restart: always @@ -22,19 +23,21 @@ environment: - PROXY_CONFIG_FILE=/opt/proxy.yml volumes: - - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml - - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt - - ./compose/resources/generated/{{ .app.harness.service.name 
}}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl + - ./compose/resources/generated/{{ $gk_name }}/proxy.yml:/opt/proxy.yml + - ./compose/resources/generated/{{ $gk_name }}/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt + - ./compose/resources/generated/{{ $gk_name }}/access-denied.html.tmpl:/templates/access-denied.html.tmpl labels: - "traefik.enable=true" - - "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}" - - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)" - - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web" + - "traefik.http.services.{{ $gk_name }}.loadbalancer.server.port={{ .app.harness.service.port }}" + - "traefik.http.routers.gatekeeper.middlewares=redirect-middleware" + - "traefik.http.routers.{{ $gk_name }}.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)" + - "traefik.http.routers.{{ $gk_name }}.entrypoints=web" depends_on: accounts: condition: service_healthy links: - accounts:accounts.{{ .root.Values.domain }} + - {{ .app.harness.service.name }}:{{ .app.harness.service.name }}.default {{- end }} {{- define "securedservice.deploy.resources" }} From 361ca4633a302a8b3d27e10db862cd1bba370eeb Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 1 Mar 2024 13:02:16 -0600 Subject: [PATCH 056/210] CH-100 Fix gk configuration --- applications/samples/frontend/webpack.config.js | 2 +- deployment-configuration/compose/templates/auto-compose.yaml | 2 +- .../compose/templates/auto-gatekeepers.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/applications/samples/frontend/webpack.config.js b/applications/samples/frontend/webpack.config.js index ad5ee556..69f80cb9 100644 --- a/applications/samples/frontend/webpack.config.js +++ b/applications/samples/frontend/webpack.config.js @@ -29,7 +29,7 @@ module.exports = function 
webpacking(envVariables) { const output = { path: path.resolve(__dirname, "dist"), - filename: "[name].[contenthash].js", + filename: "js/[name].[contenthash].js", publicPath: "/" }; diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index ff6145cb..156547a4 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -23,7 +23,7 @@ services: - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" {{- range $app_name, $app_config := .Values.apps }} - {{- if eq $app_name "argo" -}} + {{- if has $app_name (list "argo" "events") -}} {{- continue -}} {{- end -}} {{ $deployment := $app_config.harness.deployment }} diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml index b9e6f8cb..4094a925 100644 --- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml +++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml @@ -36,7 +36,7 @@ accounts: condition: service_healthy links: - - accounts:accounts.{{ .root.Values.domain }} + - traefik:accounts.{{ .root.Values.domain }} - {{ .app.harness.service.name }}:{{ .app.harness.service.name }}.default {{- end }} @@ -47,7 +47,7 @@ cloudharness-metadata: data: |- verbose: {{ .root.Values.debug }} - discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}:8080/auth/realms/{{ .root.Values.namespace }} + discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }} client-id: {{ .root.Values.apps.accounts.webclient.id | quote }} client-secret: {{ .root.Values.apps.accounts.webclient.secret }} secure-cookie: {{ $tls }} From 74d074eec720731bd2c1b4d4601914ef07e3c7f8 Mon Sep 17 00:00:00 
2001 From: aranega Date: Fri, 1 Mar 2024 13:15:04 -0600 Subject: [PATCH 057/210] CH-100 Comment unused "ports" --- deployment-configuration/compose/templates/auto-compose.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 156547a4..52f5a157 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -63,12 +63,13 @@ services: {{- with $app_config.image }} image: {{ . }} {{- end }} + {{/* {{- if eq $.Values.mainapp $app_name }} {{- with $app_config.harness.service.port }} ports: - "{{ . }}:{{ $app_config.harness.deployment.port }}" {{- end }} - {{- end }} + {{- end }}*/}} {{- with $app_config.harness.deployment.port }} expose: - {{ . }} From fec25bee546836c823020e25aa45fd62c69a7b88 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 15 Mar 2024 06:05:26 -0600 Subject: [PATCH 058/210] CH-100 Fix issue with service names in dependencies --- .../compose/templates/auto-compose.yaml | 29 +++++++++++++------ 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 52f5a157..8f1543bd 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -23,7 +23,7 @@ services: - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" {{- range $app_name, $app_config := .Values.apps }} - {{- if has $app_name (list "argo" "events") -}} + {{- if has $app_name (list "argo" "events" "nfsserver") -}} {{- continue -}} {{- end -}} {{ $deployment := $app_config.harness.deployment }} @@ -113,12 +113,17 @@ services: {{- with $app_config.harness.dependencies.soft }} links: {{- range . 
-}} - {{- $service_name := (get $.Values.apps .).harness.service.name -}} - {{- if eq . "events" }} -# - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} + {{- $service := .}} + {{- range $name, $conf := $.Values.apps }} + {{- if eq $conf.harness.name $service }} + {{- if has . (list "events" "nfsserver") }} +# - {{ $name }}:{{ $service }}.{{ $.Values.domain }} {{- else }} - - {{ . }}:{{ $service_name }}.{{ $.Values.domain }} - {{- end }} + - {{ $name }}:{{ $service }}.{{ $.Values.domain }} + {{- end }} + {{- break -}} + {{- end -}} + {{- end -}} {{- end -}} {{- end -}} {{/* Takes the hard deps, removes argo and adds the db if there is one */}} @@ -129,9 +134,15 @@ services: (without $app_config.harness.dependencies.hard "argo") (dig "database" "name" "" $app_config.harness)) -}} depends_on: - {{- range . }} - - {{ . }} - {{- end }} + {{- range . -}} + {{- $service := .}} + {{- range $name, $conf := $.Values.apps -}} + {{- if eq $conf.harness.name $service }} + - {{ $name }} + {{- break -}} + {{- end -}} + {{- end -}} + {{- end -}} {{- end }} volumes: - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro From 0db12d801a4b729d68c3126aaddd251f0dfbe721 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 15 Mar 2024 06:19:44 -0600 Subject: [PATCH 059/210] CH-100 Fix issue with db-volumes --- .../compose/templates/auto-compose.yaml | 8 ++++++-- .../compose/templates/auto-database.yaml | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 8f1543bd..07d120dd 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -138,11 +138,15 @@ services: {{- $service := .}} {{- range $name, $conf := $.Values.apps -}} {{- if eq $conf.harness.name $service }} + {{- if has $name (list "events" "nfsserver") }} +# - {{ $name }} + {{- else }} - 
{{ $name }} + {{- end }} {{- break -}} {{- end -}} {{- end -}} - {{- end -}} + {{- end }} {{- end }} volumes: - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro @@ -222,7 +226,7 @@ volumes: {{- with $app_config.harness.database }} {{ .name }}: {{- if eq .type "postgres" }} - dshm: + dshm-{{ $app_name }}: {{- end }} {{- end }} {{- if eq $app_name "nfsserver" }} diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml index 70bda63a..569bb220 100644 --- a/deployment-configuration/compose/templates/auto-database.yaml +++ b/deployment-configuration/compose/templates/auto-database.yaml @@ -25,7 +25,7 @@ target: /data/db {{- if eq .type "postgres" }} - type: volume - source: dshm + source: dshm-{{ .name }} target: /dev/shm {{- include "deploy_utils.database.postgres" . }} {{- end }} From f8d4c84c909440358a6e345e69402dd0ddcb3036 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 15 Mar 2024 07:14:17 -0600 Subject: [PATCH 060/210] CH-100 Fix health check for account services --- deployment-configuration/compose/templates/auto-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 07d120dd..e932362d 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -89,7 +89,7 @@ services: {{- end }} {{- if eq $app_name "accounts" }} healthcheck: - test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/azathoth/account"] + test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/{{ $.Values.namespace }}/account"] interval: 1s timeout: 3s retries: 30 @@ -226,7 +226,7 @@ volumes: {{- with $app_config.harness.database }} {{ .name }}: {{- if eq .type "postgres" }} - dshm-{{ $app_name }}: + dshm-{{ .name }}: {{- end }} {{- end }} {{- 
if eq $app_name "nfsserver" }} From 41211804e81089366b1efdbc7b46e82ac9006d5d Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 15 Mar 2024 09:51:44 -0600 Subject: [PATCH 061/210] CH-100 Fix issue with dependencies --- .../compose/templates/auto-compose.yaml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index e932362d..1302040e 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -110,13 +110,13 @@ services: {{- range $app_config.harness.env }} - {{ .name }}={{ .value }} {{- end }} - {{- with $app_config.harness.dependencies.soft }} + {{- with (concat (without $app_config.harness.dependencies.hard "argo") $app_config.harness.dependencies.soft) }} links: {{- range . -}} {{- $service := .}} {{- range $name, $conf := $.Values.apps }} {{- if eq $conf.harness.name $service }} - {{- if has . (list "events" "nfsserver") }} + {{- if has $name (list "events" "nfsserver") }} # - {{ $name }}:{{ $service }}.{{ $.Values.domain }} {{- else }} - {{ $name }}:{{ $service }}.{{ $.Values.domain }} @@ -131,9 +131,11 @@ services: {{/* "compact" in the beginning is to remove empty values */}} {{- with compact (append - (without $app_config.harness.dependencies.hard "argo") + (without $app_config.harness.dependencies.hard "argo" ) (dig "database" "name" "" $app_config.harness)) -}} + {{- with without $app_config.harness.dependencies.hard "argo" "events" }} depends_on: + {{- end }} {{- range . 
-}} {{- $service := .}} {{- range $name, $conf := $.Values.apps -}} From 5bf5532d88e651ac443b5082be7dafd4ec25a932 Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 26 Mar 2024 12:50:24 -0600 Subject: [PATCH 062/210] CH-100 Change tagPolicy for docker-compose target --- tools/deployment-cli-tools/ch_cli_tools/skaffold.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py index 55efc587..7859d043 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py +++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py @@ -199,6 +199,11 @@ def identify_unicorn_based_main(candidates): 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']] } } + skaffold_conf['build']['tagPolicy'] = { + 'envTemplate': { + 'template': "TAG" + } + } skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()] merge_to_yaml_file(skaffold_conf, os.path.join( @@ -212,7 +217,7 @@ def git_clone_hook(conf: GitDependencyConfig, context_path: str): join(os.path.dirname(os.path.dirname(HERE)), 'clone.sh'), conf.branch_tag, conf.url, - join(context_path, "dependencies", conf.path or os.path.basename(conf.url).split('.')[0]) + join(context_path, "dependencies", conf.path or os.path.basename(conf.url).split('.')[0]) ] } From 319352b3c8762a536568b4b916f86b2f442bf912 Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 26 Mar 2024 13:37:08 -0600 Subject: [PATCH 063/210] CH-100 Add first information for the docker compose target --- README.md | 28 ++++++++++++------- docs/build-deploy/README.md | 54 +++++++++++++++++++++++++++++++------ 2 files changed, 65 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 459b2929..46c1cdf2 100644 --- a/README.md +++ b/README.md @@ -2,12 +2,12 @@ drawing

-CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes. +CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes and Docker Compose. Can scaffold and maintain your cloud solution on top of Cloudharness without writing Kubernetes templates, with in place common utilities and applications already configured for you. What building your cloud solution with CloudHarness gives to you: -- Common framework and utilities to develop and deploy micro-service application +- Common framework and utilities to develop and deploy micro-service application - Helm chart automatic generation - deployments - services @@ -17,6 +17,12 @@ What building your cloud solution with CloudHarness gives to you: - access gatekeepers configuration - secrets - templated config maps from files + - Docker compose configuration generation + - services + - traefik configuration + - databases (postgreql) + - access gatekeepers configuration + - secrets * Automatic build and push of images * REST-API scaffolding building based on OpenApi * Continuous deployment script generation @@ -46,14 +52,14 @@ In particular, these questions may rise: - How to manage databases without being locked to a specific vendor solution? - How to perform database backups? - How to manage secret data? - - What about having a precounfigured account management application? - - Sooner rather than later I'll need an orchestration queue. Why not have that just ready to use? + - What about having a precounfigured account management application? + - Sooner rather than later I'll need an orchestration queue. Why not have that just ready to use? # Command line tools CloudHarness provides the following command line tools to help application scaffolding and deployment. -* `harness-deployment` - generate the helm chart to deploy on Kubernetes. +* `harness-deployment` - generate the helm chart to deploy on Kubernetes. 
* `harness-application` - create a new CloudHarness REST application. * `harness-generate` - generates server and client code for all CloudHarness REST applications. * `harness-test` - run end to end tests @@ -67,13 +73,13 @@ Cloudharness can be used on all major operative systems. - Linux: supported and tested - MacOS: supported and tested - Windows/WSL2: supported and tested -- Windows native: mostly working, unsupported +- Windows native: mostly working, unsupported ### Python Python 3.9 must be installed. It is recommended to setup a virtual environment. -With conda: +With conda: ```bash conda create --name ch python=3.9 conda activate ch @@ -94,6 +100,10 @@ conda activate ch [Skaffold](https://skaffold.dev/docs/install/) is the way to go to build and debug your application in your local development environment. +### Docker compose + +[Docker Compose](https://docs.docker.com/compose/) is required if the docker compose system is the target (instead of Kubernetes). + ### Node environment A node environment with npm is required for developing web applications and to run end to end tests. @@ -139,7 +149,7 @@ or simply copy the *blueprint* folder. The script `harness-deployment` scans your applications and configurations to create the build and deploy artifacts. Created artifacts include: - - Helm chart + - Helm chart (or docker compose configuration file) - Skaffold build and run configuration - Visual Studio Code debug and run configuration - Codefresh pipeline yaml specification (optional) @@ -153,7 +163,7 @@ infrastructure cloud-harness ``` -run +run ``` harness-deployment cloud-harness . [PARAMS] diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md index 35e5fbdc..03046971 100644 --- a/docs/build-deploy/README.md +++ b/docs/build-deploy/README.md @@ -17,7 +17,7 @@ infrastructure cloud-harness ``` -run +run ``` harness-deployment cloud-harness . 
[PARAMS] @@ -34,7 +34,7 @@ Deployment definition: - `--env`, `-e`: sets a custom environment (default: none) - `--namespace`, `-n`: set the kubernetes namespace (default: ch) - `--tag`, `-t`: define build tag (default: latest) -- `--registry`, `-r`: set the Docker registry where images are pushed on build +- `--registry`, `-r`: set the Docker registry where images are pushed on build - `--include`, `-i`: set application(s) to include (with their dependencies). If not set, every application will be included - `--exclude`, `-ex`: explicitly exclude applications or task images @@ -46,6 +46,7 @@ Development parameters: Optional settings - `--output`, `-o`: specify helm chart base path (default `./deployment) +- `--docker-compose`: targets Docker Compose instead of Kubernetes (see details below) Build and deploy (deprecated, use Skaffold instead) - `--build`, `-b`: builds and pushes Docker images in the specified registry (if any) @@ -70,7 +71,7 @@ harness-deployment cloud-harness . -d mydomain.dev.org -n mynamespace -e dev -r **Note: Docker registry** -By default `skaffold` builds the images in the local Docker registry. In order to make the deploy work, we need to specify a +By default `skaffold` builds the images in the local Docker registry. In order to make the deploy work, we need to specify a registry that is visible from inside the cluster. The parameter `--registry` allows to specify a registry in which images are pushed after the build. Any public registry will work. The suggested way to go is to install a registry on localhost:5000 inside the kube cluster and push on that registry, also forwarded to localhost. @@ -90,21 +91,21 @@ for instance with a Google Cloud cluster or a local Kubernetes like Minikube or 1. Create the namespace `kubectl create ns ch` 1. Build images and Install or upgrade the helm chart with `skaffold deploy` -To build and reploy +To build and reploy ## Continuous deployment with Codefresh See [here](./codefresh.md). 
## Relevant files and directory structure -Deployment files are automatically generated with the script +Deployment files are automatically generated with the script `harness-deployment`. all the resources intended to install and deploy the platform on Kubernetes. - `codefresh`: codefresh build related files (automatically generated) - `deployment/helm`: the helm chart -What this script does is to go through all the defined applications and use templates to define all the required +What this script does is to go through all the defined applications and use templates to define all the required definitions and variables. General templates are defined inside `deployment-configuration`. @@ -116,7 +117,7 @@ Applications can override templates values by defining a file `values.yaml` in t The following deployment files are generated by `harness-deployment`: - Helm chart configuration for custom deployment: **./helm/values.yaml** -- Codefresh build and deploment definition: **./codefresh/codefresh.yaml** +- Codefresh build and deploment definition: **./codefresh/codefresh.yaml** The script `harness-deployment` also generates a build script to be used by codefresh. @@ -131,6 +132,43 @@ Things to notice: - A Helm chart was created under `deployment/helm` path to handle deployments. - To populate the generated file `deployment/helm/values.yaml` is used. +## Docker compose target + +The Docker compose target, still in alpha stage, allows you to generate the adequate configuration to run all your services on docker compose. + +Not all features of the Kubernetes target are supported by this target. +Currently, the Docker compose target supports: + +- generation of services and links/bindings between them +- generation of volumes for the services +- traefik configuration +- databases (postgreql) +- access gatekeepers configuration +- secrets +- dedicated Skaffold configuration + +The following deployment files are generated by `harness-deployment ... 
--docker-compose`: + +- Docker compose configuration: **deployemnt/docker-compose.yaml** +- Configuration files for the services that needs to be mounted in each docker container: **deployment/resources/** +- Skaffold configuration file for building the services images: **skaffold.yaml** + +The process to build and run the services in Docker compose is the following: + +1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`) +2. build the images for all the services using Skaffold +3. run Docker compose + +Translated to the command line: + +```bash +harmess-deployment ... --docker-compose # replace ... by your options +skaffold build +cd deployment +docker compose up # or "docker-compose up" depending on your installation +``` + + ## Manual configurations - [Configure user accounts](../accounts.md) @@ -141,5 +179,5 @@ In order to access the applications from your browser, set up your hosts file as Example: after running `harness-deployment -d mydomain.local -i samples`, set ``` -127.0.0.1 samples.mydomain.local workflows.mydomain.local events.mydomain.local argo.mydomain.local +127.0.0.1 samples.mydomain.local workflows.mydomain.local events.mydomain.local argo.mydomain.local ``` From 7ec48e15f2fdf5b3af7949c0c522087de88b20d9 Mon Sep 17 00:00:00 2001 From: aranega Date: Wed, 27 Mar 2024 11:05:47 -0600 Subject: [PATCH 064/210] CH-100 Update documentation --- docs/build-deploy/README.md | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md index 03046971..c3b9b72f 100644 --- a/docs/build-deploy/README.md +++ b/docs/build-deploy/README.md @@ -46,7 +46,7 @@ Development parameters: Optional settings - `--output`, `-o`: specify helm chart base path (default `./deployment) -- `--docker-compose`: targets Docker Compose instead of Kubernetes (see details below) +- `--docker-compose`: targets Docker Compose instead of Kubernetes (see 
[details below](#docker-compose-target)) Build and deploy (deprecated, use Skaffold instead) - `--build`, `-b`: builds and pushes Docker images in the specified registry (if any) @@ -143,7 +143,7 @@ Currently, the Docker compose target supports: - generation of volumes for the services - traefik configuration - databases (postgreql) -- access gatekeepers configuration +- access gatekeepers configuration (Keycloak) - secrets - dedicated Skaffold configuration @@ -168,6 +168,18 @@ cd deployment docker compose up # or "docker-compose up" depending on your installation ``` +### Unsupported features + +There is still some features that are not supported by the Docker compose target. +Some are planned, others will not be, or not in a form that is compatible with the original k8s target. + +Here is a list of the unsupported features at the moment: + +- certificates +- Argo (will not be supported as Argo is a dedicated solution for k8s) +- events through Kafka +- NFS server +- DB backups ## Manual configurations From 7edfcc467e3ac61bd5f542b8544359d713d6e12b Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Wed, 27 Mar 2024 19:35:32 +0100 Subject: [PATCH 065/210] #CH-100 improve documentation --- .gitignore | 1 + README.md | 20 ++++---- blueprint/.gitignore | 2 +- docs/README.md | 1 + docs/build-deploy/README.md | 48 ++---------------- docs/build-deploy/docker-compose.md | 78 +++++++++++++++++++++++++++++ docs/dev.md | 40 ++++++++++++++- 7 files changed, 133 insertions(+), 57 deletions(-) create mode 100644 docs/build-deploy/docker-compose.md diff --git a/.gitignore b/.gitignore index 90f643c0..c6add695 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ node_modules .coverage *.DS_Store deployment/helm +deployment/compose *.egg-info *.idea /build diff --git a/README.md b/README.md index 46c1cdf2..cee34fbd 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ drawing

-CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes and Docker Compose. +CloudHarness is a base infrastructure facilitator for microservice based applications deployed primarily on Kubernetes. Can scaffold and maintain your cloud solution on top of Cloudharness without writing Kubernetes templates, with in place common utilities and applications already configured for you. @@ -22,7 +22,7 @@ What building your cloud solution with CloudHarness gives to you: - traefik configuration - databases (postgreql) - access gatekeepers configuration - - secrets + - secrets and configmaps * Automatic build and push of images * REST-API scaffolding building based on OpenApi * Continuous deployment script generation @@ -63,6 +63,7 @@ CloudHarness provides the following command line tools to help application scaff * `harness-application` - create a new CloudHarness REST application. * `harness-generate` - generates server and client code for all CloudHarness REST applications. * `harness-test` - run end to end tests + # Get started ## Prerequisites @@ -137,13 +138,14 @@ To (re)generate the code for your applications, run `harness-generate` from the The script will look for all openapi applications, and regenerate the Flask server code and documentation. Note: the script will eventually override any manually modified file. To avoid that, define a file openapi-generator-ignore. -# Extend CloudHarness to build your solution -CloudHarness is born to be extended. In order to extend CloudHarness you just need to mirror the folder structure: -* **applications**: place here your custom applications, or override default ones -* **deployment-configuration**: override the helm chart default values and templates -* **infrastructure**: define base images to use in your application +# Extend CloudHarness to build your project + +CloudHarness is born to be extended. 
+ +The quickest way to start is to install Cloud Harness, copy the *blueprint* folder and build from that with the cli tools, such as +`harness-application`, `harness-generate`, `harness-deployment`. -or simply copy the *blueprint* folder. +See the [developers documentation](docs/dev.md#start-your-project) for more information. # Build and deploy @@ -154,7 +156,7 @@ Created artifacts include: - Visual Studio Code debug and run configuration - Codefresh pipeline yaml specification (optional) -With your solution folder structure looking like +With your project folder structure looking like ``` applications diff --git a/blueprint/.gitignore b/blueprint/.gitignore index 0ea6b224..54e3e15f 100644 --- a/blueprint/.gitignore +++ b/blueprint/.gitignore @@ -2,7 +2,6 @@ .idea *.iml node_modules -mnp-custom .openapi-generator *.pyc .vscode @@ -12,6 +11,7 @@ cloud-harness build skaffold.yaml /deployment.yaml +/deployment/compose /.run/ *.egg-info .overrides diff --git a/docs/README.md b/docs/README.md index b6a8087f..df93d935 100644 --- a/docs/README.md +++ b/docs/README.md @@ -2,6 +2,7 @@ - [Create and run a deployment](./build-deploy/README.md) - [Create a codefresh continuous deployment](./build-deploy/codefresh.md) - [Configure the Helm chart](./build-deploy/helm-configuration.md) + - [Docker Compose target generation](./build-deploy/docker-compose.md) - [Set up environments](./build-deploy/environments.md) - [Work with local deployments](./build-deploy/local-deploy/README.md) - [Debug your applications](./build-deploy/local-deploy/debug.md) diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md index c3b9b72f..4b1f4b99 100644 --- a/docs/build-deploy/README.md +++ b/docs/build-deploy/README.md @@ -134,52 +134,10 @@ Things to notice: ## Docker compose target -The Docker compose target, still in alpha stage, allows you to generate the adequate configuration to run all your services on docker compose. 
+Docker compose is partially supported as a deployment target as an alternative when Kubernetes +is not available option or for local development. -Not all features of the Kubernetes target are supported by this target. -Currently, the Docker compose target supports: - -- generation of services and links/bindings between them -- generation of volumes for the services -- traefik configuration -- databases (postgreql) -- access gatekeepers configuration (Keycloak) -- secrets -- dedicated Skaffold configuration - -The following deployment files are generated by `harness-deployment ... --docker-compose`: - -- Docker compose configuration: **deployemnt/docker-compose.yaml** -- Configuration files for the services that needs to be mounted in each docker container: **deployment/resources/** -- Skaffold configuration file for building the services images: **skaffold.yaml** - -The process to build and run the services in Docker compose is the following: - -1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`) -2. build the images for all the services using Skaffold -3. run Docker compose - -Translated to the command line: - -```bash -harmess-deployment ... --docker-compose # replace ... by your options -skaffold build -cd deployment -docker compose up # or "docker-compose up" depending on your installation -``` - -### Unsupported features - -There is still some features that are not supported by the Docker compose target. -Some are planned, others will not be, or not in a form that is compatible with the original k8s target. - -Here is a list of the unsupported features at the moment: - -- certificates -- Argo (will not be supported as Argo is a dedicated solution for k8s) -- events through Kafka -- NFS server -- DB backups +See [here](./docker-compose.md) for more information. 
 ## Manual configurations
 
diff --git a/docs/build-deploy/docker-compose.md b/docs/build-deploy/docker-compose.md
new file mode 100644
index 00000000..361ae173
--- /dev/null
+++ b/docs/build-deploy/docker-compose.md
@@ -0,0 +1,78 @@
+# Docker compose target generation
+
+The Docker compose target, still in alpha stage, allows you to generate the base
+configuration to run your services on docker compose.
+
+This feature is intended for limited and development purposes, so not all features of
+the Kubernetes target are supported by this target.
+
+## How to use
+
+The process to build and run the services in Docker compose is the following:
+
+1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`)
+2. build the images for all the services using Skaffold
+3. run Docker compose
+
+Translated to the command line:
+
+```bash
+harness-deployment ... --docker-compose # replace ... by your options
+skaffold build
+cd deployment
+docker compose up # or "docker-compose up" depending on your installation
+```
+
+
+## Supported features
+In general, the supported scope for docker compose services includes the ones that are automatically
+handled by Cloud Harness, hence:
+
+```yaml
+harness:
+  deployment:
+    auto: true
+  service:
+    auto: true
+```
+Other custom Kubernetes templates are not included in the deployment.
+
+Currently, the Docker compose target supports:
+
+- generation of "auto" deployment/services (service and deployment bind to the same artifact in Docker)
+  - environmental variables
+  - links/bindings between services
+  - readiness/liveness probes as healthchecks
+  - Resources requests and limits controls
+  - replicas
+- generation of "auto" volumes and mounting on the services
+- resources (handled as configmaps in Kubernetes, handled as file mounts here)
+- reverse proxy (traefik configuration)
+- "auto" databases (postgresql)
+- secured: access gatekeepers configuration (Keycloak)
+- secrets (no encryption)
+
+The following deployment files are generated by `harness-deployment ... --docker-compose`:
+
+- Docker compose configuration: **deployment/docker-compose.yaml**
+- Configuration files for the services that need to be mounted in each docker container: **deployment/compose/resources/**
+- Skaffold configuration file for building the services images: **skaffold.yaml**
+
+## Unsupported features
+
+There are still some features that are not supported by the Docker compose target.
+Some are planned, others will not be, or not in a form that is compatible with the original k8s target.
+
+Here is a list of the unsupported features at the moment that are in the roadmap:
+
+- TLS certificates
+- application proxy (use_services) specification
+- definition of custom compose templates
+- Events (through Kafka)
+- Jupyterhub
+
+These features are not currently in the roadmap for Docker compose:
+- Unsupported Kubernetes features from Docker
+- NFS server
+- Workflows and tasks (will not be supported as Argo is a dedicated solution for k8s)
+- DB backups
\ No newline at end of file
diff --git a/docs/dev.md b/docs/dev.md
index 113e0c86..8871806e 100644
--- a/docs/dev.md
+++ b/docs/dev.md
@@ -4,6 +4,42 @@ This documentation is meant to be read by developers that needs to make modifica
 The goal of this doc is to show how CloudHarness is internally built, the different parts of the code/files that are relative to specific features, and to provide a map to be able to modify or implement new features quickly.
 CloudHarness is a project that allows you to: quickly generate the code of your webapp, considering that it runs in the cloud with a micro-service architecture, and to easily connect all those micro-services together to finally build the final app.
+
+## Prerequisites and installation
+
+This information is covered in the [main readme](../README.md#prerequisites)
+
+## Start your project
+
+A Cloud Harness project can go to a simple service deployed on Kubernetes and so taking advantage of the
+Helm Chart and CI/CD generation, to anything more structured using a mix of custom applications and
+applications that are built-in in Cloud Harness.
+
+The quickest way to start your project is to copy the **blueprint** directory from cloudharness somewhere and
+commit that to your repository and start building from that.
+
+What the blueprint gives us is basically a mirror of the folder structure that can be recognized by Cloud Harness:
+* **applications**: place here your custom applications, or override default ones
+* **deployment-configuration**: override the helm chart default values and templates
+* **infrastructure**: define base images to use in your application
+
+An initial workflow to start the first project with Cloud Harness can look like this:
+
+1. Copy blueprint to a *my-project* folder
+2. Commit and push to a (git or any other) repository
+3. Clone cloud-harness inside it. Cloud harness can be placed anywhere and shared across different projects but it's easier to start our tutorials with this structure.
+4. Use `harness-application myapp` to create one service/application from one of the available templates.
+5. Play with the `applications/myapp/deploy/values.yaml` file to configure your deployment and add a database, a volume, or other applications as dependencies
+6. Use `harness-deployment cloud-harness . -i myapp` to start generating a deployment including your application and its dependencies
+7. Run locally with `skaffold dev`
+
+The above workflow based on an application template is a great place to get started, but anything can be deployed with Cloud Harness,
+including custom templates and even helm charts.
+
+The above workflow and more is covered in our [tutorials](./tutorials/).
+
+
+## Built-in applications and features
 Currently, the tools that CloudHarness can consider to build the final app are the following:
 * [OpenAPI](https://www.openapis.org/) for generating the model and API of your application (based on an OpenAPI specification),
@@ -14,8 +50,8 @@ Currently, the tools that CloudHarness can consider to build the final app are t
 * [JupyterHub](https://jupyter.org/hub) to provide jupyter notebooks access to a group of users,
 * [Volume Manager](../applications/volumemanager/) to deal with external file system,
 * [NFS Server](../applications/nfsserver/) to provide storage of file on an external NFS file system,
-* [Kubernete](https://kubernetes.io/) is used to manage the auto-scaling, deployements, ... of micro-services on a cluster,
-* [Code Fresh](https://codefresh.io/) for the remote build of the application, and it is configured to initiate a deployment on a remote Kubernete cluster,
+* [Kubernetes](https://kubernetes.io/) is used to manage the auto-scaling, deployments, ... of micro-services on a cluster,
+* [Codefresh](https://codefresh.io/) for the remote build of the application, and it is configured to initiate a deployment on a remote Kubernetes cluster,
 * [Helm Chart](https://helm.sh/docs/topics/charts/) for the packaging of Kubernete resources to simplify the deployment of the application,
 * [Skaffold](https://skaffold.dev/) to help deploying the packaged application in a Kubernete cluster.
From e5d56b7fb6941020ddedb50341a908f78253b404 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 07:28:50 -0600 Subject: [PATCH 066/210] CH-100 First refactoring --- .../compose/templates/auto-compose.yaml | 14 - .../ch_cli_tools/codefresh.py | 16 +- .../ch_cli_tools/configurationgenerator.py | 654 ++++++++++ .../ch_cli_tools/dockercompose.py | 1138 ++++++++--------- .../deployment-cli-tools/ch_cli_tools/helm.py | 1089 ++++++++-------- .../tests/test_codefresh.py | 7 +- tools/deployment-cli-tools/tests/test_helm.py | 25 +- 7 files changed, 1782 insertions(+), 1161 deletions(-) create mode 100644 tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index 1302040e..ca024edd 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -27,9 +27,6 @@ services: {{- continue -}} {{- end -}} {{ $deployment := $app_config.harness.deployment }} - {{- if eq $app_name "nfsserver" }} - {{- include "nfsserver.deployment" $.Values | indent 2 }} - {{- end }} {{- $isSecured := (and $.Values.secured_gatekeepers ( and (hasKey $app_config "port") $app_config.harness.secured )) -}} {{ if $isSecured }} @@ -213,10 +210,6 @@ volumes: {{- break }} {{- end }} {{- with $app_config.harness.database }} -volumes: - {{- break }} - {{- end }} - {{- if eq $app_name "nfsserver" }} volumes: {{- break }} {{- end }} @@ -231,11 +224,4 @@ volumes: dshm-{{ .name }}: {{- end }} {{- end }} - {{- if eq $app_name "nfsserver" }} - {{ $app_config.nfs.volumeName }}: - # driver_opts: - # type: "nfs" - # o: "{{ join "," $app_config.nfs.mountOptions }}" - # device: ":{{ $app_config.nfs.path }}" - {{- end }} {{- end }} diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py index c4b7dd26..bb4b8434 100644 --- 
a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py +++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py @@ -11,7 +11,7 @@ from cloudharness_utils.testing.util import get_app_environment from .models import HarnessMainConfig, ApplicationTestConfig, ApplicationHarnessConfig from cloudharness_utils.constants import * -from .helm import KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, generate_tag_from_content +from .configurationgenerator import KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES from .utils import check_docker_manifest_exists, find_dockerfiles_paths, get_app_relative_to_base_path, guess_build_dependencies_from_dockerfile, \ get_image_name, get_template, dict_merge, app_name_from_path, clean_path from cloudharness_utils.testing.api import get_api_filename, get_schemathesis_command, get_urls_from_api_file @@ -74,7 +74,7 @@ def check_image_exists(name, image): else: env[app_specific_tag_variable(name) + "_NEW"] = 1 - + for app in helm_values.apps.values(): if app.harness and app.harness.deployment.image: @@ -128,7 +128,7 @@ def create_codefresh_deployment_scripts(root_paths, envs=(), include=(), exclude for root_path in root_paths: for e in envs: - + template_name = f"codefresh-template-{e}.yaml" template_path = join( root_path, DEPLOYMENT_CONFIGURATION_PATH, template_name) @@ -245,7 +245,7 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in clean_path(dockerfile_relative_to_root), app_name), environment=e2e_test_environment(app_config) ) - + def add_unit_test_step(app_config: ApplicationHarnessConfig): # Create a run step for each application with tests/unit.yaml file using the corresponding image built at the previous step @@ -280,7 +280,7 @@ def add_unit_test_step(app_config: ApplicationHarnessConfig): codefresh_steps_from_base_path(join( root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False) steps[CD_API_TEST_STEP]["image"] = 
image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name) - + if not codefresh: logging.warning( "No template file found. Codefresh script not created.") @@ -420,7 +420,7 @@ def codefresh_app_build_spec(app_name, app_context_path, dockerfile_path="Docker title=title, working_directory='./' + app_context_path, dockerfile=dockerfile_path) - + tag = app_specific_tag_variable(app_name) build["tag"] = "${{%s}}" % tag @@ -450,7 +450,7 @@ def add_arg_dependencies(dependencies): helm_values.apps[values_key].harness.dependencies.build) except (KeyError, AttributeError): add_arg_dependencies(helm_values['task-images']) - + when_condition = existing_build_when_condition(tag) build["when"] = when_condition return build @@ -471,5 +471,5 @@ def existing_build_when_condition(tag): } } } - + return when_condition diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py new file mode 100644 index 00000000..9a445456 --- /dev/null +++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py @@ -0,0 +1,654 @@ +""" +Utilities to create a helm chart from a CloudHarness directory structure +""" +import yaml +from ruamel.yaml import YAML +import os +import shutil +import logging +from hashlib import sha1 +import subprocess +from functools import cache +import tarfile +from docker import from_env as DockerClient +from pathlib import Path +import copy + + +from . 
import HERE, CH_ROOT +from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ + DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE +from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ + get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ + find_dockerfiles_paths, find_file_paths + +from .models import HarnessMainConfig + + +KEY_HARNESS = 'harness' +KEY_SERVICE = 'service' +KEY_DATABASE = 'database' +KEY_DEPLOYMENT = 'deployment' +KEY_APPS = 'apps' +KEY_TASK_IMAGES = 'task-images' +# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build" +KEY_TEST_IMAGES = 'test-images' + +DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') + + +class ConfigurationGenerator(object): + def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, + namespace=None, templates_path=HELM_PATH): + assert domain, 'A domain must be specified' + self.root_paths = [Path(r) for r in root_paths] + self.tag = tag + if registry and not registry.endswith('/'): + self.registry = f'{registry}/' + else: + self.registry = registry + self.local = local + self.domain = domain + self.exclude = exclude + self.secured = secured + self.output_path = Path(output_path) + self.include = include + self.registry_secret = registry_secret + self.tls = tls + self.env = env + self.namespace = namespace + + self.templates_path = templates_path + self.dest_deployment_path = self.output_path / templates_path + self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml' + self.__init_deployment() + + self.static_images = set() + self.base_images = {} + self.all_images = {} + + def 
__init_deployment(self): + """ + Create the base helm chart + """ + if self.dest_deployment_path.exists(): + shutil.rmtree(self.dest_deployment_path) + # Initialize with default + copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path) + + # Override for every cloudharness scaffolding + for root_path in self.root_paths: + copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, + base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path) + collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, + dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path) + + def _adjust_missing_values(self, helm_values): + if 'name' not in helm_values: + with open(self.helm_chart_path) as f: + chart_idx_content = yaml.safe_load(f) + helm_values['name'] = chart_idx_content['name'].lower() + + def _process_applications(self, helm_values, base_image_name): + for root_path in self.root_paths: + app_values = init_app_values( + root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) + helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + app_values) + + app_base_path = root_path / APPS_PATH + app_values = self.collect_app_values( + app_base_path, base_image_name=base_image_name) + helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + app_values) + + def collect_app_values(self, app_base_path, base_image_name=None): + values = {} + + for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories + app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}") + + if app_name in self.exclude: + continue + app_key = app_name.replace('-', '_') + + app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) + + # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None) + # # for dockerfile_path in app_path.rglob('**/Dockerfile'): + # # parent_name = 
dockerfile_path.parent.name.replace("-", "_") + # # if parent_name == app_key: + # # app_values['build'] = { + # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # # 'dockerfile': "Dockerfile", + # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # } + # # elif "tasks/" in f"{dockerfile_path}": + # # parent_name = parent_name.upper() + # # values.setdefault("task-images-build", {})[parent_name] = { + # # 'dockerfile': "Dockerfile", + # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # } + # # import ipdb; ipdb.set_trace() # fmt: skip + + # if dockerfile_path: + # app_values['build'] = { + # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # 'dockerfile': "Dockerfile", + # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # } + + values[app_key] = dict_merge( + values[app_key], app_values) if app_key in values else app_values + + return values + + def _init_static_images(self, base_image_name): + for static_img_dockerfile in self.static_images: + img_name = image_name_from_dockerfile_path(os.path.basename( + static_img_dockerfile), base_name=base_image_name) + self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( + img_name, build_context_path=static_img_dockerfile) + + def _assign_static_build_dependencies(self, helm_values): + for static_img_dockerfile in self.static_images: + key = os.path.basename(static_img_dockerfile) + if key in helm_values[KEY_TASK_IMAGES]: + dependencies = guess_build_dependencies_from_dockerfile( + f"{static_img_dockerfile}") + for dep in dependencies: + if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: + helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] + # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = { + # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent), + # 'dockerfile': 'Dockerfile', + # 
} + + for image_name in list(helm_values[KEY_TASK_IMAGES].keys()): + if image_name in self.exclude: + del helm_values[KEY_TASK_IMAGES][image_name] + # del helm_values[KEY_TASK_IMAGES_BUILD][image_name] + + def _init_base_images(self, base_image_name): + + for root_path in self.root_paths: + for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): + img_name = image_name_from_dockerfile_path( + os.path.basename(base_img_dockerfile), base_name=base_image_name) + self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=root_path) + + self.static_images.update(find_dockerfiles_paths( + os.path.join(root_path, STATIC_IMAGES_PATH))) + return self.base_images + + def _init_test_images(self, base_image_name): + test_images = {} + for root_path in self.root_paths: + for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): + img_name = image_name_from_dockerfile_path( + os.path.basename(base_img_dockerfile), base_name=base_image_name) + test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=base_img_dockerfile) + + return test_images + + def __find_static_dockerfile_paths(self, root_path): + return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) + + def _merge_base_helm_values(self, helm_values): + # Override for every cloudharness scaffolding + for root_path in self.root_paths: + helm_values = dict_merge( + helm_values, + collect_helm_values(root_path, env=self.env) + ) + + return helm_values + + def _get_default_helm_values(self): + ch_root_path = Path(CH_ROOT) + values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml' + helm_values = get_template(values_yaml_path) + helm_values = dict_merge(helm_values, + collect_helm_values(ch_root_path, env=self.env)) + + return helm_values + + def 
create_tls_certificate(self, helm_values): + if not self.tls: + helm_values['tls'] = None + return + if not self.local: + return + helm_values['tls'] = self.domain.replace(".", "-") + "-tls" + + bootstrap_file = 'bootstrap.sh' + certs_parent_folder_path = self.output_path / 'helm' / 'resources' + certs_folder_path = certs_parent_folder_path / 'certs' + + # if os.path.exists(os.path.join(certs_folder_path)): + if certs_folder_path.exists(): + # don't overwrite the certificate if it exists + return + + try: + client = DockerClient() + client.ping() + except: + raise ConnectionRefusedError( + '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...') + + # Create CA and sign cert for domain + container = client.containers.run(image='frapsoft/openssl', + command=f'sleep 60', + entrypoint="", + detach=True, + environment=[ + f"DOMAIN={self.domain}"], + ) + + container.exec_run('mkdir -p /mnt/vol1') + container.exec_run('mkdir -p /mnt/certs') + + # copy bootstrap file + cur_dir = os.getcwd() + os.chdir(Path(HERE) / 'scripts') + tar = tarfile.open(bootstrap_file + '.tar', mode='w') + try: + tar.add(bootstrap_file) + finally: + tar.close() + data = open(bootstrap_file + '.tar', 'rb').read() + container.put_archive('/mnt/vol1', data) + os.chdir(cur_dir) + container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') + + # exec bootstrap file + container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') + + # retrieve the certs from the container + bits, stat = container.get_archive('/mnt/certs') + if not certs_folder_path.exists(): + certs_folder_path.mkdir(parents=True) + certs_tar = certs_parent_folder_path / 'certs.tar' + with open(certs_tar, 'wb') as f: + for chunk in bits: + f.write(chunk) + cf = tarfile.open(certs_tar) + cf.extractall(path=certs_parent_folder_path) + + logs = container.logs() + logging.info(f'openssl container logs: {logs}') + + # stop the container + container.kill() + + logging.info("Created certificates for local 
deployment") + + def _clear_unused_db_configuration(self, harness_config): + database_config = harness_config[KEY_DATABASE] + database_type = database_config.get('type', None) + if database_type is None: + del harness_config[KEY_DATABASE] + return + db_specific_keys = [k for k, v in database_config.items() + if isinstance(v, dict) and 'image' in v and 'ports' in v] + for db in db_specific_keys: + if database_type != db: + del database_config[db] + + def image_tag(self, image_name, build_context_path=None, dependencies=()): + tag = self.tag + if tag is None and not self.local: + logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") + ignore_path = os.path.join(build_context_path, '.dockerignore') + ignore = set(DEFAULT_IGNORE) + if os.path.exists(ignore_path): + with open(ignore_path) as f: + ignore = ignore.union({line.strip() for line in f}) + logging.info(f"Ignoring {ignore}") + tag = generate_tag_from_content(build_context_path, ignore) + logging.info(f"Content hash: {tag}") + dependencies = dependencies or guess_build_dependencies_from_dockerfile(f"{build_context_path}") + tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() + logging.info(f"Generated tag: {tag}") + app_name = image_name.split("/")[-1] # the image name can have a prefix + self.all_images[app_name] = tag + return self.registry + image_name + (f':{tag}' if tag else '') + + +def get_included_with_dependencies(values, include): + app_values = values['apps'].values() + directly_included = [app for app in app_values if any( + inc == app[KEY_HARNESS]['name'] for inc in include)] + + dependent = set(include) + for app in directly_included: + if app['harness']['dependencies'].get('hard', None): + dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) + if app['harness']['dependencies'].get('soft', None): + dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) + if 
values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: + dependent.add('accounts') + if len(dependent) == len(include): + return dependent + return get_included_with_dependencies(values, dependent) + + +def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): + pass + + +def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): + if not base_helm_chart.exists(): + return + if dest_helm_chart_path.exists(): + logging.info("Merging/overriding all files in directory %s", + dest_helm_chart_path) + merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") + else: + logging.info("Copying base deployment chart from %s to %s", + base_helm_chart, dest_helm_chart_path) + shutil.copytree(base_helm_chart, dest_helm_chart_path) + + +def collect_helm_values(deployment_root, env=()): + """ + Creates helm values from a cloudharness deployment scaffolding + """ + values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml' + + values = get_template(values_template_path) + + for e in env: + specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, + f'values-template-{e}.yaml') + if os.path.exists(specific_template_path): + logging.info( + "Specific environment values template found: " + specific_template_path) + with open(specific_template_path) as f: + values_env_specific = yaml.safe_load(f) + values = dict_merge(values, values_env_specific) + return values + + +def init_app_values(deployment_root, exclude, values=None): + values = values if values is not None else {} + app_base_path = os.path.join(deployment_root, APPS_PATH) + overridden_template_path = os.path.join( + deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + default_values_path = os.path.join( + CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + + for app_path in get_sub_paths(app_base_path): + + app_name = app_name_from_path(os.path.relpath(app_path, 
app_base_path)) + + if app_name in exclude: + continue + app_key = app_name.replace('-', '_') + if app_key not in values: + default_values = get_template(default_values_path) + values[app_key] = default_values + overridden_defaults = get_template(overridden_template_path) + values[app_key] = dict_merge(values[app_key], overridden_defaults) + + return values + + +def values_from_legacy(values): + if KEY_HARNESS not in values: + values[KEY_HARNESS] = {} + harness = values[KEY_HARNESS] + if KEY_SERVICE not in harness: + harness[KEY_SERVICE] = {} + if KEY_DEPLOYMENT not in harness: + harness[KEY_DEPLOYMENT] = {} + if KEY_DATABASE not in harness: + harness[KEY_DATABASE] = {} + + if 'subdomain' in values: + harness['subdomain'] = values['subdomain'] + if 'autodeploy' in values: + harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] + if 'autoservice' in values: + harness[KEY_SERVICE]['auto'] = values['autoservice'] + if 'secureme' in values: + harness['secured'] = values['secureme'] + if 'resources' in values: + harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) + if 'replicas' in values: + harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] + if 'image' in values: + harness[KEY_DEPLOYMENT]['image'] = values['image'] + if 'port' in values: + harness[KEY_DEPLOYMENT]['port'] = values['port'] + harness[KEY_SERVICE]['port'] = values['port'] + + +def values_set_legacy(values): + harness = values[KEY_HARNESS] + if 'image' in harness[KEY_DEPLOYMENT]: + values['image'] = harness[KEY_DEPLOYMENT]['image'] + + values['name'] = harness['name'] + if harness[KEY_DEPLOYMENT].get('port', None): + values['port'] = harness[KEY_DEPLOYMENT]['port'] + if 'resources' in harness[KEY_DEPLOYMENT]: + values['resources'] = harness[KEY_DEPLOYMENT]['resources'] + + +def generate_tag_from_content(content_path, ignore=()): + from dirhash import dirhash + return dirhash(content_path, 'sha1', ignore=ignore) + + +def extract_env_variables_from_values(values, envs=tuple(), prefix=''): 
+ if isinstance(values, dict): + newenvs = list(envs) + for key, value in values.items(): + v = extract_env_variables_from_values( + value, envs, f"{prefix}_{key}".replace('-', '_').upper()) + if key in ('name', 'port', 'subdomain'): + newenvs.extend(v) + return newenvs + else: + return [env_variable(prefix, values)] + + +def create_env_variables(values): + for app_name, value in values[KEY_APPS].items(): + if KEY_HARNESS in value: + values['env'].extend(extract_env_variables_from_values( + value[KEY_HARNESS], prefix='CH_' + app_name)) + values['env'].append(env_variable('CH_DOMAIN', values['domain'])) + values['env'].append(env_variable( + 'CH_IMAGE_REGISTRY', values['registry']['name'])) + values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) + + +def hosts_info(values): + domain = values['domain'] + namespace = values['namespace'] + subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if + KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if + KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] + try: + ip = get_cluster_ip() + except: + logging.warning('Cannot get cluster ip') + return + logging.info( + "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' 
+ domain for sd in subdomains)}") + + deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] + for app in values[KEY_APPS].values() if KEY_HARNESS in app) + + logging.info( + "\nTo run locally some apps, also those references may be needed") + for appname in values[KEY_APPS]: + app = values[KEY_APPS][appname]['harness'] + if 'deployment' not in app: + continue + print( + "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( + app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) + + print( + f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") + + +class ValuesValidationException(Exception): + pass + + +def validate_helm_values(values): + validate_dependencies(values) + + +def validate_dependencies(values): + all_apps = {a for a in values["apps"]} + for app in all_apps: + app_values = values["apps"][app] + if 'dependencies' in app_values[KEY_HARNESS]: + soft_dependencies = { + d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} + not_found = {d for d in soft_dependencies if d not in all_apps} + if not_found: + logging.warning( + f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") + hard_dependencies = { + d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} + not_found = {d for d in hard_dependencies if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad application dependencies specified for application {app}: {','.join(not_found)}") + + build_dependencies = { + d for d in app_values[KEY_HARNESS]['dependencies']['build']} + + not_found = { + d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} + not_found = {d for d in not_found if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") + + if 'use_services' in app_values[KEY_HARNESS]: 
+ service_dependencies = {d['name'].replace( + "-", "_") for d in app_values[KEY_HARNESS]['use_services']} + + not_found = {d for d in service_dependencies if d not in all_apps} + if not_found: + raise ValuesValidationException( + f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") + + +def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path=HELM_PATH, exclude=(), include=None): + """ + Searches recursively for helm templates inside the applications and collects the templates in the destination + + :param search_root: + :param dest_helm_chart_path: collected helm templates destination folder + :param exclude: + :return: + """ + app_base_path = search_root / APPS_PATH + + for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories + app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) + if app_name in exclude or (include and not any(inc in app_name for inc in include)): + continue + if templates_path == HELM_PATH: + template_dir = app_path / 'deploy' / 'templates' + else: + template_dir = app_path / 'deploy' / f'templates-{templates_path}' + if template_dir.exists(): + dest_dir = dest_helm_chart_path / 'templates' / app_name + + logging.info( + "Collecting templates for application %s to %s", app_name, dest_dir) + if dest_dir.exists(): + logging.warning( + "Merging/overriding all files in directory %s", dest_dir) + merge_configuration_directories(f"{template_dir}", f"{dest_dir}") + else: + shutil.copytree(template_dir, dest_dir) + resources_dir = app_path / 'deploy' / 'resources' + if resources_dir.exists(): + dest_dir = dest_helm_chart_path / 'resources' / app_name + + logging.info( + "Collecting resources for application %s to %s", app_name, dest_dir) + + merge_configuration_directories(f"{resources_dir}", f"{dest_dir}") + + if templates_path == HELM_PATH: + subchart_dir = app_path / 'deploy/charts' + if subchart_dir.exists(): + dest_dir = 
dest_helm_chart_path / 'charts' / app_name + + logging.info( + "Collecting templates for application %s to %s", app_name, dest_dir) + if dest_dir.exists(): + logging.warning( + "Merging/overriding all files in directory %s", dest_dir) + merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") + else: + shutil.copytree(subchart_dir, dest_dir) + + +# def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path=None, exclude=(), include=None): +# """ +# Searches recursively for helm templates inside the applications and collects the templates in the destination + +# :param search_root: +# :param dest_helm_chart_path: collected helm templates destination folder +# :param exclude: +# :return: +# """ +# app_base_path = os.path.join(search_root, APPS_PATH) + +# import ipdb; ipdb.set_trace() # fmt: skip + +# for app_path in get_sub_paths(app_base_path): +# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) +# if app_name in exclude or (include and not any(inc in app_name for inc in include)): +# continue +# template_dir = os.path.join(app_path, 'deploy', 'templates') +# if os.path.exists(template_dir): +# dest_dir = os.path.join( +# dest_helm_chart_path, 'templates', app_name) + +# logging.info( +# "Collecting templates for application %s to %s", app_name, dest_dir) +# if os.path.exists(dest_dir): +# logging.warning( +# "Merging/overriding all files in directory %s", dest_dir) +# merge_configuration_directories(template_dir, dest_dir) +# else: +# shutil.copytree(template_dir, dest_dir) +# resources_dir = os.path.join(app_path, 'deploy/resources') +# if os.path.exists(resources_dir): +# dest_dir = os.path.join( +# dest_helm_chart_path, 'resources', app_name) + +# logging.info( +# "Collecting resources for application %s to %s", app_name, dest_dir) + +# merge_configuration_directories(resources_dir, dest_dir) + +# subchart_dir = os.path.join(app_path, 'deploy/charts') +# if os.path.exists(subchart_dir): +# dest_dir = 
os.path.join(dest_helm_chart_path, 'charts', app_name) + +# logging.info( +# "Collecting templates for application %s to %s", app_name, dest_dir) +# if os.path.exists(dest_dir): +# logging.warning( +# "Merging/overriding all files in directory %s", dest_dir) +# merge_configuration_directories(subchart_dir, dest_dir) +# else: +# shutil.copytree(subchart_dir, dest_dir) \ No newline at end of file diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index bafe5a00..0e75ed7e 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -24,89 +24,81 @@ from .models import HarnessMainConfig -KEY_HARNESS = 'harness' -KEY_SERVICE = 'service' -KEY_DATABASE = 'database' -KEY_DEPLOYMENT = 'deployment' -KEY_APPS = 'apps' -KEY_TASK_IMAGES = 'task-images' -# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build" -KEY_TEST_IMAGES = 'test-images' +from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates -DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None) -> HarnessMainConfig: if (type(env)) == str: env = [env] - return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured, + return CloudHarnessDockerCompose(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured, output_path=output_path, 
include=include, registry_secret=registry_secret, tls=tls, env=env, namespace=namespace, templates_path=COMPOSE).process_values() -class CloudHarnessHelm: - def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, - output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, - namespace=None, templates_path=HELM_PATH): - assert domain, 'A domain must be specified' - self.root_paths = [Path(r) for r in root_paths] - self.tag = tag - if registry and not registry.endswith('/'): - self.registry = f'{registry}/' - else: - self.registry = registry - self.local = local - self.domain = domain - self.exclude = exclude - self.secured = secured - self.output_path = Path(output_path) - self.include = include - self.registry_secret = registry_secret - self.tls = tls - self.env = env - self.namespace = namespace - - self.templates_path = templates_path - self.dest_deployment_path = self.output_path / templates_path - self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml' - self.__init_deployment() - - self.static_images = set() - self.base_images = {} - self.all_images = {} - - def __init_deployment(self): - """ - Create the base helm chart - """ - if self.dest_deployment_path.exists(): - shutil.rmtree(self.dest_deployment_path) - # Initialize with default - copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path) - - # Override for every cloudharness scaffolding - for root_path in self.root_paths: - copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, - base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path) - collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, - dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path) - - def __adjust_missing_values(self, helm_values): - if 'name' not in helm_values: - with 
open(self.helm_chart_path) as f: - chart_idx_content = yaml.safe_load(f) - helm_values['name'] = chart_idx_content['name'].lower() +class CloudHarnessDockerCompose(ConfigurationGenerator): + # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, + # namespace=None, templates_path=HELM_PATH): + # assert domain, 'A domain must be specified' + # self.root_paths = [Path(r) for r in root_paths] + # self.tag = tag + # if registry and not registry.endswith('/'): + # self.registry = f'{registry}/' + # else: + # self.registry = registry + # self.local = local + # self.domain = domain + # self.exclude = exclude + # self.secured = secured + # self.output_path = Path(output_path) + # self.include = include + # self.registry_secret = registry_secret + # self.tls = tls + # self.env = env + # self.namespace = namespace + + # self.templates_path = templates_path + # self.dest_deployment_path = self.output_path / templates_path + # self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml' + # self.__init_deployment() + + # self.static_images = set() + # self.base_images = {} + # self.all_images = {} + + # def __init_deployment(self): + # """ + # Create the base helm chart + # """ + # if self.dest_deployment_path.exists(): + # shutil.rmtree(self.dest_deployment_path) + # # Initialize with default + # copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path) + + # # Override for every cloudharness scaffolding + # for root_path in self.root_paths: + # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, + # base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path) + # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, + # dest_helm_chart_path=self.dest_deployment_path, 
templates_path=self.templates_path) + + # def __adjust_missing_values(self, helm_values): + # if 'name' not in helm_values: + # with open(self.helm_chart_path) as f: + # chart_idx_content = yaml.safe_load(f) + # helm_values['name'] = chart_idx_content['name'].lower() def process_values(self) -> HarnessMainConfig: """ Creates values file for the helm chart """ - helm_values = self.__get_default_helm_values() + helm_values = self._get_default_helm_values() - self.__adjust_missing_values(helm_values) + self._adjust_missing_values(helm_values) - helm_values = self.__merge_base_helm_values(helm_values) + helm_values = self._merge_base_helm_values(helm_values) helm_values[KEY_APPS] = {} @@ -114,18 +106,18 @@ def process_values(self) -> HarnessMainConfig: helm_values[KEY_TASK_IMAGES] = {} - self.__init_base_images(base_image_name) - self.__init_static_images(base_image_name) - helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name) + self._init_base_images(base_image_name) + self._init_static_images(base_image_name) + helm_values[KEY_TEST_IMAGES] = self._init_test_images(base_image_name) - self.__process_applications(helm_values, base_image_name) + self._process_applications(helm_values, base_image_name) # self.create_tls_certificate(helm_values) values, include = self.__finish_helm_values(values=helm_values) # Adjust dependencies from static (common) images - self.__assign_static_build_dependencies(helm_values) + self._assign_static_build_dependencies(helm_values) for root_path in self.root_paths: collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, @@ -184,132 +176,132 @@ def __post_process_multiple_document_docker_compose(self, yaml_document): main_document = document # we need to save the main document later yaml_handler.dump(main_document, yaml_document) - def __process_applications(self, helm_values, base_image_name): - for root_path in self.root_paths: - app_values = init_app_values( - root_path, exclude=self.exclude, 
values=helm_values[KEY_APPS]) - helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - app_values) - - app_base_path = root_path / APPS_PATH - app_values = self.collect_app_values( - app_base_path, base_image_name=base_image_name) - helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - app_values) - - def collect_app_values(self, app_base_path, base_image_name=None): - values = {} - - for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories - app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}") - - if app_name in self.exclude: - continue - app_key = app_name.replace('-', '_') - - app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) - - # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None) - # # for dockerfile_path in app_path.rglob('**/Dockerfile'): - # # parent_name = dockerfile_path.parent.name.replace("-", "_") - # # if parent_name == app_key: - # # app_values['build'] = { - # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", - # # 'dockerfile': "Dockerfile", - # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # # } - # # elif "tasks/" in f"{dockerfile_path}": - # # parent_name = parent_name.upper() - # # values.setdefault("task-images-build", {})[parent_name] = { - # # 'dockerfile': "Dockerfile", - # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # # } - # # import ipdb; ipdb.set_trace() # fmt: skip - - # if dockerfile_path: - # app_values['build'] = { - # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", - # 'dockerfile': "Dockerfile", - # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # } - - values[app_key] = dict_merge( - values[app_key], app_values) if app_key in values else app_values - - return values - - def __init_static_images(self, base_image_name): - for static_img_dockerfile in 
self.static_images: - img_name = image_name_from_dockerfile_path(os.path.basename( - static_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( - img_name, build_context_path=static_img_dockerfile) - - def __assign_static_build_dependencies(self, helm_values): - for static_img_dockerfile in self.static_images: - key = os.path.basename(static_img_dockerfile) - if key in helm_values[KEY_TASK_IMAGES]: - dependencies = guess_build_dependencies_from_dockerfile( - static_img_dockerfile) - for dep in dependencies: - if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: - helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] - # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = { - # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent), - # 'dockerfile': 'Dockerfile', - # } - - for image_name in helm_values[KEY_TASK_IMAGES].keys(): - if image_name in self.exclude: - del helm_values[KEY_TASK_IMAGES][image_name] - # del helm_values[KEY_TASK_IMAGES_BUILD][image_name] - - def __init_base_images(self, base_image_name): - - for root_path in self.root_paths: - for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): - img_name = image_name_from_dockerfile_path( - os.path.basename(base_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - img_name, build_context_path=root_path) - - self.static_images.update(find_dockerfiles_paths( - os.path.join(root_path, STATIC_IMAGES_PATH))) - return self.base_images - - def __init_test_images(self, base_image_name): - test_images = {} - for root_path in self.root_paths: - for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): - img_name = image_name_from_dockerfile_path( - os.path.basename(base_img_dockerfile), base_name=base_image_name) - test_images[os.path.basename(base_img_dockerfile)] = 
self.image_tag( - img_name, build_context_path=base_img_dockerfile) - - return test_images - - - def __find_static_dockerfile_paths(self, root_path): - return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) - - def __merge_base_helm_values(self, helm_values): - # Override for every cloudharness scaffolding - for root_path in self.root_paths: - helm_values = dict_merge( - helm_values, - collect_helm_values(root_path, env=self.env) - ) - - return helm_values - - def __get_default_helm_values(self): - ch_root_path = Path(CH_ROOT) - values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml' - helm_values = get_template(values_yaml_path) - helm_values = dict_merge(helm_values, - collect_helm_values(ch_root_path, env=self.env)) - - return helm_values + # def __process_applications(self, helm_values, base_image_name): + # for root_path in self.root_paths: + # app_values = init_app_values( + # root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) + # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + # app_values) + + # app_base_path = root_path / APPS_PATH + # app_values = self.collect_app_values( + # app_base_path, base_image_name=base_image_name) + # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + # app_values) + + # def collect_app_values(self, app_base_path, base_image_name=None): + # values = {} + + # for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories + # app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}") + + # if app_name in self.exclude: + # continue + # app_key = app_name.replace('-', '_') + + # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) + + # # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None) + # # # for dockerfile_path in app_path.rglob('**/Dockerfile'): + # # # parent_name = 
dockerfile_path.parent.name.replace("-", "_") + # # # if parent_name == app_key: + # # # app_values['build'] = { + # # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # # # 'dockerfile': "Dockerfile", + # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # # } + # # # elif "tasks/" in f"{dockerfile_path}": + # # # parent_name = parent_name.upper() + # # # values.setdefault("task-images-build", {})[parent_name] = { + # # # 'dockerfile': "Dockerfile", + # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # # } + # # # import ipdb; ipdb.set_trace() # fmt: skip + + # # if dockerfile_path: + # # app_values['build'] = { + # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", + # # 'dockerfile': "Dockerfile", + # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), + # # } + + # values[app_key] = dict_merge( + # values[app_key], app_values) if app_key in values else app_values + + # return values + + # def __init_static_images(self, base_image_name): + # for static_img_dockerfile in self.static_images: + # img_name = image_name_from_dockerfile_path(os.path.basename( + # static_img_dockerfile), base_name=base_image_name) + # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=static_img_dockerfile) + + # def __assign_static_build_dependencies(self, helm_values): + # for static_img_dockerfile in self.static_images: + # key = os.path.basename(static_img_dockerfile) + # if key in helm_values[KEY_TASK_IMAGES]: + # dependencies = guess_build_dependencies_from_dockerfile( + # static_img_dockerfile) + # for dep in dependencies: + # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: + # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] + # # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = { + # # 'context': 
os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent), + # # 'dockerfile': 'Dockerfile', + # # } + + # for image_name in helm_values[KEY_TASK_IMAGES].keys(): + # if image_name in self.exclude: + # del helm_values[KEY_TASK_IMAGES][image_name] + # # del helm_values[KEY_TASK_IMAGES_BUILD][image_name] + + # def __init_base_images(self, base_image_name): + + # for root_path in self.root_paths: + # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): + # img_name = image_name_from_dockerfile_path( + # os.path.basename(base_img_dockerfile), base_name=base_image_name) + # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=root_path) + + # self.static_images.update(find_dockerfiles_paths( + # os.path.join(root_path, STATIC_IMAGES_PATH))) + # return self.base_images + + # def __init_test_images(self, base_image_name): + # test_images = {} + # for root_path in self.root_paths: + # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): + # img_name = image_name_from_dockerfile_path( + # os.path.basename(base_img_dockerfile), base_name=base_image_name) + # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=base_img_dockerfile) + + # return test_images + + + # def __find_static_dockerfile_paths(self, root_path): + # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) + + # def __merge_base_helm_values(self, helm_values): + # # Override for every cloudharness scaffolding + # for root_path in self.root_paths: + # helm_values = dict_merge( + # helm_values, + # collect_helm_values(root_path, env=self.env) + # ) + + # return helm_values + + # def __get_default_helm_values(self): + # ch_root_path = Path(CH_ROOT) + # values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml' + # 
helm_values = get_template(values_yaml_path) + # helm_values = dict_merge(helm_values, + # collect_helm_values(ch_root_path, env=self.env)) + + # return helm_values def __get_default_helm_values_with_secrets(self, helm_values): helm_values = copy.deepcopy(helm_values) @@ -323,76 +315,76 @@ def __get_default_helm_values_with_secrets(self, helm_values): helm_values['apps'][key]['harness']['secrets'] = {} return helm_values - def create_tls_certificate(self, helm_values): - if not self.tls: - helm_values['tls'] = None - return - if not self.local: - return - helm_values['tls'] = self.domain.replace(".", "-") + "-tls" - - bootstrap_file = 'bootstrap.sh' - certs_parent_folder_path = self.output_path / 'helm' / 'resources' - certs_folder_path = certs_parent_folder_path / 'certs' - - # if os.path.exists(os.path.join(certs_folder_path)): - if certs_folder_path.exists(): - # don't overwrite the certificate if it exists - return - - try: - client = DockerClient() - client.ping() - except: - raise ConnectionRefusedError( - '\n\nIs docker running? 
Run "eval(minikube docker-env)" if you are using minikube...') - - # Create CA and sign cert for domain - container = client.containers.run(image='frapsoft/openssl', - command=f'sleep 60', - entrypoint="", - detach=True, - environment=[ - f"DOMAIN={self.domain}"], - ) - - container.exec_run('mkdir -p /mnt/vol1') - container.exec_run('mkdir -p /mnt/certs') - - # copy bootstrap file - cur_dir = os.getcwd() - os.chdir(Path(HERE) / 'scripts') - tar = tarfile.open(bootstrap_file + '.tar', mode='w') - try: - tar.add(bootstrap_file) - finally: - tar.close() - data = open(bootstrap_file + '.tar', 'rb').read() - container.put_archive('/mnt/vol1', data) - os.chdir(cur_dir) - container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') - - # exec bootstrap file - container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') - - # retrieve the certs from the container - bits, stat = container.get_archive('/mnt/certs') - if not certs_folder_path.exists(): - certs_folder_path.mkdir(parents=True) - certs_tar = certs_parent_folder_path / 'certs.tar' - with open(certs_tar, 'wb') as f: - for chunk in bits: - f.write(chunk) - cf = tarfile.open(certs_tar) - cf.extractall(path=certs_parent_folder_path) - - logs = container.logs() - logging.info(f'openssl container logs: {logs}') - - # stop the container - container.kill() - - logging.info("Created certificates for local deployment") + # def create_tls_certificate(self, helm_values): + # if not self.tls: + # helm_values['tls'] = None + # return + # if not self.local: + # return + # helm_values['tls'] = self.domain.replace(".", "-") + "-tls" + + # bootstrap_file = 'bootstrap.sh' + # certs_parent_folder_path = self.output_path / 'helm' / 'resources' + # certs_folder_path = certs_parent_folder_path / 'certs' + + # # if os.path.exists(os.path.join(certs_folder_path)): + # if certs_folder_path.exists(): + # # don't overwrite the certificate if it exists + # return + + # try: + # client = DockerClient() + # client.ping() + # except: + 
# raise ConnectionRefusedError( + # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...') + + # # Create CA and sign cert for domain + # container = client.containers.run(image='frapsoft/openssl', + # command=f'sleep 60', + # entrypoint="", + # detach=True, + # environment=[ + # f"DOMAIN={self.domain}"], + # ) + + # container.exec_run('mkdir -p /mnt/vol1') + # container.exec_run('mkdir -p /mnt/certs') + + # # copy bootstrap file + # cur_dir = os.getcwd() + # os.chdir(Path(HERE) / 'scripts') + # tar = tarfile.open(bootstrap_file + '.tar', mode='w') + # try: + # tar.add(bootstrap_file) + # finally: + # tar.close() + # data = open(bootstrap_file + '.tar', 'rb').read() + # container.put_archive('/mnt/vol1', data) + # os.chdir(cur_dir) + # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') + + # # exec bootstrap file + # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') + + # # retrieve the certs from the container + # bits, stat = container.get_archive('/mnt/certs') + # if not certs_folder_path.exists(): + # certs_folder_path.mkdir(parents=True) + # certs_tar = certs_parent_folder_path / 'certs.tar' + # with open(certs_tar, 'wb') as f: + # for chunk in bits: + # f.write(chunk) + # cf = tarfile.open(certs_tar) + # cf.extractall(path=certs_parent_folder_path) + + # logs = container.logs() + # logging.info(f'openssl container logs: {logs}') + + # # stop the container + # container.kill() + + # logging.info("Created certificates for local deployment") def __finish_helm_values(self, values): """ @@ -444,7 +436,7 @@ def __finish_helm_values(self, values): if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None): harness[KEY_DATABASE]['name'] = app_name.strip() + '-db' - self.__clear_unused_db_configuration(harness) + self._clear_unused_db_configuration(harness) values_set_legacy(v) if self.include: @@ -464,36 +456,36 @@ def __finish_helm_values(self, values): create_env_variables(values) return 
values, self.include - def __clear_unused_db_configuration(self, harness_config): - database_config = harness_config[KEY_DATABASE] - database_type = database_config.get('type', None) - if database_type is None: - del harness_config[KEY_DATABASE] - return - db_specific_keys = [k for k, v in database_config.items() - if isinstance(v, dict) and 'image' in v and 'ports' in v] - for db in db_specific_keys: - if database_type != db: - del database_config[db] - - def image_tag(self, image_name, build_context_path=None, dependencies=()): - tag = self.tag - if tag is None and not self.local: - logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") - ignore_path = os.path.join(build_context_path, '.dockerignore') - ignore = set(DEFAULT_IGNORE) - if os.path.exists(ignore_path): - with open(ignore_path) as f: - ignore = ignore.union({line.strip() for line in f}) - logging.info(f"Ignoring {ignore}") - tag = generate_tag_from_content(build_context_path, ignore) - logging.info(f"Content hash: {tag}") - dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) - tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() - logging.info(f"Generated tag: {tag}") - app_name = image_name.split("/")[-1] # the image name can have a prefix - self.all_images[app_name] = tag - return self.registry + image_name + (f':{tag}' if tag else '') + # def __clear_unused_db_configuration(self, harness_config): + # database_config = harness_config[KEY_DATABASE] + # database_type = database_config.get('type', None) + # if database_type is None: + # del harness_config[KEY_DATABASE] + # return + # db_specific_keys = [k for k, v in database_config.items() + # if isinstance(v, dict) and 'image' in v and 'ports' in v] + # for db in db_specific_keys: + # if database_type != db: + # del database_config[db] + + # def image_tag(self, image_name, build_context_path=None, dependencies=()): + # 
tag = self.tag + # if tag is None and not self.local: + # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") + # ignore_path = os.path.join(build_context_path, '.dockerignore') + # ignore = set(DEFAULT_IGNORE) + # if os.path.exists(ignore_path): + # with open(ignore_path) as f: + # ignore = ignore.union({line.strip() for line in f}) + # logging.info(f"Ignoring {ignore}") + # tag = generate_tag_from_content(build_context_path, ignore) + # logging.info(f"Content hash: {tag}") + # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) + # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() + # logging.info(f"Generated tag: {tag}") + # app_name = image_name.split("/")[-1] # the image name can have a prefix + # self.all_images[app_name] = tag + # return self.registry + image_name + (f':{tag}' if tag else '') def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) @@ -559,8 +551,6 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): task_path, app_path.parent)) img_name = image_name_from_dockerfile_path(task_name, base_image_name) - # import ipdb; ipdb.set_trace() # fmt: skip - # values[KEY_TASK_IMAGES][task_name] = self.image_tag( # img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) # values.setdefault(KEY_TASK_IMAGES_BUILD, {})[task_name] = { @@ -591,281 +581,281 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path): helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py' -def get_included_with_dependencies(values, include): - app_values = values['apps'].values() - directly_included = [app for app in app_values if any( - inc == app[KEY_HARNESS]['name'] for inc in include)] - - dependent = set(include) - for app in 
directly_included: - if app['harness']['dependencies'].get('hard', None): - dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) - if app['harness']['dependencies'].get('soft', None): - dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) - if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: - dependent.add('accounts') - if len(dependent) == len(include): - return dependent - return get_included_with_dependencies(values, dependent) - - -def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): - pass - - -def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None): - """ - Searches recursively for helm templates inside the applications and collects the templates in the destination - - :param search_root: - :param dest_helm_chart_path: collected helm templates destination folder - :param exclude: - :return: - """ - app_base_path = search_root / APPS_PATH - - for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories - app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) - if app_name in exclude or (include and not any(inc in app_name for inc in include)): - continue - template_dir = app_path / 'deploy' / f'templates-{templates_path}' - if template_dir.exists(): - dest_dir = dest_helm_chart_path / 'templates' / app_name - - logging.info( - "Collecting templates for application %s to %s", app_name, dest_dir) - if dest_dir.exists(): - logging.warning( - "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(f"{template_dir}", f"{dest_dir}") - else: - shutil.copytree(template_dir, dest_dir) - resources_dir = app_path / 'deploy' / 'resources' - if resources_dir.exists(): - dest_dir = dest_helm_chart_path / 'resources' / app_name - - logging.info( - "Collecting resources for application %s to %s", app_name, dest_dir) - - merge_configuration_directories(f"{resources_dir}", 
f"{dest_dir}") - - # subchart_dir = app_path / 'deploy/charts' - # if subchart_dir.exists(): - # dest_dir = dest_helm_chart_path / 'charts' / app_name - - # logging.info( - # "Collecting templates for application %s to %s", app_name, dest_dir) - # if dest_dir.exists(): - # logging.warning( - # "Merging/overriding all files in directory %s", dest_dir) - # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") - # else: - # shutil.copytree(subchart_dir, dest_dir) - - -def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): - if not base_helm_chart.exists(): - return - if dest_helm_chart_path.exists(): - logging.info("Merging/overriding all files in directory %s", - dest_helm_chart_path) - merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") - else: - logging.info("Copying base deployment chart from %s to %s", - base_helm_chart, dest_helm_chart_path) - shutil.copytree(base_helm_chart, dest_helm_chart_path) - - -def collect_helm_values(deployment_root, env=()): - """ - Creates helm values from a cloudharness deployment scaffolding - """ - values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml' - - values = get_template(values_template_path) - - for e in env: - specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, - f'values-template-{e}.yaml') - if os.path.exists(specific_template_path): - logging.info( - "Specific environment values template found: " + specific_template_path) - with open(specific_template_path) as f: - values_env_specific = yaml.safe_load(f) - values = dict_merge(values, values_env_specific) - return values - - -def init_app_values(deployment_root, exclude, values=None): - values = values if values is not None else {} - app_base_path = os.path.join(deployment_root, APPS_PATH) - overridden_template_path = os.path.join( - deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - default_values_path = os.path.join( 
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - - for app_path in get_sub_paths(app_base_path): - - app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) - - if app_name in exclude: - continue - app_key = app_name.replace('-', '_') - if app_key not in values: - default_values = get_template(default_values_path) - values[app_key] = default_values - overridden_defaults = get_template(overridden_template_path) - values[app_key] = dict_merge(values[app_key], overridden_defaults) - - return values - - -def values_from_legacy(values): - if KEY_HARNESS not in values: - values[KEY_HARNESS] = {} - harness = values[KEY_HARNESS] - if KEY_SERVICE not in harness: - harness[KEY_SERVICE] = {} - if KEY_DEPLOYMENT not in harness: - harness[KEY_DEPLOYMENT] = {} - if KEY_DATABASE not in harness: - harness[KEY_DATABASE] = {} - - if 'subdomain' in values: - harness['subdomain'] = values['subdomain'] - if 'autodeploy' in values: - harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] - if 'autoservice' in values: - harness[KEY_SERVICE]['auto'] = values['autoservice'] - if 'secureme' in values: - harness['secured'] = values['secureme'] - if 'resources' in values: - harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) - if 'replicas' in values: - harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] - if 'image' in values: - harness[KEY_DEPLOYMENT]['image'] = values['image'] - if 'port' in values: - harness[KEY_DEPLOYMENT]['port'] = values['port'] - harness[KEY_SERVICE]['port'] = values['port'] - - -def values_set_legacy(values): - harness = values[KEY_HARNESS] - if 'image' in harness[KEY_DEPLOYMENT]: - values['image'] = harness[KEY_DEPLOYMENT]['image'] - - values['name'] = harness['name'] - if harness[KEY_DEPLOYMENT].get('port', None): - values['port'] = harness[KEY_DEPLOYMENT]['port'] - if 'resources' in harness[KEY_DEPLOYMENT]: - values['resources'] = harness[KEY_DEPLOYMENT]['resources'] - - -def 
generate_tag_from_content(content_path, ignore=()): - from dirhash import dirhash - return dirhash(content_path, 'sha1', ignore=ignore) - - -def extract_env_variables_from_values(values, envs=tuple(), prefix=''): - if isinstance(values, dict): - newenvs = list(envs) - for key, value in values.items(): - v = extract_env_variables_from_values( - value, envs, f"{prefix}_{key}".replace('-', '_').upper()) - if key in ('name', 'port', 'subdomain'): - newenvs.extend(v) - return newenvs - else: - return [env_variable(prefix, values)] - - -def create_env_variables(values): - for app_name, value in values[KEY_APPS].items(): - if KEY_HARNESS in value: - values['env'].extend(extract_env_variables_from_values( - value[KEY_HARNESS], prefix='CH_' + app_name)) - values['env'].append(env_variable('CH_DOMAIN', values['domain'])) - values['env'].append(env_variable( - 'CH_IMAGE_REGISTRY', values['registry']['name'])) - values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) - - -def hosts_info(values): - domain = values['domain'] - namespace = values['namespace'] - subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if - KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if - KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] - try: - ip = get_cluster_ip() - except: - logging.warning('Cannot get cluster ip') - return - logging.info( - "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' 
+ domain for sd in subdomains)}") - - deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] - for app in values[KEY_APPS].values() if KEY_HARNESS in app) - - logging.info( - "\nTo run locally some apps, also those references may be needed") - for appname in values[KEY_APPS]: - app = values[KEY_APPS][appname]['harness'] - if 'deployment' not in app: - continue - print( - "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( - app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) - - print( - f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") - - -class ValuesValidationException(Exception): - pass - - -def validate_helm_values(values): - validate_dependencies(values) - - -def validate_dependencies(values): - all_apps = {a for a in values["apps"]} - for app in all_apps: - app_values = values["apps"][app] - if 'dependencies' in app_values[KEY_HARNESS]: - soft_dependencies = { - d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} - not_found = {d for d in soft_dependencies if d not in all_apps} - if not_found: - logging.warning( - f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") - hard_dependencies = { - d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} - not_found = {d for d in hard_dependencies if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad application dependencies specified for application {app}: {','.join(not_found)}") - - build_dependencies = { - d for d in app_values[KEY_HARNESS]['dependencies']['build']} - - not_found = { - d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} - not_found = {d for d in not_found if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") - - if 'use_services' in app_values[KEY_HARNESS]: 
- service_dependencies = {d['name'].replace( - "-", "_") for d in app_values[KEY_HARNESS]['use_services']} - - not_found = {d for d in service_dependencies if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") +# def get_included_with_dependencies(values, include): +# app_values = values['apps'].values() +# directly_included = [app for app in app_values if any( +# inc == app[KEY_HARNESS]['name'] for inc in include)] + +# dependent = set(include) +# for app in directly_included: +# if app['harness']['dependencies'].get('hard', None): +# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) +# if app['harness']['dependencies'].get('soft', None): +# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) +# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: +# dependent.add('accounts') +# if len(dependent) == len(include): +# return dependent +# return get_included_with_dependencies(values, dependent) + + +# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): +# pass + + +# def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None): +# """ +# Searches recursively for helm templates inside the applications and collects the templates in the destination + +# :param search_root: +# :param dest_helm_chart_path: collected helm templates destination folder +# :param exclude: +# :return: +# """ +# app_base_path = search_root / APPS_PATH + +# for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories +# app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) +# if app_name in exclude or (include and not any(inc in app_name for inc in include)): +# continue +# template_dir = app_path / 'deploy' / f'templates-{templates_path}' +# if template_dir.exists(): +# dest_dir = dest_helm_chart_path / 'templates' / 
app_name + +# logging.info( +# "Collecting templates for application %s to %s", app_name, dest_dir) +# if dest_dir.exists(): +# logging.warning( +# "Merging/overriding all files in directory %s", dest_dir) +# merge_configuration_directories(f"{template_dir}", f"{dest_dir}") +# else: +# shutil.copytree(template_dir, dest_dir) +# resources_dir = app_path / 'deploy' / 'resources' +# if resources_dir.exists(): +# dest_dir = dest_helm_chart_path / 'resources' / app_name + +# logging.info( +# "Collecting resources for application %s to %s", app_name, dest_dir) + +# merge_configuration_directories(f"{resources_dir}", f"{dest_dir}") + +# # subchart_dir = app_path / 'deploy/charts' +# # if subchart_dir.exists(): +# # dest_dir = dest_helm_chart_path / 'charts' / app_name + +# # logging.info( +# # "Collecting templates for application %s to %s", app_name, dest_dir) +# # if dest_dir.exists(): +# # logging.warning( +# # "Merging/overriding all files in directory %s", dest_dir) +# # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") +# # else: +# # shutil.copytree(subchart_dir, dest_dir) + + +# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): +# if not base_helm_chart.exists(): +# return +# if dest_helm_chart_path.exists(): +# logging.info("Merging/overriding all files in directory %s", +# dest_helm_chart_path) +# merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") +# else: +# logging.info("Copying base deployment chart from %s to %s", +# base_helm_chart, dest_helm_chart_path) +# shutil.copytree(base_helm_chart, dest_helm_chart_path) + + +# def collect_helm_values(deployment_root, env=()): +# """ +# Creates helm values from a cloudharness deployment scaffolding +# """ +# values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml' + +# values = get_template(values_template_path) + +# for e in env: +# specific_template_path = os.path.join(deployment_root, 
DEPLOYMENT_CONFIGURATION_PATH, +# f'values-template-{e}.yaml') +# if os.path.exists(specific_template_path): +# logging.info( +# "Specific environment values template found: " + specific_template_path) +# with open(specific_template_path) as f: +# values_env_specific = yaml.safe_load(f) +# values = dict_merge(values, values_env_specific) +# return values + + +# def init_app_values(deployment_root, exclude, values=None): +# values = values if values is not None else {} +# app_base_path = os.path.join(deployment_root, APPS_PATH) +# overridden_template_path = os.path.join( +# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') +# default_values_path = os.path.join( +# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + +# for app_path in get_sub_paths(app_base_path): + +# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) + +# if app_name in exclude: +# continue +# app_key = app_name.replace('-', '_') +# if app_key not in values: +# default_values = get_template(default_values_path) +# values[app_key] = default_values +# overridden_defaults = get_template(overridden_template_path) +# values[app_key] = dict_merge(values[app_key], overridden_defaults) + +# return values + + +# def values_from_legacy(values): +# if KEY_HARNESS not in values: +# values[KEY_HARNESS] = {} +# harness = values[KEY_HARNESS] +# if KEY_SERVICE not in harness: +# harness[KEY_SERVICE] = {} +# if KEY_DEPLOYMENT not in harness: +# harness[KEY_DEPLOYMENT] = {} +# if KEY_DATABASE not in harness: +# harness[KEY_DATABASE] = {} + +# if 'subdomain' in values: +# harness['subdomain'] = values['subdomain'] +# if 'autodeploy' in values: +# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] +# if 'autoservice' in values: +# harness[KEY_SERVICE]['auto'] = values['autoservice'] +# if 'secureme' in values: +# harness['secured'] = values['secureme'] +# if 'resources' in values: +# harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) +# if 
'replicas' in values: +# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] +# if 'image' in values: +# harness[KEY_DEPLOYMENT]['image'] = values['image'] +# if 'port' in values: +# harness[KEY_DEPLOYMENT]['port'] = values['port'] +# harness[KEY_SERVICE]['port'] = values['port'] + + +# def values_set_legacy(values): +# harness = values[KEY_HARNESS] +# if 'image' in harness[KEY_DEPLOYMENT]: +# values['image'] = harness[KEY_DEPLOYMENT]['image'] + +# values['name'] = harness['name'] +# if harness[KEY_DEPLOYMENT].get('port', None): +# values['port'] = harness[KEY_DEPLOYMENT]['port'] +# if 'resources' in harness[KEY_DEPLOYMENT]: +# values['resources'] = harness[KEY_DEPLOYMENT]['resources'] + + +# def generate_tag_from_content(content_path, ignore=()): +# from dirhash import dirhash +# return dirhash(content_path, 'sha1', ignore=ignore) + + +# def extract_env_variables_from_values(values, envs=tuple(), prefix=''): +# if isinstance(values, dict): +# newenvs = list(envs) +# for key, value in values.items(): +# v = extract_env_variables_from_values( +# value, envs, f"{prefix}_{key}".replace('-', '_').upper()) +# if key in ('name', 'port', 'subdomain'): +# newenvs.extend(v) +# return newenvs +# else: +# return [env_variable(prefix, values)] + + +# def create_env_variables(values): +# for app_name, value in values[KEY_APPS].items(): +# if KEY_HARNESS in value: +# values['env'].extend(extract_env_variables_from_values( +# value[KEY_HARNESS], prefix='CH_' + app_name)) +# values['env'].append(env_variable('CH_DOMAIN', values['domain'])) +# values['env'].append(env_variable( +# 'CH_IMAGE_REGISTRY', values['registry']['name'])) +# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) + + +# def hosts_info(values): +# domain = values['domain'] +# namespace = values['namespace'] +# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if +# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() 
if +# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] +# try: +# ip = get_cluster_ip() +# except: +# logging.warning('Cannot get cluster ip') +# return +# logging.info( +# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}") + +# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] +# for app in values[KEY_APPS].values() if KEY_HARNESS in app) + +# logging.info( +# "\nTo run locally some apps, also those references may be needed") +# for appname in values[KEY_APPS]: +# app = values[KEY_APPS][appname]['harness'] +# if 'deployment' not in app: +# continue +# print( +# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( +# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) + +# print( +# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") + + +# class ValuesValidationException(Exception): +# pass + + +# def validate_helm_values(values): +# validate_dependencies(values) + + +# def validate_dependencies(values): +# all_apps = {a for a in values["apps"]} +# for app in all_apps: +# app_values = values["apps"][app] +# if 'dependencies' in app_values[KEY_HARNESS]: +# soft_dependencies = { +# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} +# not_found = {d for d in soft_dependencies if d not in all_apps} +# if not_found: +# logging.warning( +# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") +# hard_dependencies = { +# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} +# not_found = {d for d in hard_dependencies if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad application dependencies specified for application {app}: {','.join(not_found)}") + +# build_dependencies = { +# d for d in app_values[KEY_HARNESS]['dependencies']['build']} + +# 
not_found = { +# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} +# not_found = {d for d in not_found if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") + +# if 'use_services' in app_values[KEY_HARNESS]: +# service_dependencies = {d['name'].replace( +# "-", "_") for d in app_values[KEY_HARNESS]['use_services']} + +# not_found = {d for d in service_dependencies if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") def identify_unicorn_based_main(candidates, app_path): diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index f5eb560f..bd49f8ee 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -21,15 +21,8 @@ from .models import HarnessMainConfig -KEY_HARNESS = 'harness' -KEY_SERVICE = 'service' -KEY_DATABASE = 'database' -KEY_DEPLOYMENT = 'deployment' -KEY_APPS = 'apps' -KEY_TASK_IMAGES = 'task-images' -KEY_TEST_IMAGES = 'test-images' +from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates -DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') def deploy(namespace, output_path='./deployment'): @@ -51,70 +44,70 @@ def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain= namespace=namespace).process_values() -class CloudHarnessHelm: - def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, 
- output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, - namespace=None): - assert domain, 'A domain must be specified' - self.root_paths = root_paths - self.tag = tag - if registry and registry[-1] != '/': - self.registry = registry + '/' - else: - self.registry = registry - self.local = local - self.domain = domain - self.exclude = exclude - self.secured = secured - self.output_path = output_path - self.include = include - self.registry_secret = registry_secret - self.tls = tls - self.env = env - self.namespace = namespace - - self.dest_deployment_path = os.path.join( - self.output_path, HELM_CHART_PATH) - self.helm_chart_path = os.path.join( - self.dest_deployment_path, 'Chart.yaml') - self.__init_deployment() - - self.static_images = set() - self.base_images = {} - self.all_images = {} - - def __init_deployment(self): - """ - Create the base helm chart - """ - if os.path.exists(self.dest_deployment_path): - shutil.rmtree(self.dest_deployment_path) - # Initialize with default - copy_merge_base_deployment(self.dest_deployment_path, os.path.join( - CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) - - # Override for every cloudharness scaffolding - for root_path in self.root_paths: - copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, - base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) - collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, - dest_helm_chart_path=self.dest_deployment_path) - - def __adjust_missing_values(self, helm_values): - if 'name' not in helm_values: - with open(self.helm_chart_path) as f: - chart_idx_content = yaml.safe_load(f) - helm_values['name'] = chart_idx_content['name'].lower() +class CloudHarnessHelm(ConfigurationGenerator): + # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + # output_path='./deployment', include=None, registry_secret=None, tls=True, 
env=None, + # namespace=None): + # assert domain, 'A domain must be specified' + # self.root_paths = root_paths + # self.tag = tag + # if registry and registry[-1] != '/': + # self.registry = registry + '/' + # else: + # self.registry = registry + # self.local = local + # self.domain = domain + # self.exclude = exclude + # self.secured = secured + # self.output_path = output_path + # self.include = include + # self.registry_secret = registry_secret + # self.tls = tls + # self.env = env + # self.namespace = namespace + + # self.dest_deployment_path = os.path.join( + # self.output_path, HELM_CHART_PATH) + # self.helm_chart_path = os.path.join( + # self.dest_deployment_path, 'Chart.yaml') + # self.__init_deployment() + + # self.static_images = set() + # self.base_images = {} + # self.all_images = {} + + # def __init_deployment(self): + # """ + # Create the base helm chart + # """ + # if os.path.exists(self.dest_deployment_path): + # shutil.rmtree(self.dest_deployment_path) + # # Initialize with default + # copy_merge_base_deployment(self.dest_deployment_path, os.path.join( + # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) + + # # Override for every cloudharness scaffolding + # for root_path in self.root_paths: + # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, + # base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) + # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, + # dest_helm_chart_path=self.dest_deployment_path) + + # def __adjust_missing_values(self, helm_values): + # if 'name' not in helm_values: + # with open(self.helm_chart_path) as f: + # chart_idx_content = yaml.safe_load(f) + # helm_values['name'] = chart_idx_content['name'].lower() def process_values(self) -> HarnessMainConfig: """ Creates values file for the helm chart """ - helm_values = self.__get_default_helm_values() + helm_values = self._get_default_helm_values() - 
self.__adjust_missing_values(helm_values) + self._adjust_missing_values(helm_values) - helm_values = self.__merge_base_helm_values(helm_values) + helm_values = self._merge_base_helm_values(helm_values) helm_values[KEY_APPS] = {} @@ -122,18 +115,18 @@ def process_values(self) -> HarnessMainConfig: helm_values[KEY_TASK_IMAGES] = {} - self.__init_base_images(base_image_name) - self.__init_static_images(base_image_name) - helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name) + self._init_base_images(base_image_name) + self._init_static_images(base_image_name) + helm_values[KEY_TEST_IMAGES] = self._init_test_images(base_image_name) - self.__process_applications(helm_values, base_image_name) + self._process_applications(helm_values, base_image_name) self.create_tls_certificate(helm_values) values, include = self.__finish_helm_values(values=helm_values) # Adjust dependencies from static (common) images - self.__assign_static_build_dependencies(helm_values) + self._assign_static_build_dependencies(helm_values) for root_path in self.root_paths: collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, @@ -148,174 +141,174 @@ def process_values(self) -> HarnessMainConfig: validate_helm_values(merged_values) return HarnessMainConfig.from_dict(merged_values) - def __process_applications(self, helm_values, base_image_name): - for root_path in self.root_paths: - app_values = init_app_values( - root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) - helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - app_values) - - app_base_path = os.path.join(root_path, APPS_PATH) - app_values = self.collect_app_values( - app_base_path, base_image_name=base_image_name) - helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - app_values) - - def collect_app_values(self, app_base_path, base_image_name=None): - values = {} - - for app_path in get_sub_paths(app_base_path): - app_name = app_name_from_path( - os.path.relpath(app_path, 
app_base_path)) - - if app_name in self.exclude: - continue - app_key = app_name.replace('-', '_') - - app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) - - values[app_key] = dict_merge( - values[app_key], app_values) if app_key in values else app_values - - return values - - def __init_static_images(self, base_image_name): - for static_img_dockerfile in self.static_images: - img_name = image_name_from_dockerfile_path(os.path.basename( - static_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( - img_name, build_context_path=static_img_dockerfile) - - def __assign_static_build_dependencies(self, helm_values): - for static_img_dockerfile in self.static_images: - key = os.path.basename(static_img_dockerfile) - if key in helm_values[KEY_TASK_IMAGES]: - dependencies = guess_build_dependencies_from_dockerfile( - static_img_dockerfile) - for dep in dependencies: - if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: - helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] - - for image_name in list(helm_values[KEY_TASK_IMAGES].keys()): - if image_name in self.exclude: - del helm_values[KEY_TASK_IMAGES][image_name] - - def __init_base_images(self, base_image_name): - - for root_path in self.root_paths: - for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): - img_name = image_name_from_dockerfile_path( - os.path.basename(base_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - img_name, build_context_path=root_path) - - self.static_images.update(find_dockerfiles_paths( - os.path.join(root_path, STATIC_IMAGES_PATH))) - return self.base_images - - def __init_test_images(self, base_image_name): - test_images = {} - for root_path in self.root_paths: - for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): - 
img_name = image_name_from_dockerfile_path( - os.path.basename(base_img_dockerfile), base_name=base_image_name) - test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - img_name, build_context_path=base_img_dockerfile) - - return test_images - - - def __find_static_dockerfile_paths(self, root_path): - return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) - - def __merge_base_helm_values(self, helm_values): - # Override for every cloudharness scaffolding - for root_path in self.root_paths: - helm_values = dict_merge( - helm_values, - collect_helm_values(root_path, env=self.env) - ) - - return helm_values - - def __get_default_helm_values(self): - helm_values = get_template(os.path.join( - CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml')) - helm_values = dict_merge(helm_values, - collect_helm_values(CH_ROOT, env=self.env)) - - return helm_values - - def create_tls_certificate(self, helm_values): - if not self.tls: - helm_values['tls'] = None - return - if not self.local: - return - helm_values['tls'] = self.domain.replace(".", "-") + "-tls" - - bootstrap_file = 'bootstrap.sh' - certs_parent_folder_path = os.path.join( - self.output_path, 'helm', 'resources') - certs_folder_path = os.path.join(certs_parent_folder_path, 'certs') - - if os.path.exists(os.path.join(certs_folder_path)): - # don't overwrite the certificate if it exists - return - - try: - client = DockerClient() - client.ping() - except: - raise ConnectionRefusedError( - '\n\nIs docker running? 
Run "eval(minikube docker-env)" if you are using minikube...') - - # Create CA and sign cert for domain - container = client.containers.run(image='frapsoft/openssl', - command=f'sleep 60', - entrypoint="", - detach=True, - environment=[ - f"DOMAIN={self.domain}"], - ) - - container.exec_run('mkdir -p /mnt/vol1') - container.exec_run('mkdir -p /mnt/certs') - - # copy bootstrap file - cur_dir = os.getcwd() - os.chdir(os.path.join(HERE, 'scripts')) - tar = tarfile.open(bootstrap_file + '.tar', mode='w') - try: - tar.add(bootstrap_file) - finally: - tar.close() - data = open(bootstrap_file + '.tar', 'rb').read() - container.put_archive('/mnt/vol1', data) - os.chdir(cur_dir) - container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') - - # exec bootstrap file - container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') - - # retrieve the certs from the container - bits, stat = container.get_archive('/mnt/certs') - if not os.path.exists(certs_folder_path): - os.makedirs(certs_folder_path) - f = open(f'{certs_parent_folder_path}/certs.tar', 'wb') - for chunk in bits: - f.write(chunk) - f.close() - cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar') - cf.extractall(path=certs_parent_folder_path) - - logs = container.logs() - logging.info(f'openssl container logs: {logs}') - - # stop the container - container.kill() - - logging.info("Created certificates for local deployment") + # def __process_applications(self, helm_values, base_image_name): + # for root_path in self.root_paths: + # app_values = init_app_values( + # root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) + # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + # app_values) + + # app_base_path = os.path.join(root_path, APPS_PATH) + # app_values = self.collect_app_values( + # app_base_path, base_image_name=base_image_name) + # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], + # app_values) + + # def collect_app_values(self, app_base_path, base_image_name=None): + 
# values = {} + + # for app_path in get_sub_paths(app_base_path): + # app_name = app_name_from_path( + # os.path.relpath(app_path, app_base_path)) + + # if app_name in self.exclude: + # continue + # app_key = app_name.replace('-', '_') + + # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) + + # values[app_key] = dict_merge( + # values[app_key], app_values) if app_key in values else app_values + + # return values + + # def __init_static_images(self, base_image_name): + # for static_img_dockerfile in self.static_images: + # img_name = image_name_from_dockerfile_path(os.path.basename( + # static_img_dockerfile), base_name=base_image_name) + # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=static_img_dockerfile) + + # def __assign_static_build_dependencies(self, helm_values): + # for static_img_dockerfile in self.static_images: + # key = os.path.basename(static_img_dockerfile) + # if key in helm_values[KEY_TASK_IMAGES]: + # dependencies = guess_build_dependencies_from_dockerfile( + # static_img_dockerfile) + # for dep in dependencies: + # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: + # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] + + # for image_name in list(helm_values[KEY_TASK_IMAGES].keys()): + # if image_name in self.exclude: + # del helm_values[KEY_TASK_IMAGES][image_name] + + # def __init_base_images(self, base_image_name): + + # for root_path in self.root_paths: + # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): + # img_name = image_name_from_dockerfile_path( + # os.path.basename(base_img_dockerfile), base_name=base_image_name) + # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=root_path) + + # self.static_images.update(find_dockerfiles_paths( + # os.path.join(root_path, STATIC_IMAGES_PATH))) + # return self.base_images + + # def 
__init_test_images(self, base_image_name): + # test_images = {} + # for root_path in self.root_paths: + # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): + # img_name = image_name_from_dockerfile_path( + # os.path.basename(base_img_dockerfile), base_name=base_image_name) + # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + # img_name, build_context_path=base_img_dockerfile) + + # return test_images + + + # def __find_static_dockerfile_paths(self, root_path): + # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) + + # def __merge_base_helm_values(self, helm_values): + # # Override for every cloudharness scaffolding + # for root_path in self.root_paths: + # helm_values = dict_merge( + # helm_values, + # collect_helm_values(root_path, env=self.env) + # ) + + # return helm_values + + # def __get_default_helm_values(self): + # helm_values = get_template(os.path.join( + # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml')) + # helm_values = dict_merge(helm_values, + # collect_helm_values(CH_ROOT, env=self.env)) + + # return helm_values + + # def create_tls_certificate(self, helm_values): + # if not self.tls: + # helm_values['tls'] = None + # return + # if not self.local: + # return + # helm_values['tls'] = self.domain.replace(".", "-") + "-tls" + + # bootstrap_file = 'bootstrap.sh' + # certs_parent_folder_path = os.path.join( + # self.output_path, 'helm', 'resources') + # certs_folder_path = os.path.join(certs_parent_folder_path, 'certs') + + # if os.path.exists(os.path.join(certs_folder_path)): + # # don't overwrite the certificate if it exists + # return + + # try: + # client = DockerClient() + # client.ping() + # except: + # raise ConnectionRefusedError( + # '\n\nIs docker running? 
Run "eval(minikube docker-env)" if you are using minikube...') + + # # Create CA and sign cert for domain + # container = client.containers.run(image='frapsoft/openssl', + # command=f'sleep 60', + # entrypoint="", + # detach=True, + # environment=[ + # f"DOMAIN={self.domain}"], + # ) + + # container.exec_run('mkdir -p /mnt/vol1') + # container.exec_run('mkdir -p /mnt/certs') + + # # copy bootstrap file + # cur_dir = os.getcwd() + # os.chdir(os.path.join(HERE, 'scripts')) + # tar = tarfile.open(bootstrap_file + '.tar', mode='w') + # try: + # tar.add(bootstrap_file) + # finally: + # tar.close() + # data = open(bootstrap_file + '.tar', 'rb').read() + # container.put_archive('/mnt/vol1', data) + # os.chdir(cur_dir) + # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') + + # # exec bootstrap file + # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') + + # # retrieve the certs from the container + # bits, stat = container.get_archive('/mnt/certs') + # if not os.path.exists(certs_folder_path): + # os.makedirs(certs_folder_path) + # f = open(f'{certs_parent_folder_path}/certs.tar', 'wb') + # for chunk in bits: + # f.write(chunk) + # f.close() + # cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar') + # cf.extractall(path=certs_parent_folder_path) + + # logs = container.logs() + # logging.info(f'openssl container logs: {logs}') + + # # stop the container + # container.kill() + + # logging.info("Created certificates for local deployment") def __finish_helm_values(self, values): """ @@ -370,7 +363,7 @@ def __finish_helm_values(self, values): if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None): harness[KEY_DATABASE]['name'] = app_name.strip() + '-db' - self.__clear_unused_db_configuration(harness) + self._clear_unused_db_configuration(harness) values_set_legacy(v) if self.include: @@ -390,36 +383,36 @@ def __finish_helm_values(self, values): create_env_variables(values) return values, self.include - def 
__clear_unused_db_configuration(self, harness_config): - database_config = harness_config[KEY_DATABASE] - database_type = database_config.get('type', None) - if database_type is None: - del harness_config[KEY_DATABASE] - return - db_specific_keys = [k for k, v in database_config.items() - if isinstance(v, dict) and 'image' in v and 'ports' in v] - for db in db_specific_keys: - if database_type != db: - del database_config[db] - - def image_tag(self, image_name, build_context_path=None, dependencies=()): - tag = self.tag - if tag is None and not self.local: - logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") - ignore_path = os.path.join(build_context_path, '.dockerignore') - ignore = set(DEFAULT_IGNORE) - if os.path.exists(ignore_path): - with open(ignore_path) as f: - ignore = ignore.union({line.strip() for line in f}) - logging.info(f"Ignoring {ignore}") - tag = generate_tag_from_content(build_context_path, ignore) - logging.info(f"Content hash: {tag}") - dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) - tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() - logging.info(f"Generated tag: {tag}") - app_name = image_name.split("/")[-1] # the image name can have a prefix - self.all_images[app_name] = tag - return self.registry + image_name + (f':{tag}' if tag else '') + # def __clear_unused_db_configuration(self, harness_config): + # database_config = harness_config[KEY_DATABASE] + # database_type = database_config.get('type', None) + # if database_type is None: + # del harness_config[KEY_DATABASE] + # return + # db_specific_keys = [k for k, v in database_config.items() + # if isinstance(v, dict) and 'image' in v and 'ports' in v] + # for db in db_specific_keys: + # if database_type != db: + # del database_config[db] + + # def image_tag(self, image_name, build_context_path=None, dependencies=()): + # tag = self.tag + # if tag 
is None and not self.local: + # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") + # ignore_path = os.path.join(build_context_path, '.dockerignore') + # ignore = set(DEFAULT_IGNORE) + # if os.path.exists(ignore_path): + # with open(ignore_path) as f: + # ignore = ignore.union({line.strip() for line in f}) + # logging.info(f"Ignoring {ignore}") + # tag = generate_tag_from_content(build_context_path, ignore) + # logging.info(f"Content hash: {tag}") + # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) + # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() + # logging.info(f"Generated tag: {tag}") + # app_name = image_name.split("/")[-1] # the image name can have a prefix + # self.all_images[app_name] = tag + # return self.registry + image_name + (f':{tag}' if tag else '') def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) @@ -487,282 +480,282 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): return values -def get_included_with_dependencies(values, include): - app_values = values['apps'].values() - directly_included = [app for app in app_values if any( - inc == app[KEY_HARNESS]['name'] for inc in include)] - - dependent = set(include) - for app in directly_included: - if app['harness']['dependencies'].get('hard', None): - dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) - if app['harness']['dependencies'].get('soft', None): - dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) - if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: - dependent.add('accounts') - if len(dependent) == len(include): - return dependent - return get_included_with_dependencies(values, dependent) - - -def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): - pass - - -def 
collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None): - """ - Searches recursively for helm templates inside the applications and collects the templates in the destination - - :param search_root: - :param dest_helm_chart_path: collected helm templates destination folder - :param exclude: - :return: - """ - app_base_path = os.path.join(search_root, APPS_PATH) - - for app_path in get_sub_paths(app_base_path): - app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) - if app_name in exclude or (include and not any(inc in app_name for inc in include)): - continue - template_dir = os.path.join(app_path, 'deploy', 'templates') - if os.path.exists(template_dir): - dest_dir = os.path.join( - dest_helm_chart_path, 'templates', app_name) - - logging.info( - "Collecting templates for application %s to %s", app_name, dest_dir) - if os.path.exists(dest_dir): - logging.warning( - "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(template_dir, dest_dir) - else: - shutil.copytree(template_dir, dest_dir) - resources_dir = os.path.join(app_path, 'deploy/resources') - if os.path.exists(resources_dir): - dest_dir = os.path.join( - dest_helm_chart_path, 'resources', app_name) - - logging.info( - "Collecting resources for application %s to %s", app_name, dest_dir) - - merge_configuration_directories(resources_dir, dest_dir) - - subchart_dir = os.path.join(app_path, 'deploy/charts') - if os.path.exists(subchart_dir): - dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name) - - logging.info( - "Collecting templates for application %s to %s", app_name, dest_dir) - if os.path.exists(dest_dir): - logging.warning( - "Merging/overriding all files in directory %s", dest_dir) - merge_configuration_directories(subchart_dir, dest_dir) - else: - shutil.copytree(subchart_dir, dest_dir) - - -def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): - if not 
os.path.exists(base_helm_chart): - return - if os.path.exists(dest_helm_chart_path): - logging.info("Merging/overriding all files in directory %s", - dest_helm_chart_path) - merge_configuration_directories(base_helm_chart, dest_helm_chart_path) - else: - logging.info("Copying base deployment chart from %s to %s", - base_helm_chart, dest_helm_chart_path) - shutil.copytree(base_helm_chart, dest_helm_chart_path) - - -def collect_helm_values(deployment_root, env=()): - """ - Creates helm values from a cloudharness deployment scaffolding - """ - - values_template_path = os.path.join( - deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') - - values = get_template(values_template_path) - - for e in env: - specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, - f'values-template-{e}.yaml') - if os.path.exists(specific_template_path): - logging.info( - "Specific environment values template found: " + specific_template_path) - with open(specific_template_path) as f: - values_env_specific = yaml.safe_load(f) - values = dict_merge(values, values_env_specific) - return values - - -def init_app_values(deployment_root, exclude, values=None): - values = values if values is not None else {} - app_base_path = os.path.join(deployment_root, APPS_PATH) - overridden_template_path = os.path.join( - deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - default_values_path = os.path.join( - CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - - for app_path in get_sub_paths(app_base_path): - - app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) - - if app_name in exclude: - continue - app_key = app_name.replace('-', '_') - if app_key not in values: - default_values = get_template(default_values_path) - values[app_key] = default_values - overridden_defaults = get_template(overridden_template_path) - values[app_key] = dict_merge(values[app_key], overridden_defaults) - - return values - - 
-def values_from_legacy(values): - if KEY_HARNESS not in values: - values[KEY_HARNESS] = {} - harness = values[KEY_HARNESS] - if KEY_SERVICE not in harness: - harness[KEY_SERVICE] = {} - if KEY_DEPLOYMENT not in harness: - harness[KEY_DEPLOYMENT] = {} - if KEY_DATABASE not in harness: - harness[KEY_DATABASE] = {} - - if 'subdomain' in values: - harness['subdomain'] = values['subdomain'] - if 'autodeploy' in values: - harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] - if 'autoservice' in values: - harness[KEY_SERVICE]['auto'] = values['autoservice'] - if 'secureme' in values: - harness['secured'] = values['secureme'] - if 'resources' in values: - harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) - if 'replicas' in values: - harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] - if 'image' in values: - harness[KEY_DEPLOYMENT]['image'] = values['image'] - if 'port' in values: - harness[KEY_DEPLOYMENT]['port'] = values['port'] - harness[KEY_SERVICE]['port'] = values['port'] - - -def values_set_legacy(values): - harness = values[KEY_HARNESS] - if 'image' in harness[KEY_DEPLOYMENT]: - values['image'] = harness[KEY_DEPLOYMENT]['image'] - - values['name'] = harness['name'] - if harness[KEY_DEPLOYMENT].get('port', None): - values['port'] = harness[KEY_DEPLOYMENT]['port'] - if 'resources' in harness[KEY_DEPLOYMENT]: - values['resources'] = harness[KEY_DEPLOYMENT]['resources'] - - -def generate_tag_from_content(content_path, ignore=()): - from dirhash import dirhash - return dirhash(content_path, 'sha1', ignore=ignore) - - -def extract_env_variables_from_values(values, envs=tuple(), prefix=''): - if isinstance(values, dict): - newenvs = list(envs) - for key, value in values.items(): - v = extract_env_variables_from_values( - value, envs, f"{prefix}_{key}".replace('-', '_').upper()) - if key in ('name', 'port', 'subdomain'): - newenvs.extend(v) - return newenvs - else: - return [env_variable(prefix, values)] - - -def create_env_variables(values): - for 
app_name, value in values[KEY_APPS].items(): - if KEY_HARNESS in value: - values['env'].extend(extract_env_variables_from_values( - value[KEY_HARNESS], prefix='CH_' + app_name)) - values['env'].append(env_variable('CH_DOMAIN', values['domain'])) - values['env'].append(env_variable( - 'CH_IMAGE_REGISTRY', values['registry']['name'])) - values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) - - -def hosts_info(values): - domain = values['domain'] - namespace = values['namespace'] - subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if - KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if - KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] - try: - ip = get_cluster_ip() - except: - logging.warning('Cannot get cluster ip') - return - logging.info( - "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}") - - deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] - for app in values[KEY_APPS].values() if KEY_HARNESS in app) - - logging.info( - "\nTo run locally some apps, also those references may be needed") - for appname in values[KEY_APPS]: - app = values[KEY_APPS][appname]['harness'] - if 'deployment' not in app: - continue - print( - "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( - app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) - - print( - f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") - - -class ValuesValidationException(Exception): - pass - - -def validate_helm_values(values): - validate_dependencies(values) - - -def validate_dependencies(values): - all_apps = {a for a in values["apps"]} - for app in all_apps: - app_values = values["apps"][app] - if 'dependencies' in app_values[KEY_HARNESS]: - soft_dependencies = { - d.replace("-", "_") for d in 
app_values[KEY_HARNESS]['dependencies']['soft']} - not_found = {d for d in soft_dependencies if d not in all_apps} - if not_found: - logging.warning( - f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") - hard_dependencies = { - d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} - not_found = {d for d in hard_dependencies if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad application dependencies specified for application {app}: {','.join(not_found)}") - - build_dependencies = { - d for d in app_values[KEY_HARNESS]['dependencies']['build']} - - not_found = { - d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} - not_found = {d for d in not_found if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") - - if 'use_services' in app_values[KEY_HARNESS]: - service_dependencies = {d['name'].replace( - "-", "_") for d in app_values[KEY_HARNESS]['use_services']} - - not_found = {d for d in service_dependencies if d not in all_apps} - if not_found: - raise ValuesValidationException( - f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") +# def get_included_with_dependencies(values, include): +# app_values = values['apps'].values() +# directly_included = [app for app in app_values if any( +# inc == app[KEY_HARNESS]['name'] for inc in include)] + +# dependent = set(include) +# for app in directly_included: +# if app['harness']['dependencies'].get('hard', None): +# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) +# if app['harness']['dependencies'].get('soft', None): +# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) +# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: +# dependent.add('accounts') +# if len(dependent) == len(include): +# return dependent +# 
return get_included_with_dependencies(values, dependent) + + +# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): +# pass + + +# def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None): +# """ +# Searches recursively for helm templates inside the applications and collects the templates in the destination + +# :param search_root: +# :param dest_helm_chart_path: collected helm templates destination folder +# :param exclude: +# :return: +# """ +# app_base_path = os.path.join(search_root, APPS_PATH) + +# for app_path in get_sub_paths(app_base_path): +# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) +# if app_name in exclude or (include and not any(inc in app_name for inc in include)): +# continue +# template_dir = os.path.join(app_path, 'deploy', 'templates') +# if os.path.exists(template_dir): +# dest_dir = os.path.join( +# dest_helm_chart_path, 'templates', app_name) + +# logging.info( +# "Collecting templates for application %s to %s", app_name, dest_dir) +# if os.path.exists(dest_dir): +# logging.warning( +# "Merging/overriding all files in directory %s", dest_dir) +# merge_configuration_directories(template_dir, dest_dir) +# else: +# shutil.copytree(template_dir, dest_dir) +# resources_dir = os.path.join(app_path, 'deploy/resources') +# if os.path.exists(resources_dir): +# dest_dir = os.path.join( +# dest_helm_chart_path, 'resources', app_name) + +# logging.info( +# "Collecting resources for application %s to %s", app_name, dest_dir) + +# merge_configuration_directories(resources_dir, dest_dir) + +# subchart_dir = os.path.join(app_path, 'deploy/charts') +# if os.path.exists(subchart_dir): +# dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name) + +# logging.info( +# "Collecting templates for application %s to %s", app_name, dest_dir) +# if os.path.exists(dest_dir): +# logging.warning( +# "Merging/overriding all files in directory %s", dest_dir) +# 
merge_configuration_directories(subchart_dir, dest_dir) +# else: +# shutil.copytree(subchart_dir, dest_dir) + + +# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): +# if not os.path.exists(base_helm_chart): +# return +# if os.path.exists(dest_helm_chart_path): +# logging.info("Merging/overriding all files in directory %s", +# dest_helm_chart_path) +# merge_configuration_directories(base_helm_chart, dest_helm_chart_path) +# else: +# logging.info("Copying base deployment chart from %s to %s", +# base_helm_chart, dest_helm_chart_path) +# shutil.copytree(base_helm_chart, dest_helm_chart_path) + + +# def collect_helm_values(deployment_root, env=()): +# """ +# Creates helm values from a cloudharness deployment scaffolding +# """ + +# values_template_path = os.path.join( +# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') + +# values = get_template(values_template_path) + +# for e in env: +# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, +# f'values-template-{e}.yaml') +# if os.path.exists(specific_template_path): +# logging.info( +# "Specific environment values template found: " + specific_template_path) +# with open(specific_template_path) as f: +# values_env_specific = yaml.safe_load(f) +# values = dict_merge(values, values_env_specific) +# return values + + +# def init_app_values(deployment_root, exclude, values=None): +# values = values if values is not None else {} +# app_base_path = os.path.join(deployment_root, APPS_PATH) +# overridden_template_path = os.path.join( +# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') +# default_values_path = os.path.join( +# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + +# for app_path in get_sub_paths(app_base_path): + +# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) + +# if app_name in exclude: +# continue +# app_key = app_name.replace('-', '_') +# if app_key not in values: +# 
default_values = get_template(default_values_path) +# values[app_key] = default_values +# overridden_defaults = get_template(overridden_template_path) +# values[app_key] = dict_merge(values[app_key], overridden_defaults) + +# return values + + +# def values_from_legacy(values): +# if KEY_HARNESS not in values: +# values[KEY_HARNESS] = {} +# harness = values[KEY_HARNESS] +# if KEY_SERVICE not in harness: +# harness[KEY_SERVICE] = {} +# if KEY_DEPLOYMENT not in harness: +# harness[KEY_DEPLOYMENT] = {} +# if KEY_DATABASE not in harness: +# harness[KEY_DATABASE] = {} + +# if 'subdomain' in values: +# harness['subdomain'] = values['subdomain'] +# if 'autodeploy' in values: +# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] +# if 'autoservice' in values: +# harness[KEY_SERVICE]['auto'] = values['autoservice'] +# if 'secureme' in values: +# harness['secured'] = values['secureme'] +# if 'resources' in values: +# harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) +# if 'replicas' in values: +# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] +# if 'image' in values: +# harness[KEY_DEPLOYMENT]['image'] = values['image'] +# if 'port' in values: +# harness[KEY_DEPLOYMENT]['port'] = values['port'] +# harness[KEY_SERVICE]['port'] = values['port'] + + +# def values_set_legacy(values): +# harness = values[KEY_HARNESS] +# if 'image' in harness[KEY_DEPLOYMENT]: +# values['image'] = harness[KEY_DEPLOYMENT]['image'] + +# values['name'] = harness['name'] +# if harness[KEY_DEPLOYMENT].get('port', None): +# values['port'] = harness[KEY_DEPLOYMENT]['port'] +# if 'resources' in harness[KEY_DEPLOYMENT]: +# values['resources'] = harness[KEY_DEPLOYMENT]['resources'] + + +# def generate_tag_from_content(content_path, ignore=()): +# from dirhash import dirhash +# return dirhash(content_path, 'sha1', ignore=ignore) + + +# def extract_env_variables_from_values(values, envs=tuple(), prefix=''): +# if isinstance(values, dict): +# newenvs = list(envs) +# for key, value 
in values.items(): +# v = extract_env_variables_from_values( +# value, envs, f"{prefix}_{key}".replace('-', '_').upper()) +# if key in ('name', 'port', 'subdomain'): +# newenvs.extend(v) +# return newenvs +# else: +# return [env_variable(prefix, values)] + + +# def create_env_variables(values): +# for app_name, value in values[KEY_APPS].items(): +# if KEY_HARNESS in value: +# values['env'].extend(extract_env_variables_from_values( +# value[KEY_HARNESS], prefix='CH_' + app_name)) +# values['env'].append(env_variable('CH_DOMAIN', values['domain'])) +# values['env'].append(env_variable( +# 'CH_IMAGE_REGISTRY', values['registry']['name'])) +# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) + + +# def hosts_info(values): +# domain = values['domain'] +# namespace = values['namespace'] +# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if +# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if +# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] +# try: +# ip = get_cluster_ip() +# except: +# logging.warning('Cannot get cluster ip') +# return +# logging.info( +# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' 
+ domain for sd in subdomains)}") + +# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] +# for app in values[KEY_APPS].values() if KEY_HARNESS in app) + +# logging.info( +# "\nTo run locally some apps, also those references may be needed") +# for appname in values[KEY_APPS]: +# app = values[KEY_APPS][appname]['harness'] +# if 'deployment' not in app: +# continue +# print( +# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( +# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) + +# print( +# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") + + +# class ValuesValidationException(Exception): +# pass + + +# def validate_helm_values(values): +# validate_dependencies(values) + + +# def validate_dependencies(values): +# all_apps = {a for a in values["apps"]} +# for app in all_apps: +# app_values = values["apps"][app] +# if 'dependencies' in app_values[KEY_HARNESS]: +# soft_dependencies = { +# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} +# not_found = {d for d in soft_dependencies if d not in all_apps} +# if not_found: +# logging.warning( +# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") +# hard_dependencies = { +# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} +# not_found = {d for d in hard_dependencies if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad application dependencies specified for application {app}: {','.join(not_found)}") + +# build_dependencies = { +# d for d in app_values[KEY_HARNESS]['dependencies']['build']} + +# not_found = { +# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} +# not_found = {d for d in not_found if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") + +# 
if 'use_services' in app_values[KEY_HARNESS]: +# service_dependencies = {d['name'].replace( +# "-", "_") for d in app_values[KEY_HARNESS]['use_services']} + +# not_found = {d for d in service_dependencies if d not in all_apps} +# if not_found: +# raise ValuesValidationException( +# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") diff --git a/tools/deployment-cli-tools/tests/test_codefresh.py b/tools/deployment-cli-tools/tests/test_codefresh.py index 7e3abd4d..e4505a9f 100644 --- a/tools/deployment-cli-tools/tests/test_codefresh.py +++ b/tools/deployment-cli-tools/tests/test_codefresh.py @@ -1,6 +1,7 @@ from ch_cli_tools.preprocessing import preprocess_build_overrides from ch_cli_tools.helm import * +from ch_cli_tools.configurationgenerator import * from ch_cli_tools.codefresh import * HERE = os.path.dirname(os.path.realpath(__file__)) @@ -126,7 +127,7 @@ def test_create_codefresh_configuration(): assert len( tstep['commands']) == 2, "Unit test commands are not properly loaded from the unit test configuration file" assert tstep['commands'][0] == "tox", "Unit test commands are not properly loaded from the unit test configuration file" - + assert len(l1_steps[CD_BUILD_STEP_DEPENDENCIES]['steps']) == 3, "3 clone steps should be included as we have 2 dependencies from myapp, plus cloudharness" finally: shutil.rmtree(BUILD_MERGE_DIR) @@ -213,7 +214,7 @@ def test_create_codefresh_configuration_tests(): assert "test-api" in st_build_test_steps["test-api"]["dockerfile"], "test-api image must be built from root context" - + e2e_steps = l1_steps[CD_E2E_TEST_STEP]['scale'] @@ -251,7 +252,7 @@ def test_create_codefresh_configuration_tests(): finally: shutil.rmtree(BUILD_MERGE_DIR) - + values = create_helm_chart( [CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py index ed53ab86..5fa269d6 100644 --- 
a/tools/deployment-cli-tools/tests/test_helm.py +++ b/tools/deployment-cli-tools/tests/test_helm.py @@ -1,6 +1,7 @@ import shutil from ch_cli_tools.helm import * +from ch_cli_tools.configurationgenerator import * HERE = os.path.dirname(os.path.realpath(__file__)) RESOURCES = os.path.join(HERE, 'resources') @@ -80,8 +81,9 @@ def exists(*args): shutil.rmtree(OUT) -def test_collect_helm_values_noreg_noinclude(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", +def test_collect_helm_values_noreg_noinclude(tmp_path): + out_path = tmp_path / 'test_collect_helm_values_noreg_noinclude' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_path, domain="my.local", namespace='test', env='dev', local=False, tag=1) # Auto values @@ -119,7 +121,7 @@ def test_collect_helm_values_noreg_noinclude(): assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False - helm_path = os.path.join(OUT, HELM_CHART_PATH) + helm_path = out_path / HELM_CHART_PATH def exists(*args): return os.path.exists(os.path.join(*args)) @@ -137,8 +139,6 @@ def exists(*args): assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1' assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1' - shutil.rmtree(OUT) - def test_collect_helm_values_precedence(): values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", @@ -302,13 +302,13 @@ def create(): return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'], exclude=['events'], domain="my.local", namespace='test', env='dev', local=False, tag=None, registry='reg') - + BASE_KEY = "cloudharness-base" values = create() # Auto values are set by using the directory hash assert 'reg/cloudharness/myapp:' in values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] - 
assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image + assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image assert 'cloudharness/myapp-mytask' in values[KEY_TASK_IMAGES]['myapp-mytask'] assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == values.apps['myapp'].harness.deployment.image v1 = values.apps['myapp'].harness.deployment.image @@ -320,7 +320,7 @@ def create(): assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value" assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore" - + try: fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt') with open(fname, 'w') as f: @@ -355,7 +355,7 @@ def create(): assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore" finally: os.remove(fname) - + # Dependencies test: if a dependency is changed, the hash should change @@ -366,7 +366,7 @@ def create(): f.write('a') values = create() - + assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change" assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change" finally: @@ -379,12 +379,9 @@ def create(): f.write('a') values = create() - + assert b1 != values["task-images"][BASE_KEY], "Content for base image is changed, the hash should change" assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change" assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change" finally: os.remove(fname) - - - From 368725aa5af146a1fcdaff219fd1cfbb5aa4d4c7 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 07:37:54 -0600 Subject: [PATCH 067/210] CH-100 Clean 
imports/comments --- .../ch_cli_tools/dockercompose.py | 572 +----------------- .../deployment-cli-tools/ch_cli_tools/helm.py | 547 +---------------- 2 files changed, 6 insertions(+), 1113 deletions(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 0e75ed7e..f65e352b 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -4,23 +4,14 @@ import yaml from ruamel.yaml import YAML import os -import shutil import logging -from hashlib import sha1 import subprocess -from functools import cache -import tarfile -from docker import from_env as DockerClient -from pathlib import Path import copy -from . import HERE, CH_ROOT -from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ - DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE -from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ - get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ - find_dockerfiles_paths, find_file_paths +from cloudharness_utils.constants import VALUES_MANUAL_PATH, COMPOSE +from .utils import get_cluster_ip, image_name_from_dockerfile_path, get_template, \ + merge_to_yaml_file, dict_merge, app_name_from_path, find_dockerfiles_paths, find_file_paths from .models import HarnessMainConfig @@ -38,57 +29,6 @@ def create_docker_compose_configuration(root_paths, tag='latest', registry='', l class CloudHarnessDockerCompose(ConfigurationGenerator): - # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, - # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, - # namespace=None, templates_path=HELM_PATH): - # assert 
domain, 'A domain must be specified' - # self.root_paths = [Path(r) for r in root_paths] - # self.tag = tag - # if registry and not registry.endswith('/'): - # self.registry = f'{registry}/' - # else: - # self.registry = registry - # self.local = local - # self.domain = domain - # self.exclude = exclude - # self.secured = secured - # self.output_path = Path(output_path) - # self.include = include - # self.registry_secret = registry_secret - # self.tls = tls - # self.env = env - # self.namespace = namespace - - # self.templates_path = templates_path - # self.dest_deployment_path = self.output_path / templates_path - # self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml' - # self.__init_deployment() - - # self.static_images = set() - # self.base_images = {} - # self.all_images = {} - - # def __init_deployment(self): - # """ - # Create the base helm chart - # """ - # if self.dest_deployment_path.exists(): - # shutil.rmtree(self.dest_deployment_path) - # # Initialize with default - # copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path) - - # # Override for every cloudharness scaffolding - # for root_path in self.root_paths: - # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, - # base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path) - # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, - # dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path) - - # def __adjust_missing_values(self, helm_values): - # if 'name' not in helm_values: - # with open(self.helm_chart_path) as f: - # chart_idx_content = yaml.safe_load(f) - # helm_values['name'] = chart_idx_content['name'].lower() def process_values(self) -> HarnessMainConfig: """ @@ -176,133 +116,6 @@ def __post_process_multiple_document_docker_compose(self, yaml_document): main_document = document # we need to save the main document 
later yaml_handler.dump(main_document, yaml_document) - # def __process_applications(self, helm_values, base_image_name): - # for root_path in self.root_paths: - # app_values = init_app_values( - # root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) - # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - # app_values) - - # app_base_path = root_path / APPS_PATH - # app_values = self.collect_app_values( - # app_base_path, base_image_name=base_image_name) - # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - # app_values) - - # def collect_app_values(self, app_base_path, base_image_name=None): - # values = {} - - # for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories - # app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}") - - # if app_name in self.exclude: - # continue - # app_key = app_name.replace('-', '_') - - # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) - - # # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None) - # # # for dockerfile_path in app_path.rglob('**/Dockerfile'): - # # # parent_name = dockerfile_path.parent.name.replace("-", "_") - # # # if parent_name == app_key: - # # # app_values['build'] = { - # # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", - # # # 'dockerfile': "Dockerfile", - # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # # # } - # # # elif "tasks/" in f"{dockerfile_path}": - # # # parent_name = parent_name.upper() - # # # values.setdefault("task-images-build", {})[parent_name] = { - # # # 'dockerfile': "Dockerfile", - # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # # # } - # # # import ipdb; ipdb.set_trace() # fmt: skip - - # # if dockerfile_path: - # # app_values['build'] = { - # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}", - # # 'dockerfile': "Dockerfile", - # # 'context': 
os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent), - # # } - - # values[app_key] = dict_merge( - # values[app_key], app_values) if app_key in values else app_values - - # return values - - # def __init_static_images(self, base_image_name): - # for static_img_dockerfile in self.static_images: - # img_name = image_name_from_dockerfile_path(os.path.basename( - # static_img_dockerfile), base_name=base_image_name) - # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=static_img_dockerfile) - - # def __assign_static_build_dependencies(self, helm_values): - # for static_img_dockerfile in self.static_images: - # key = os.path.basename(static_img_dockerfile) - # if key in helm_values[KEY_TASK_IMAGES]: - # dependencies = guess_build_dependencies_from_dockerfile( - # static_img_dockerfile) - # for dep in dependencies: - # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: - # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] - # # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = { - # # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent), - # # 'dockerfile': 'Dockerfile', - # # } - - # for image_name in helm_values[KEY_TASK_IMAGES].keys(): - # if image_name in self.exclude: - # del helm_values[KEY_TASK_IMAGES][image_name] - # # del helm_values[KEY_TASK_IMAGES_BUILD][image_name] - - # def __init_base_images(self, base_image_name): - - # for root_path in self.root_paths: - # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): - # img_name = image_name_from_dockerfile_path( - # os.path.basename(base_img_dockerfile), base_name=base_image_name) - # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=root_path) - - # self.static_images.update(find_dockerfiles_paths( - # os.path.join(root_path, STATIC_IMAGES_PATH))) - # return self.base_images - - # def 
__init_test_images(self, base_image_name): - # test_images = {} - # for root_path in self.root_paths: - # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): - # img_name = image_name_from_dockerfile_path( - # os.path.basename(base_img_dockerfile), base_name=base_image_name) - # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=base_img_dockerfile) - - # return test_images - - - # def __find_static_dockerfile_paths(self, root_path): - # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) - - # def __merge_base_helm_values(self, helm_values): - # # Override for every cloudharness scaffolding - # for root_path in self.root_paths: - # helm_values = dict_merge( - # helm_values, - # collect_helm_values(root_path, env=self.env) - # ) - - # return helm_values - - # def __get_default_helm_values(self): - # ch_root_path = Path(CH_ROOT) - # values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml' - # helm_values = get_template(values_yaml_path) - # helm_values = dict_merge(helm_values, - # collect_helm_values(ch_root_path, env=self.env)) - - # return helm_values - def __get_default_helm_values_with_secrets(self, helm_values): helm_values = copy.deepcopy(helm_values) # {{- $values_copy := deepCopy .Values }} @@ -315,77 +128,6 @@ def __get_default_helm_values_with_secrets(self, helm_values): helm_values['apps'][key]['harness']['secrets'] = {} return helm_values - # def create_tls_certificate(self, helm_values): - # if not self.tls: - # helm_values['tls'] = None - # return - # if not self.local: - # return - # helm_values['tls'] = self.domain.replace(".", "-") + "-tls" - - # bootstrap_file = 'bootstrap.sh' - # certs_parent_folder_path = self.output_path / 'helm' / 'resources' - # certs_folder_path = certs_parent_folder_path / 'certs' - - # # if 
os.path.exists(os.path.join(certs_folder_path)): - # if certs_folder_path.exists(): - # # don't overwrite the certificate if it exists - # return - - # try: - # client = DockerClient() - # client.ping() - # except: - # raise ConnectionRefusedError( - # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...') - - # # Create CA and sign cert for domain - # container = client.containers.run(image='frapsoft/openssl', - # command=f'sleep 60', - # entrypoint="", - # detach=True, - # environment=[ - # f"DOMAIN={self.domain}"], - # ) - - # container.exec_run('mkdir -p /mnt/vol1') - # container.exec_run('mkdir -p /mnt/certs') - - # # copy bootstrap file - # cur_dir = os.getcwd() - # os.chdir(Path(HERE) / 'scripts') - # tar = tarfile.open(bootstrap_file + '.tar', mode='w') - # try: - # tar.add(bootstrap_file) - # finally: - # tar.close() - # data = open(bootstrap_file + '.tar', 'rb').read() - # container.put_archive('/mnt/vol1', data) - # os.chdir(cur_dir) - # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') - - # # exec bootstrap file - # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') - - # # retrieve the certs from the container - # bits, stat = container.get_archive('/mnt/certs') - # if not certs_folder_path.exists(): - # certs_folder_path.mkdir(parents=True) - # certs_tar = certs_parent_folder_path / 'certs.tar' - # with open(certs_tar, 'wb') as f: - # for chunk in bits: - # f.write(chunk) - # cf = tarfile.open(certs_tar) - # cf.extractall(path=certs_parent_folder_path) - - # logs = container.logs() - # logging.info(f'openssl container logs: {logs}') - - # # stop the container - # container.kill() - - # logging.info("Created certificates for local deployment") - def __finish_helm_values(self, values): """ Sets default overridden values @@ -456,37 +198,6 @@ def __finish_helm_values(self, values): create_env_variables(values) return values, self.include - # def __clear_unused_db_configuration(self, 
harness_config): - # database_config = harness_config[KEY_DATABASE] - # database_type = database_config.get('type', None) - # if database_type is None: - # del harness_config[KEY_DATABASE] - # return - # db_specific_keys = [k for k, v in database_config.items() - # if isinstance(v, dict) and 'image' in v and 'ports' in v] - # for db in db_specific_keys: - # if database_type != db: - # del database_config[db] - - # def image_tag(self, image_name, build_context_path=None, dependencies=()): - # tag = self.tag - # if tag is None and not self.local: - # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") - # ignore_path = os.path.join(build_context_path, '.dockerignore') - # ignore = set(DEFAULT_IGNORE) - # if os.path.exists(ignore_path): - # with open(ignore_path) as f: - # ignore = ignore.union({line.strip() for line in f}) - # logging.info(f"Ignoring {ignore}") - # tag = generate_tag_from_content(build_context_path, ignore) - # logging.info(f"Content hash: {tag}") - # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) - # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() - # logging.info(f"Generated tag: {tag}") - # app_name = image_name.split("/")[-1] # the image name can have a prefix - # self.all_images[app_name] = tag - # return self.registry + image_name + (f':{tag}' if tag else '') - def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) @@ -581,283 +292,6 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path): helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py' -# def get_included_with_dependencies(values, include): -# app_values = values['apps'].values() -# directly_included = [app for app in app_values if any( -# inc == app[KEY_HARNESS]['name'] for inc in 
include)] - -# dependent = set(include) -# for app in directly_included: -# if app['harness']['dependencies'].get('hard', None): -# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) -# if app['harness']['dependencies'].get('soft', None): -# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) -# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: -# dependent.add('accounts') -# if len(dependent) == len(include): -# return dependent -# return get_included_with_dependencies(values, dependent) - - -# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): -# pass - - -# def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None): -# """ -# Searches recursively for helm templates inside the applications and collects the templates in the destination - -# :param search_root: -# :param dest_helm_chart_path: collected helm templates destination folder -# :param exclude: -# :return: -# """ -# app_base_path = search_root / APPS_PATH - -# for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories -# app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path)) -# if app_name in exclude or (include and not any(inc in app_name for inc in include)): -# continue -# template_dir = app_path / 'deploy' / f'templates-{templates_path}' -# if template_dir.exists(): -# dest_dir = dest_helm_chart_path / 'templates' / app_name - -# logging.info( -# "Collecting templates for application %s to %s", app_name, dest_dir) -# if dest_dir.exists(): -# logging.warning( -# "Merging/overriding all files in directory %s", dest_dir) -# merge_configuration_directories(f"{template_dir}", f"{dest_dir}") -# else: -# shutil.copytree(template_dir, dest_dir) -# resources_dir = app_path / 'deploy' / 'resources' -# if resources_dir.exists(): -# dest_dir = dest_helm_chart_path / 'resources' / app_name - -# logging.info( -# "Collecting resources for 
application %s to %s", app_name, dest_dir) - -# merge_configuration_directories(f"{resources_dir}", f"{dest_dir}") - -# # subchart_dir = app_path / 'deploy/charts' -# # if subchart_dir.exists(): -# # dest_dir = dest_helm_chart_path / 'charts' / app_name - -# # logging.info( -# # "Collecting templates for application %s to %s", app_name, dest_dir) -# # if dest_dir.exists(): -# # logging.warning( -# # "Merging/overriding all files in directory %s", dest_dir) -# # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}") -# # else: -# # shutil.copytree(subchart_dir, dest_dir) - - -# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): -# if not base_helm_chart.exists(): -# return -# if dest_helm_chart_path.exists(): -# logging.info("Merging/overriding all files in directory %s", -# dest_helm_chart_path) -# merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}") -# else: -# logging.info("Copying base deployment chart from %s to %s", -# base_helm_chart, dest_helm_chart_path) -# shutil.copytree(base_helm_chart, dest_helm_chart_path) - - -# def collect_helm_values(deployment_root, env=()): -# """ -# Creates helm values from a cloudharness deployment scaffolding -# """ -# values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml' - -# values = get_template(values_template_path) - -# for e in env: -# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, -# f'values-template-{e}.yaml') -# if os.path.exists(specific_template_path): -# logging.info( -# "Specific environment values template found: " + specific_template_path) -# with open(specific_template_path) as f: -# values_env_specific = yaml.safe_load(f) -# values = dict_merge(values, values_env_specific) -# return values - - -# def init_app_values(deployment_root, exclude, values=None): -# values = values if values is not None else {} -# app_base_path = os.path.join(deployment_root, APPS_PATH) -# 
overridden_template_path = os.path.join( -# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') -# default_values_path = os.path.join( -# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - -# for app_path in get_sub_paths(app_base_path): - -# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) - -# if app_name in exclude: -# continue -# app_key = app_name.replace('-', '_') -# if app_key not in values: -# default_values = get_template(default_values_path) -# values[app_key] = default_values -# overridden_defaults = get_template(overridden_template_path) -# values[app_key] = dict_merge(values[app_key], overridden_defaults) - -# return values - - -# def values_from_legacy(values): -# if KEY_HARNESS not in values: -# values[KEY_HARNESS] = {} -# harness = values[KEY_HARNESS] -# if KEY_SERVICE not in harness: -# harness[KEY_SERVICE] = {} -# if KEY_DEPLOYMENT not in harness: -# harness[KEY_DEPLOYMENT] = {} -# if KEY_DATABASE not in harness: -# harness[KEY_DATABASE] = {} - -# if 'subdomain' in values: -# harness['subdomain'] = values['subdomain'] -# if 'autodeploy' in values: -# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] -# if 'autoservice' in values: -# harness[KEY_SERVICE]['auto'] = values['autoservice'] -# if 'secureme' in values: -# harness['secured'] = values['secureme'] -# if 'resources' in values: -# harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) -# if 'replicas' in values: -# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] -# if 'image' in values: -# harness[KEY_DEPLOYMENT]['image'] = values['image'] -# if 'port' in values: -# harness[KEY_DEPLOYMENT]['port'] = values['port'] -# harness[KEY_SERVICE]['port'] = values['port'] - - -# def values_set_legacy(values): -# harness = values[KEY_HARNESS] -# if 'image' in harness[KEY_DEPLOYMENT]: -# values['image'] = harness[KEY_DEPLOYMENT]['image'] - -# values['name'] = harness['name'] -# if harness[KEY_DEPLOYMENT].get('port', None): -# 
values['port'] = harness[KEY_DEPLOYMENT]['port'] -# if 'resources' in harness[KEY_DEPLOYMENT]: -# values['resources'] = harness[KEY_DEPLOYMENT]['resources'] - - -# def generate_tag_from_content(content_path, ignore=()): -# from dirhash import dirhash -# return dirhash(content_path, 'sha1', ignore=ignore) - - -# def extract_env_variables_from_values(values, envs=tuple(), prefix=''): -# if isinstance(values, dict): -# newenvs = list(envs) -# for key, value in values.items(): -# v = extract_env_variables_from_values( -# value, envs, f"{prefix}_{key}".replace('-', '_').upper()) -# if key in ('name', 'port', 'subdomain'): -# newenvs.extend(v) -# return newenvs -# else: -# return [env_variable(prefix, values)] - - -# def create_env_variables(values): -# for app_name, value in values[KEY_APPS].items(): -# if KEY_HARNESS in value: -# values['env'].extend(extract_env_variables_from_values( -# value[KEY_HARNESS], prefix='CH_' + app_name)) -# values['env'].append(env_variable('CH_DOMAIN', values['domain'])) -# values['env'].append(env_variable( -# 'CH_IMAGE_REGISTRY', values['registry']['name'])) -# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) - - -# def hosts_info(values): -# domain = values['domain'] -# namespace = values['namespace'] -# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if -# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if -# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] -# try: -# ip = get_cluster_ip() -# except: -# logging.warning('Cannot get cluster ip') -# return -# logging.info( -# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' 
+ domain for sd in subdomains)}") - -# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] -# for app in values[KEY_APPS].values() if KEY_HARNESS in app) - -# logging.info( -# "\nTo run locally some apps, also those references may be needed") -# for appname in values[KEY_APPS]: -# app = values[KEY_APPS][appname]['harness'] -# if 'deployment' not in app: -# continue -# print( -# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( -# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) - -# print( -# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") - - -# class ValuesValidationException(Exception): -# pass - - -# def validate_helm_values(values): -# validate_dependencies(values) - - -# def validate_dependencies(values): -# all_apps = {a for a in values["apps"]} -# for app in all_apps: -# app_values = values["apps"][app] -# if 'dependencies' in app_values[KEY_HARNESS]: -# soft_dependencies = { -# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} -# not_found = {d for d in soft_dependencies if d not in all_apps} -# if not_found: -# logging.warning( -# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") -# hard_dependencies = { -# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} -# not_found = {d for d in hard_dependencies if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad application dependencies specified for application {app}: {','.join(not_found)}") - -# build_dependencies = { -# d for d in app_values[KEY_HARNESS]['dependencies']['build']} - -# not_found = { -# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} -# not_found = {d for d in not_found if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image") - -# 
if 'use_services' in app_values[KEY_HARNESS]: -# service_dependencies = {d['name'].replace( -# "-", "_") for d in app_values[KEY_HARNESS]['use_services']} - -# not_found = {d for d in service_dependencies if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") - - def identify_unicorn_based_main(candidates, app_path): import re gunicorn_pattern = re.compile(r"gunicorn") diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index bd49f8ee..1f7408f1 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -3,20 +3,12 @@ """ import yaml import os -import shutil import logging -from hashlib import sha1 import subprocess -from functools import cache -import tarfile -from docker import from_env as DockerClient - -from . import HERE, CH_ROOT -from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ - DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH -from .utils import get_cluster_ip, get_git_commit_hash, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ - get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ +from cloudharness_utils.constants import VALUES_MANUAL_PATH, HELM_CHART_PATH +from .utils import get_cluster_ip, get_git_commit_hash, image_name_from_dockerfile_path, \ + get_template, merge_to_yaml_file, dict_merge, app_name_from_path, \ find_dockerfiles_paths from .models import HarnessMainConfig @@ -45,59 +37,6 @@ def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain= class CloudHarnessHelm(ConfigurationGenerator): - # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, 
exclude=(), secured=True, - # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, - # namespace=None): - # assert domain, 'A domain must be specified' - # self.root_paths = root_paths - # self.tag = tag - # if registry and registry[-1] != '/': - # self.registry = registry + '/' - # else: - # self.registry = registry - # self.local = local - # self.domain = domain - # self.exclude = exclude - # self.secured = secured - # self.output_path = output_path - # self.include = include - # self.registry_secret = registry_secret - # self.tls = tls - # self.env = env - # self.namespace = namespace - - # self.dest_deployment_path = os.path.join( - # self.output_path, HELM_CHART_PATH) - # self.helm_chart_path = os.path.join( - # self.dest_deployment_path, 'Chart.yaml') - # self.__init_deployment() - - # self.static_images = set() - # self.base_images = {} - # self.all_images = {} - - # def __init_deployment(self): - # """ - # Create the base helm chart - # """ - # if os.path.exists(self.dest_deployment_path): - # shutil.rmtree(self.dest_deployment_path) - # # Initialize with default - # copy_merge_base_deployment(self.dest_deployment_path, os.path.join( - # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) - - # # Override for every cloudharness scaffolding - # for root_path in self.root_paths: - # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path, - # base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) - # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include, - # dest_helm_chart_path=self.dest_deployment_path) - - # def __adjust_missing_values(self, helm_values): - # if 'name' not in helm_values: - # with open(self.helm_chart_path) as f: - # chart_idx_content = yaml.safe_load(f) - # helm_values['name'] = chart_idx_content['name'].lower() def process_values(self) -> HarnessMainConfig: """ @@ -141,174 +80,6 @@ def process_values(self) -> HarnessMainConfig: 
validate_helm_values(merged_values) return HarnessMainConfig.from_dict(merged_values) - # def __process_applications(self, helm_values, base_image_name): - # for root_path in self.root_paths: - # app_values = init_app_values( - # root_path, exclude=self.exclude, values=helm_values[KEY_APPS]) - # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - # app_values) - - # app_base_path = os.path.join(root_path, APPS_PATH) - # app_values = self.collect_app_values( - # app_base_path, base_image_name=base_image_name) - # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS], - # app_values) - - # def collect_app_values(self, app_base_path, base_image_name=None): - # values = {} - - # for app_path in get_sub_paths(app_base_path): - # app_name = app_name_from_path( - # os.path.relpath(app_path, app_base_path)) - - # if app_name in self.exclude: - # continue - # app_key = app_name.replace('-', '_') - - # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) - - # values[app_key] = dict_merge( - # values[app_key], app_values) if app_key in values else app_values - - # return values - - # def __init_static_images(self, base_image_name): - # for static_img_dockerfile in self.static_images: - # img_name = image_name_from_dockerfile_path(os.path.basename( - # static_img_dockerfile), base_name=base_image_name) - # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=static_img_dockerfile) - - # def __assign_static_build_dependencies(self, helm_values): - # for static_img_dockerfile in self.static_images: - # key = os.path.basename(static_img_dockerfile) - # if key in helm_values[KEY_TASK_IMAGES]: - # dependencies = guess_build_dependencies_from_dockerfile( - # static_img_dockerfile) - # for dep in dependencies: - # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]: - # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep] - - # for image_name in 
list(helm_values[KEY_TASK_IMAGES].keys()): - # if image_name in self.exclude: - # del helm_values[KEY_TASK_IMAGES][image_name] - - # def __init_base_images(self, base_image_name): - - # for root_path in self.root_paths: - # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): - # img_name = image_name_from_dockerfile_path( - # os.path.basename(base_img_dockerfile), base_name=base_image_name) - # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=root_path) - - # self.static_images.update(find_dockerfiles_paths( - # os.path.join(root_path, STATIC_IMAGES_PATH))) - # return self.base_images - - # def __init_test_images(self, base_image_name): - # test_images = {} - # for root_path in self.root_paths: - # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): - # img_name = image_name_from_dockerfile_path( - # os.path.basename(base_img_dockerfile), base_name=base_image_name) - # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( - # img_name, build_context_path=base_img_dockerfile) - - # return test_images - - - # def __find_static_dockerfile_paths(self, root_path): - # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) - - # def __merge_base_helm_values(self, helm_values): - # # Override for every cloudharness scaffolding - # for root_path in self.root_paths: - # helm_values = dict_merge( - # helm_values, - # collect_helm_values(root_path, env=self.env) - # ) - - # return helm_values - - # def __get_default_helm_values(self): - # helm_values = get_template(os.path.join( - # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml')) - # helm_values = dict_merge(helm_values, - # collect_helm_values(CH_ROOT, env=self.env)) - - # return helm_values - - # def create_tls_certificate(self, helm_values): - # if not self.tls: - # 
helm_values['tls'] = None - # return - # if not self.local: - # return - # helm_values['tls'] = self.domain.replace(".", "-") + "-tls" - - # bootstrap_file = 'bootstrap.sh' - # certs_parent_folder_path = os.path.join( - # self.output_path, 'helm', 'resources') - # certs_folder_path = os.path.join(certs_parent_folder_path, 'certs') - - # if os.path.exists(os.path.join(certs_folder_path)): - # # don't overwrite the certificate if it exists - # return - - # try: - # client = DockerClient() - # client.ping() - # except: - # raise ConnectionRefusedError( - # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...') - - # # Create CA and sign cert for domain - # container = client.containers.run(image='frapsoft/openssl', - # command=f'sleep 60', - # entrypoint="", - # detach=True, - # environment=[ - # f"DOMAIN={self.domain}"], - # ) - - # container.exec_run('mkdir -p /mnt/vol1') - # container.exec_run('mkdir -p /mnt/certs') - - # # copy bootstrap file - # cur_dir = os.getcwd() - # os.chdir(os.path.join(HERE, 'scripts')) - # tar = tarfile.open(bootstrap_file + '.tar', mode='w') - # try: - # tar.add(bootstrap_file) - # finally: - # tar.close() - # data = open(bootstrap_file + '.tar', 'rb').read() - # container.put_archive('/mnt/vol1', data) - # os.chdir(cur_dir) - # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1') - - # # exec bootstrap file - # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}') - - # # retrieve the certs from the container - # bits, stat = container.get_archive('/mnt/certs') - # if not os.path.exists(certs_folder_path): - # os.makedirs(certs_folder_path) - # f = open(f'{certs_parent_folder_path}/certs.tar', 'wb') - # for chunk in bits: - # f.write(chunk) - # f.close() - # cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar') - # cf.extractall(path=certs_parent_folder_path) - - # logs = container.logs() - # logging.info(f'openssl container logs: {logs}') - - # # stop the container - # 
container.kill() - - # logging.info("Created certificates for local deployment") def __finish_helm_values(self, values): """ @@ -383,37 +154,6 @@ def __finish_helm_values(self, values): create_env_variables(values) return values, self.include - # def __clear_unused_db_configuration(self, harness_config): - # database_config = harness_config[KEY_DATABASE] - # database_type = database_config.get('type', None) - # if database_type is None: - # del harness_config[KEY_DATABASE] - # return - # db_specific_keys = [k for k, v in database_config.items() - # if isinstance(v, dict) and 'image' in v and 'ports' in v] - # for db in db_specific_keys: - # if database_type != db: - # del database_config[db] - - # def image_tag(self, image_name, build_context_path=None, dependencies=()): - # tag = self.tag - # if tag is None and not self.local: - # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") - # ignore_path = os.path.join(build_context_path, '.dockerignore') - # ignore = set(DEFAULT_IGNORE) - # if os.path.exists(ignore_path): - # with open(ignore_path) as f: - # ignore = ignore.union({line.strip() for line in f}) - # logging.info(f"Ignoring {ignore}") - # tag = generate_tag_from_content(build_context_path, ignore) - # logging.info(f"Content hash: {tag}") - # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) - # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() - # logging.info(f"Generated tag: {tag}") - # app_name = image_name.split("/")[-1] # the image name can have a prefix - # self.all_images[app_name] = tag - # return self.registry + image_name + (f':{tag}' if tag else '') - def create_app_values_spec(self, app_name, app_path, base_image_name=None): logging.info('Generating values script for ' + app_name) @@ -478,284 +218,3 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None): img_name, 
build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) return values - - -# def get_included_with_dependencies(values, include): -# app_values = values['apps'].values() -# directly_included = [app for app in app_values if any( -# inc == app[KEY_HARNESS]['name'] for inc in include)] - -# dependent = set(include) -# for app in directly_included: -# if app['harness']['dependencies'].get('hard', None): -# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard'])) -# if app['harness']['dependencies'].get('soft', None): -# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft'])) -# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']: -# dependent.add('accounts') -# if len(dependent) == len(include): -# return dependent -# return get_included_with_dependencies(values, dependent) - - -# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): -# pass - - -# def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None): -# """ -# Searches recursively for helm templates inside the applications and collects the templates in the destination - -# :param search_root: -# :param dest_helm_chart_path: collected helm templates destination folder -# :param exclude: -# :return: -# """ -# app_base_path = os.path.join(search_root, APPS_PATH) - -# for app_path in get_sub_paths(app_base_path): -# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) -# if app_name in exclude or (include and not any(inc in app_name for inc in include)): -# continue -# template_dir = os.path.join(app_path, 'deploy', 'templates') -# if os.path.exists(template_dir): -# dest_dir = os.path.join( -# dest_helm_chart_path, 'templates', app_name) - -# logging.info( -# "Collecting templates for application %s to %s", app_name, dest_dir) -# if os.path.exists(dest_dir): -# logging.warning( -# "Merging/overriding all files in directory %s", dest_dir) -# merge_configuration_directories(template_dir, 
dest_dir) -# else: -# shutil.copytree(template_dir, dest_dir) -# resources_dir = os.path.join(app_path, 'deploy/resources') -# if os.path.exists(resources_dir): -# dest_dir = os.path.join( -# dest_helm_chart_path, 'resources', app_name) - -# logging.info( -# "Collecting resources for application %s to %s", app_name, dest_dir) - -# merge_configuration_directories(resources_dir, dest_dir) - -# subchart_dir = os.path.join(app_path, 'deploy/charts') -# if os.path.exists(subchart_dir): -# dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name) - -# logging.info( -# "Collecting templates for application %s to %s", app_name, dest_dir) -# if os.path.exists(dest_dir): -# logging.warning( -# "Merging/overriding all files in directory %s", dest_dir) -# merge_configuration_directories(subchart_dir, dest_dir) -# else: -# shutil.copytree(subchart_dir, dest_dir) - - -# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): -# if not os.path.exists(base_helm_chart): -# return -# if os.path.exists(dest_helm_chart_path): -# logging.info("Merging/overriding all files in directory %s", -# dest_helm_chart_path) -# merge_configuration_directories(base_helm_chart, dest_helm_chart_path) -# else: -# logging.info("Copying base deployment chart from %s to %s", -# base_helm_chart, dest_helm_chart_path) -# shutil.copytree(base_helm_chart, dest_helm_chart_path) - - -# def collect_helm_values(deployment_root, env=()): -# """ -# Creates helm values from a cloudharness deployment scaffolding -# """ - -# values_template_path = os.path.join( -# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') - -# values = get_template(values_template_path) - -# for e in env: -# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, -# f'values-template-{e}.yaml') -# if os.path.exists(specific_template_path): -# logging.info( -# "Specific environment values template found: " + specific_template_path) -# with open(specific_template_path) 
as f: -# values_env_specific = yaml.safe_load(f) -# values = dict_merge(values, values_env_specific) -# return values - - -# def init_app_values(deployment_root, exclude, values=None): -# values = values if values is not None else {} -# app_base_path = os.path.join(deployment_root, APPS_PATH) -# overridden_template_path = os.path.join( -# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') -# default_values_path = os.path.join( -# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') - -# for app_path in get_sub_paths(app_base_path): - -# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path)) - -# if app_name in exclude: -# continue -# app_key = app_name.replace('-', '_') -# if app_key not in values: -# default_values = get_template(default_values_path) -# values[app_key] = default_values -# overridden_defaults = get_template(overridden_template_path) -# values[app_key] = dict_merge(values[app_key], overridden_defaults) - -# return values - - -# def values_from_legacy(values): -# if KEY_HARNESS not in values: -# values[KEY_HARNESS] = {} -# harness = values[KEY_HARNESS] -# if KEY_SERVICE not in harness: -# harness[KEY_SERVICE] = {} -# if KEY_DEPLOYMENT not in harness: -# harness[KEY_DEPLOYMENT] = {} -# if KEY_DATABASE not in harness: -# harness[KEY_DATABASE] = {} - -# if 'subdomain' in values: -# harness['subdomain'] = values['subdomain'] -# if 'autodeploy' in values: -# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy'] -# if 'autoservice' in values: -# harness[KEY_SERVICE]['auto'] = values['autoservice'] -# if 'secureme' in values: -# harness['secured'] = values['secureme'] -# if 'resources' in values: -# harness[KEY_DEPLOYMENT]['resources'].update(values['resources']) -# if 'replicas' in values: -# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas'] -# if 'image' in values: -# harness[KEY_DEPLOYMENT]['image'] = values['image'] -# if 'port' in values: -# harness[KEY_DEPLOYMENT]['port'] = values['port'] -# 
harness[KEY_SERVICE]['port'] = values['port'] - - -# def values_set_legacy(values): -# harness = values[KEY_HARNESS] -# if 'image' in harness[KEY_DEPLOYMENT]: -# values['image'] = harness[KEY_DEPLOYMENT]['image'] - -# values['name'] = harness['name'] -# if harness[KEY_DEPLOYMENT].get('port', None): -# values['port'] = harness[KEY_DEPLOYMENT]['port'] -# if 'resources' in harness[KEY_DEPLOYMENT]: -# values['resources'] = harness[KEY_DEPLOYMENT]['resources'] - - -# def generate_tag_from_content(content_path, ignore=()): -# from dirhash import dirhash -# return dirhash(content_path, 'sha1', ignore=ignore) - - -# def extract_env_variables_from_values(values, envs=tuple(), prefix=''): -# if isinstance(values, dict): -# newenvs = list(envs) -# for key, value in values.items(): -# v = extract_env_variables_from_values( -# value, envs, f"{prefix}_{key}".replace('-', '_').upper()) -# if key in ('name', 'port', 'subdomain'): -# newenvs.extend(v) -# return newenvs -# else: -# return [env_variable(prefix, values)] - - -# def create_env_variables(values): -# for app_name, value in values[KEY_APPS].items(): -# if KEY_HARNESS in value: -# values['env'].extend(extract_env_variables_from_values( -# value[KEY_HARNESS], prefix='CH_' + app_name)) -# values['env'].append(env_variable('CH_DOMAIN', values['domain'])) -# values['env'].append(env_variable( -# 'CH_IMAGE_REGISTRY', values['registry']['name'])) -# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) - - -# def hosts_info(values): -# domain = values['domain'] -# namespace = values['namespace'] -# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if -# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if -# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']] -# try: -# ip = get_cluster_ip() -# except: -# logging.warning('Cannot get cluster ip') -# return -# logging.info( -# "\nTo test locally, 
update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}") - -# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name'] -# for app in values[KEY_APPS].values() if KEY_HARNESS in app) - -# logging.info( -# "\nTo run locally some apps, also those references may be needed") -# for appname in values[KEY_APPS]: -# app = values[KEY_APPS][appname]['harness'] -# if 'deployment' not in app: -# continue -# print( -# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( -# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace)) - -# print( -# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}") - - -# class ValuesValidationException(Exception): -# pass - - -# def validate_helm_values(values): -# validate_dependencies(values) - - -# def validate_dependencies(values): -# all_apps = {a for a in values["apps"]} -# for app in all_apps: -# app_values = values["apps"][app] -# if 'dependencies' in app_values[KEY_HARNESS]: -# soft_dependencies = { -# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']} -# not_found = {d for d in soft_dependencies if d not in all_apps} -# if not_found: -# logging.warning( -# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}") -# hard_dependencies = { -# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']} -# not_found = {d for d in hard_dependencies if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad application dependencies specified for application {app}: {','.join(not_found)}") - -# build_dependencies = { -# d for d in app_values[KEY_HARNESS]['dependencies']['build']} - -# not_found = { -# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} -# not_found = {d for d in not_found if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad build dependencies specified for 
application {app}: {','.join(not_found)} not found as built image") - -# if 'use_services' in app_values[KEY_HARNESS]: -# service_dependencies = {d['name'].replace( -# "-", "_") for d in app_values[KEY_HARNESS]['use_services']} - -# not_found = {d for d in service_dependencies if d not in all_apps} -# if not_found: -# raise ValuesValidationException( -# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}") From a4cd813718e35efebab29586431661fcfccc83be Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 08:21:21 -0600 Subject: [PATCH 068/210] CH-100 Refactor helm test --- .../cloudharness_utils/testing/util.py | 6 +- .../ch_cli_tools/configurationgenerator.py | 4 +- .../deployment-cli-tools/ch_cli_tools/helm.py | 2 +- tools/deployment-cli-tools/tests/test_helm.py | 189 ++++++++---------- 4 files changed, 89 insertions(+), 112 deletions(-) diff --git a/libraries/cloudharness-utils/cloudharness_utils/testing/util.py b/libraries/cloudharness-utils/cloudharness_utils/testing/util.py index b0e98624..6f11adcc 100644 --- a/libraries/cloudharness-utils/cloudharness_utils/testing/util.py +++ b/libraries/cloudharness-utils/cloudharness_utils/testing/util.py @@ -1,7 +1,7 @@ -from cgi import test +# from cgi import test import os -from os.path import dirname as dn +# from os.path import dirname as dn from cloudharness_model.models import ApplicationUser, ApplicationTestConfig, ApplicationHarnessConfig, E2ETestsConfig @@ -11,7 +11,7 @@ def get_user_password(main_user: ApplicationUser): def get_app_environment(app_config: ApplicationHarnessConfig, app_domain, use_local_env=True): my_env = os.environ.copy() if use_local_env else {} my_env["APP_URL"] = app_domain - + if app_config.accounts and app_config.accounts.users: main_user: ApplicationUser = app_config.accounts.users[0] diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py index 
9a445456..ba974dfc 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py +++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py @@ -38,7 +38,7 @@ class ConfigurationGenerator(object): - def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, + def __init__(self, root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None, templates_path=HELM_PATH): assert domain, 'A domain must be specified' @@ -56,7 +56,7 @@ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=Non self.include = include self.registry_secret = registry_secret self.tls = tls - self.env = env + self.env = env or {} self.namespace = namespace self.templates_path = templates_path diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index 1f7408f1..daae2d16 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -26,7 +26,7 @@ def deploy(namespace, output_path='./deployment'): f"helm upgrade {namespace} {helm_path} -n {namespace} --install --reset-values".split()) -def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, +def create_helm_chart(root_paths, tag: str | None | int ='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None) -> HarnessMainConfig: if (type(env)) == str: diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py index 5fa269d6..9a773442 100644 --- a/tools/deployment-cli-tools/tests/test_helm.py +++ b/tools/deployment-cli-tools/tests/test_helm.py @@ -1,16 +1,19 @@ -import shutil - from 
ch_cli_tools.helm import * from ch_cli_tools.configurationgenerator import * +import pytest HERE = os.path.dirname(os.path.realpath(__file__)) RESOURCES = os.path.join(HERE, 'resources') -OUT = '/tmp/deployment' CLOUDHARNESS_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(HERE))) -def test_collect_helm_values(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'], +def exists(path): + return path.exists() + + +def test_collect_helm_values(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'], exclude=['events'], domain="my.local", namespace='test', env='dev', local=False, tag=1, registry='reg') @@ -51,24 +54,21 @@ def test_collect_helm_values(): # Environment specific overriding assert values[KEY_APPS]['accounts']['a'] == 'dev' assert values['a'] == 'dev' - assert values['database']['auto'] == False + assert values['database']['auto'] is False # legacy reading - assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True - assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False - helm_path = os.path.join(OUT, HELM_CHART_PATH) - - def exists(*args): - return os.path.exists(os.path.join(*args)) + helm_path = out_folder / HELM_CHART_PATH # Check files assert exists(helm_path) - assert exists(helm_path, 'values.yaml') - assert exists(helm_path, 'resources/accounts/realm.json') - assert exists(helm_path, 'resources/accounts/aresource.txt') - assert exists(helm_path, 'resources/myapp/aresource.txt') - assert exists(helm_path, 'templates/myapp/mytemplate.yaml') + assert exists(helm_path / 'values.yaml') + assert exists(helm_path / 'resources' / 'accounts' / 'realm.json') + assert 
exists(helm_path / 'resources' / 'accounts' / 'aresource.txt') + assert exists(helm_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(helm_path / 'templates' / 'myapp' / 'mytemplate.yaml') # Checl base and task images assert values[KEY_TASK_IMAGES] @@ -78,8 +78,6 @@ def exists(*args): # Not indicated as a build dependency assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES] - shutil.rmtree(OUT) - def test_collect_helm_values_noreg_noinclude(tmp_path): out_path = tmp_path / 'test_collect_helm_values_noreg_noinclude' @@ -115,24 +113,21 @@ def test_collect_helm_values_noreg_noinclude(tmp_path): # Environment specific overriding assert values[KEY_APPS]['accounts']['a'] == 'dev' assert values['a'] == 'dev' - assert values['database']['auto'] == False + assert values['database']['auto'] is False # legacy reading - assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True - assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False helm_path = out_path / HELM_CHART_PATH - def exists(*args): - return os.path.exists(os.path.join(*args)) - # Check files assert exists(helm_path) - assert exists(helm_path, 'values.yaml') - assert exists(helm_path, 'resources/accounts/realm.json') - assert exists(helm_path, 'resources/accounts/aresource.txt') - assert exists(helm_path, 'resources/myapp/aresource.txt') - assert exists(helm_path, 'templates/myapp/mytemplate.yaml') + assert exists(helm_path / 'values.yaml') + assert exists(helm_path / 'resources' / 'accounts' / 'realm.json') + assert exists(helm_path / 'resources' / 'accounts' / 'aresource.txt') + assert exists(helm_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(helm_path / 'templates' / 'myapp' / 'mytemplate.yaml') assert values[KEY_TASK_IMAGES] assert 'cloudharness-base' in 
values[KEY_TASK_IMAGES] @@ -140,68 +135,50 @@ def exists(*args): assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1' -def test_collect_helm_values_precedence(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", +def test_collect_helm_values_precedence(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_precedence' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["events"]) # Values.yaml from current app must override values-prod.yaml from cloudharness assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden' assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod' -def test_collect_helm_values_multiple_envs(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", +def test_collect_helm_values_multiple_envs(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_multiple_envs' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"]) - assert values[KEY_APPS]['myapp']['test'] == True, 'values-test not loaded' - assert values[KEY_APPS]['myapp']['dev'] == True, 'values-dev not loaded' + assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded' + assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded' assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding' -def test_collect_helm_values_wrong_dependencies_validate(): - try: - values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local", +def test_collect_helm_values_wrong_dependencies_validate(tmp_path): + out_folder = tmp_path / 
'test_collect_helm_values_wrong_dependencies_validate' + with pytest.raises(ValuesValidationException): + create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"]) - - except ValuesValidationException as e: - logging.info("Exception correctly raised %s", e.args) - assert True - else: - assert False, "Should error because of wrong hard dependency" - try: - values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local", + create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"]) except ValuesValidationException as e: - assert False, "Should not error because of wrong soft dependency" - else: - assert True, "No error for wrong soft dependencies" + pytest.fail("Should not error because of wrong soft dependency") - try: - values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local", + with pytest.raises(ValuesValidationException): + create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["wrong-build"]) - - except ValuesValidationException as e: - logging.info("Exception correctly raised %s", e.args) - assert True - else: - assert False, "Should error because of wrong build dependency" - - try: - values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local", + with pytest.raises(ValuesValidationException): + create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["wrong-services"]) - except 
ValuesValidationException as e: - logging.info("Exception correctly raised %s", e.args) - assert True - else: - assert False, "Should error because of wrong service dependency" - -def test_collect_helm_values_build_dependencies(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", +def test_collect_helm_values_build_dependencies(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_build_dependencies' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["myapp"]) assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies" @@ -209,8 +186,9 @@ def test_collect_helm_values_build_dependencies(): assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency" assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency" -def test_collect_helm_values_build_dependencies_nodeps(): - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", +def test_collect_helm_values_build_dependencies_nodeps(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_build_dependencies_nodeps' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["events"]) @@ -219,8 +197,10 @@ def test_collect_helm_values_build_dependencies_nodeps(): assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency" assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency" -def test_collect_helm_values_build_dependencies_exclude(): - values = 
create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", + +def test_collect_helm_values_build_dependencies_exclude(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_build_dependencies_exclude' + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"]) @@ -229,9 +209,10 @@ def test_collect_helm_values_build_dependencies_exclude(): assert 'workflows-extract-download' not in values[KEY_TASK_IMAGES], "workflows-extract-download has been explicitly excluded" -def test_clear_unused_dbconfig(): +def test_clear_unused_dbconfig(tmp_path): + out_folder = tmp_path / 'test_clear_unused_dbconfig' - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", env='withpostgres', local=False, include=["myapp"], exclude=["legacy"]) # There is a DB config @@ -248,7 +229,7 @@ def test_clear_unused_dbconfig(): assert db_config['mongo'] is None assert db_config['neo4j'] is None - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", env='withmongo', local=False, include=["myapp"], exclude=["legacy"]) assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS] @@ -262,9 +243,10 @@ def test_clear_unused_dbconfig(): assert db_config['postgres'] is None -def test_clear_all_dbconfig_if_nodb(): +def test_clear_all_dbconfig_if_nodb(tmp_path): + out_folder = tmp_path / 'test_clear_all_dbconfig_if_nodb' - values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local", + values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", env='withoutdb', 
local=False, include=["myapp"], exclude=["legacy"]) # There is a DB config @@ -274,6 +256,7 @@ def test_clear_all_dbconfig_if_nodb(): db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE] assert db_config is None + def test_tag_hash_generation(): v1 = generate_tag_from_content(RESOURCES) v2 = generate_tag_from_content(RESOURCES, ignore=['myapp']) @@ -285,21 +268,22 @@ def test_tag_hash_generation(): v5 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) assert v5 == v4 + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt' try: - fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt') - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') v6 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) assert v6 == v5 v7 = generate_tag_from_content(RESOURCES) assert v7 != v1 finally: - os.remove(fname) + fname.unlink() + -def test_collect_helm_values_auto_tag(): +def test_collect_helm_values_auto_tag(tmp_path): + out_folder = tmp_path / 'test_collect_helm_values_auto_tag' def create(): - return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'], + return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'], exclude=['events'], domain="my.local", namespace='test', env='dev', local=False, tag=None, registry='reg') @@ -321,62 +305,55 @@ def create(): assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore" + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt' try: - fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt') - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') values = create() assert v1 != values.apps['myapp'].harness.deployment.image, "Adding the file changed the hash value" v2 = values.apps['myapp'].harness.deployment.image assert values["task-images"][BASE_KEY] == b1, "Application 
files should be ignored for base image following the root .dockerignore" finally: - os.remove(fname) - + fname.unlink() try: - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') values = create() assert v2 == values.apps['myapp'].harness.deployment.image, "Recreated an identical file, the hash value should be the same" finally: - os.remove(fname) + fname.unlink() - fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.ignored') + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored' try: - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') + values = create() assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore" assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore" finally: - os.remove(fname) - - + fname.unlink() # Dependencies test: if a dependency is changed, the hash should change - fname = os.path.join(RESOURCES, 'infrastructure/common-images', 'my-common', 'afile') + fname = Path(RESOURCES) / 'infrastructure' / 'common-images' / 'my-common' / 'afile' try: - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') values = create() assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change" assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change" finally: - os.remove(fname) + fname.unlink() - fname = os.path.join(CLOUDHARNESS_ROOT, 'atestfile') + fname = Path(CLOUDHARNESS_ROOT) / 'atestfile' try: - with open(fname, 'w') as f: - f.write('a') + fname.write_text('a') values = create() @@ -384,4 +361,4 @@ def create(): assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change" assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels 
dependency: If a base image dependency is changed, the hash should change" finally: - os.remove(fname) + fname.unlink() From 1d63bc4f139fe3edef85d2bce3713ca10e1f97a3 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 08:37:34 -0600 Subject: [PATCH 069/210] CH-100 Add tests for docker compose target --- .../cloudharness_utils/constants.py | 1 + .../ch_cli_tools/dockercompose.py | 2 +- .../deploy/templates-compose/mytemplate.yaml | 0 .../tests/test_dockercompose.py | 365 ++++++++++++++++++ 4 files changed, 367 insertions(+), 1 deletion(-) create mode 100644 tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml create mode 100644 tools/deployment-cli-tools/tests/test_dockercompose.py diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py index d989cff9..1cd12a7d 100644 --- a/libraries/cloudharness-utils/cloudharness_utils/constants.py +++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py @@ -12,6 +12,7 @@ HELM_ENGINE = HELM_PATH COMPOSE = 'compose' +COMPOSE_PATH = COMPOSE COMPOSE_ENGINE = 'docker-compose' INFRASTRUCTURE_PATH = 'infrastructure' diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index f65e352b..1a96e562 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -18,7 +18,7 @@ from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates -def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, +def 
create_docker_compose_configuration(root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None) -> HarnessMainConfig: if (type(env)) == str: diff --git a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml new file mode 100644 index 00000000..e69de29b diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py new file mode 100644 index 00000000..c59d552f --- /dev/null +++ b/tools/deployment-cli-tools/tests/test_dockercompose.py @@ -0,0 +1,365 @@ +from ch_cli_tools.dockercompose import * +from ch_cli_tools.configurationgenerator import * +import pytest + +HERE = os.path.dirname(os.path.realpath(__file__)) +RESOURCES = os.path.join(HERE, 'resources') +CLOUDHARNESS_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(HERE))) +COMPOSE_PATH = COMPOSE + + +def exists(path): + return path.exists() + + +def test_collect_compose_values(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'], + exclude=['events'], domain="my.local", + namespace='test', env='dev', local=False, tag=1, registry='reg') + + # Auto values + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'reg/cloudharness/myapp:1' + assert values.apps['myapp'].harness.deployment.image == 'reg/cloudharness/myapp:1' + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['name'] == 'myapp' + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['name'] == 'legacy' + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['image'] == 'reg/cloudharness/accounts:1' + + # First level include apps + 
assert 'samples' in values[KEY_APPS] + assert 'myapp' in values[KEY_APPS] + + # Not included + assert 'jupyterhub' not in values[KEY_APPS] + + # Dependency include first level + assert 'accounts' in values[KEY_APPS] + assert 'legacy' in values[KEY_APPS] + + # Dependency include second level + assert 'argo' in values[KEY_APPS] + + # Explicit exclude overrides include + assert 'events' not in values[KEY_APPS] + + # Base values kept + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['subdomain'] == 'accounts' + + # Defaults + assert 'service' in values[KEY_APPS]['legacy'][KEY_HARNESS] + assert 'common' in values[KEY_APPS]['legacy'] + assert 'common' in values[KEY_APPS]['accounts'] + # Values overriding + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['port'] == 'overridden' + + # Environment specific overriding + assert values[KEY_APPS]['accounts']['a'] == 'dev' + assert values['a'] == 'dev' + assert values['database']['auto'] is False + + # legacy reading + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False + + compose_path = out_folder / COMPOSE_PATH + + # Check files + assert exists(compose_path) + assert exists(compose_path / 'values.yaml') + assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') + assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') + assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') + + # Checl base and task images + assert values[KEY_TASK_IMAGES] + assert 'cloudharness-base' in values[KEY_TASK_IMAGES] + assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'reg/cloudharness/cloudharness-base:1' + assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'reg/cloudharness/myapp-mytask:1' + # Not indicated as a build dependency + assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES] + + +def 
test_collect_compose_values_noreg_noinclude(tmp_path): + out_path = tmp_path / 'test_collect_compose_values_noreg_noinclude' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_path, domain="my.local", + namespace='test', env='dev', local=False, tag=1) + + # Auto values + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'cloudharness/myapp:1' + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['name'] == 'myapp' + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['name'] == 'legacy' + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['image'] == 'cloudharness/accounts:1' + + # First level include apps + assert 'samples' in values[KEY_APPS] + assert 'myapp' in values[KEY_APPS] + assert 'jupyterhub' in values[KEY_APPS] + assert 'accounts' in values[KEY_APPS] + assert 'legacy' in values[KEY_APPS] + assert 'argo' in values[KEY_APPS] + assert 'events' in values[KEY_APPS] + + # Base values kept + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['subdomain'] == 'accounts' + + # Defaults + assert 'service' in values[KEY_APPS]['legacy'][KEY_HARNESS] + assert 'common' in values[KEY_APPS]['legacy'] + assert 'common' in values[KEY_APPS]['accounts'] + # Values overriding + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['port'] == 'overridden' + assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden' + + # Environment specific overriding + assert values[KEY_APPS]['accounts']['a'] == 'dev' + assert values['a'] == 'dev' + assert values['database']['auto'] is False + + # legacy reading + assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True + assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False + + compose_path = out_path / COMPOSE_PATH + + # Check files + assert exists(compose_path) + assert exists(compose_path / 'values.yaml') + assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') + assert 
exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') + assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') + + assert values[KEY_TASK_IMAGES] + assert 'cloudharness-base' in values[KEY_TASK_IMAGES] + assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1' + assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1' + + +def test_collect_compose_values_precedence(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_precedence' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["events"]) + + # Values.yaml from current app must override values-prod.yaml from cloudharness + assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden' + assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod' + +def test_collect_compose_values_multiple_envs(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_multiple_envs' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"]) + + + assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded' + assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded' + assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding' + + + +def test_collect_compose_values_wrong_dependencies_validate(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_wrong_dependencies_validate' + with pytest.raises(ValuesValidationException): + create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", + 
namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"]) + try: + create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"]) + + except ValuesValidationException as e: + pytest.fail("Should not error because of wrong soft dependency") + + with pytest.raises(ValuesValidationException): + create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["wrong-build"]) + with pytest.raises(ValuesValidationException): + create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["wrong-services"]) + + +def test_collect_compose_values_build_dependencies(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_build_dependencies' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["myapp"]) + + assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies" + assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed" + assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency" + assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency" + +def test_collect_compose_values_build_dependencies_nodeps(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_nodeps' + values = 
create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["events"]) + + + assert 'cloudharness-flask' not in values[KEY_TASK_IMAGES], "Cloudharness-flask is not included in the build dependencies" + assert 'cloudharness-base' not in values[KEY_TASK_IMAGES], "Cloudharness-base is not included in the build dependencies" + assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency" + assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency" + + +def test_collect_compose_values_build_dependencies_exclude(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_exclude' + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"]) + + + assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies" + assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed" + assert 'workflows-extract-download' not in values[KEY_TASK_IMAGES], "workflows-extract-download has been explicitly excluded" + + +def test_clear_unused_dbconfig(tmp_path): + out_folder = tmp_path / 'test_clear_unused_dbconfig' + + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + env='withpostgres', local=False, include=["myapp"], exclude=["legacy"]) + + # There is a DB config + assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS] + + db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE] + # postgres is set, but other entries are not. 
+ assert db_config['postgres'] is not None + assert db_config['postgres']['image'].startswith('postgres:') + + # However, it seems that even after removing unused entries, + # the finale instance of the HarnessMainConfig class that is created + # adds back those entries and set them to None. + assert db_config['mongo'] is None + assert db_config['neo4j'] is None + + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + env='withmongo', local=False, include=["myapp"], exclude=["legacy"]) + + assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS] + db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE] + + # mongo is set, but other entries are not. + assert db_config['mongo'] is not None + assert db_config['mongo']['image'].startswith('mongo:') + assert db_config['neo4j'] is None + + assert db_config['postgres'] is None + + +def test_clear_all_dbconfig_if_nodb(tmp_path): + out_folder = tmp_path / 'test_clear_all_dbconfig_if_nodb' + + values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", + env='withoutdb', local=False, include=["myapp"], exclude=["legacy"]) + + # There is a DB config + assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS] + + # But it is None + db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE] + assert db_config is None + + +def test_tag_hash_generation(): + v1 = generate_tag_from_content(RESOURCES) + v2 = generate_tag_from_content(RESOURCES, ignore=['myapp']) + assert v1 != v2 + v3 = generate_tag_from_content(RESOURCES, ignore=['*/myapp/*']) + assert v3 != v1 + v4 = generate_tag_from_content(RESOURCES, ignore=['applications/myapp/*']) + assert v4 == v3 + v5 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) + assert v5 == v4 + + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt' + try: + fname.write_text('a') + + v6 = 
generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) + assert v6 == v5 + v7 = generate_tag_from_content(RESOURCES) + assert v7 != v1 + finally: + fname.unlink() + + +def test_collect_compose_values_auto_tag(tmp_path): + out_folder = tmp_path / 'test_collect_compose_values_auto_tag' + def create(): + return create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'], + exclude=['events'], domain="my.local", + namespace='test', env='dev', local=False, tag=None, registry='reg') + + BASE_KEY = "cloudharness-base" + values = create() + + # Auto values are set by using the directory hash + assert 'reg/cloudharness/myapp:' in values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] + assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image + assert 'cloudharness/myapp-mytask' in values[KEY_TASK_IMAGES]['myapp-mytask'] + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == values.apps['myapp'].harness.deployment.image + v1 = values.apps['myapp'].harness.deployment.image + c1 = values["task-images"]["my-common"] + b1 = values["task-images"][BASE_KEY] + d1 = values["task-images"]["cloudharness-flask"] + + values = create() + assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value" + assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore" + + + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt' + try: + fname.write_text('a') + + values = create() + assert v1 != values.apps['myapp'].harness.deployment.image, "Adding the file changed the hash value" + v2 = values.apps['myapp'].harness.deployment.image + assert values["task-images"][BASE_KEY] == b1, "Application files should be ignored for base image following the root .dockerignore" + finally: + fname.unlink() + + try: + fname.write_text('a') + + values = create() + assert v2 == 
values.apps['myapp'].harness.deployment.image, "Recreated an identical file, the hash value should be the same" + finally: + fname.unlink() + + + fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored' + try: + fname.write_text('a') + + + values = create() + assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore" + + assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore" + finally: + fname.unlink() + + # Dependencies test: if a dependency is changed, the hash should change + fname = Path(RESOURCES) / 'infrastructure' / 'common-images' / 'my-common' / 'afile' + + try: + fname.write_text('a') + + values = create() + + assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change" + assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change" + finally: + fname.unlink() + + + fname = Path(CLOUDHARNESS_ROOT) / 'atestfile' + try: + fname.write_text('a') + + values = create() + + assert b1 != values["task-images"][BASE_KEY], "Content for base image is changed, the hash should change" + assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change" + assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change" + finally: + fname.unlink() From b125dfc1399056c27f90b789c4726f508a2bc239 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 08:52:39 -0600 Subject: [PATCH 070/210] CH-100 Add tests for docker compose --- .../deploy/templates-compose/mytemplate.yaml | 13 +++++++++++++ .../tests/test_dockercompose.py | 19 +++++++++++++++++++ 2 files changed, 32 insertions(+) diff --git 
a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml index e69de29b..aa1a2140 100644 --- a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml +++ b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml @@ -0,0 +1,13 @@ +cloudharness-metadata: + path: resources/generated/test.yaml + +data: |- + mykey: myvalue + +--- + +cloudharness-metadata: + path: resources/generated/test2.yaml + +data: |- + mykey2: myvalue2 diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py index c59d552f..bd6c0a67 100644 --- a/tools/deployment-cli-tools/tests/test_dockercompose.py +++ b/tools/deployment-cli-tools/tests/test_dockercompose.py @@ -64,13 +64,22 @@ def test_collect_compose_values(tmp_path): compose_path = out_folder / COMPOSE_PATH # Check files + assert exists(out_folder / 'docker-compose.yaml') assert exists(compose_path) assert exists(compose_path / 'values.yaml') + assert exists(compose_path / 'allvalues.yaml') assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') + assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') + content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() + assert content == 'mykey: myvalue' + content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() + assert content == 'mykey2: myvalue2' + # Checl base and task images assert values[KEY_TASK_IMAGES] assert 'cloudharness-base' 
in values[KEY_TASK_IMAGES] @@ -123,19 +132,29 @@ def test_collect_compose_values_noreg_noinclude(tmp_path): compose_path = out_path / COMPOSE_PATH # Check files + assert exists(out_path / 'docker-compose.yaml') assert exists(compose_path) assert exists(compose_path / 'values.yaml') + assert exists(compose_path / 'allvalues.yaml') assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') + assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') + assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') + content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() + assert content == 'mykey: myvalue' + content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() + assert content == 'mykey2: myvalue2' + assert values[KEY_TASK_IMAGES] assert 'cloudharness-base' in values[KEY_TASK_IMAGES] assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1' assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1' + def test_collect_compose_values_precedence(tmp_path): out_folder = tmp_path / 'test_collect_compose_values_precedence' values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local", From 2c7e26488e787e372dc03f07ae2145417f145060 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 08:58:49 -0600 Subject: [PATCH 071/210] CH-100 Fix type hinting for Python 3.9 --- .../ch_cli_tools/configurationgenerator.py | 3 ++- tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 3 ++- tools/deployment-cli-tools/ch_cli_tools/helm.py | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py 
b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py index ba974dfc..e371bb53 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py +++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py @@ -1,6 +1,7 @@ """ Utilities to create a helm chart from a CloudHarness directory structure """ +from typing import Union import yaml from ruamel.yaml import YAML import os @@ -38,7 +39,7 @@ class ConfigurationGenerator(object): - def __init__(self, root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True, + def __init__(self, root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None, templates_path=HELM_PATH): assert domain, 'A domain must be specified' diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py index 1a96e562..4b2c374b 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py +++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py @@ -1,6 +1,7 @@ """ Utilities to create a helm chart from a CloudHarness directory structure """ +from typing import Union import yaml from ruamel.yaml import YAML import os @@ -18,7 +19,7 @@ from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates -def create_docker_compose_configuration(root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True, +def create_docker_compose_configuration(root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True, 
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None) -> HarnessMainConfig: if (type(env)) == str: diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index daae2d16..e58070fd 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -1,6 +1,7 @@ """ Utilities to create a helm chart from a CloudHarness directory structure """ +from typing import Union import yaml import os import logging @@ -26,7 +27,7 @@ def deploy(namespace, output_path='./deployment'): f"helm upgrade {namespace} {helm_path} -n {namespace} --install --reset-values".split()) -def create_helm_chart(root_paths, tag: str | None | int ='latest', registry='', local=True, domain=None, exclude=(), secured=True, +def create_helm_chart(root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment', include=None, registry_secret=None, tls=True, env=None, namespace=None) -> HarnessMainConfig: if (type(env)) == str: From 866dee9cf928ae247beb0e6c5fdf83a3cb5b5f47 Mon Sep 17 00:00:00 2001 From: aranega Date: Mon, 1 Apr 2024 09:13:36 -0600 Subject: [PATCH 072/210] CH-100 Add conditional test for docker compose if "helm" is installed --- .../tests/test_dockercompose.py | 30 +++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py index bd6c0a67..86fff944 100644 --- a/tools/deployment-cli-tools/tests/test_dockercompose.py +++ b/tools/deployment-cli-tools/tests/test_dockercompose.py @@ -1,12 +1,15 @@ from ch_cli_tools.dockercompose import * from ch_cli_tools.configurationgenerator import * import pytest +import shutil HERE = os.path.dirname(os.path.realpath(__file__)) RESOURCES = os.path.join(HERE, 'resources') CLOUDHARNESS_ROOT = 
os.path.dirname(os.path.dirname(os.path.dirname(HERE))) COMPOSE_PATH = COMPOSE +HELM_IS_INSTALLED = shutil.which("helm") is not None + def exists(path): return path.exists() @@ -71,14 +74,15 @@ def test_collect_compose_values(tmp_path): assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') - assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') - assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') - content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() - assert content == 'mykey: myvalue' - content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() - assert content == 'mykey2: myvalue2' + if HELM_IS_INSTALLED: + assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') + assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') + content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() + assert content == 'mykey: myvalue' + content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() + assert content == 'mykey2: myvalue2' # Checl base and task images assert values[KEY_TASK_IMAGES] @@ -139,14 +143,16 @@ def test_collect_compose_values_noreg_noinclude(tmp_path): assert exists(compose_path / 'resources' / 'accounts' / 'realm.json') assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt') assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt') - assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') - assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml') - content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() - assert content == 'mykey: 
myvalue' - content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() - assert content == 'mykey2: myvalue2' + if HELM_IS_INSTALLED: + assert exists(compose_path / 'resources' / 'generated' / 'test.yaml') + assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml') + content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text() + assert content == 'mykey: myvalue' + content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text() + assert content == 'mykey2: myvalue2' + assert False assert values[KEY_TASK_IMAGES] assert 'cloudharness-base' in values[KEY_TASK_IMAGES] From f226829ca70f7ff86aa755db461aa17f6562fbba Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 2 Apr 2024 06:00:53 -0600 Subject: [PATCH 073/210] CH-100 Update scripts --- deployment-configuration/compose/Chart.yaml | 2 +- deployment-configuration/compose/README.md | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/deployment-configuration/compose/Chart.yaml b/deployment-configuration/compose/Chart.yaml index f294c3e7..83bf4933 100644 --- a/deployment-configuration/compose/Chart.yaml +++ b/deployment-configuration/compose/Chart.yaml @@ -1,6 +1,6 @@ apiVersion: v1 appVersion: "0.0.1" -description: CloudHarness Helm Chart +description: CloudHarness Docker Compose name: cloudharness version: 0.0.1 maintainers: diff --git a/deployment-configuration/compose/README.md b/deployment-configuration/compose/README.md index abeab69d..391b61c6 100644 --- a/deployment-configuration/compose/README.md +++ b/deployment-configuration/compose/README.md @@ -1,4 +1,3 @@ -# CloudHarness Helm chart: deploy CloudHarness to k8s - -Helm is used to define the CloudHarness deployment on Kubernetes. For further information about Helm, see https://helm.sh. +# CloudHarness Docker Compose: deploy CloudHarness to Docker Compose +Helm is used to define templates about how the CloudHarness deployment on Docker Compose. 
For further information about Helm, see https://helm.sh. From ac4b863bed7dc9a2730ed42037ef4f35a2906179 Mon Sep 17 00:00:00 2001 From: aranega Date: Tue, 2 Apr 2024 10:33:58 -0600 Subject: [PATCH 074/210] CH-100 Fix imports of harness-deployment --- tools/deployment-cli-tools/harness-application | 5 ++--- tools/deployment-cli-tools/harness-deployment | 3 ++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/deployment-cli-tools/harness-application b/tools/deployment-cli-tools/harness-application index a584c05d..46532825 100644 --- a/tools/deployment-cli-tools/harness-application +++ b/tools/deployment-cli-tools/harness-application @@ -2,7 +2,6 @@ import sys import os -import shutil import re import tempfile @@ -25,8 +24,8 @@ if __name__ == "__main__": parser.add_argument('name', metavar='name', type=str, help='Application name') parser.add_argument('-t', '--template', dest='templates', action="append", default=['base',], - help="""Add a template name. - + help="""Add a template name. 
+ Available templates: - flask-server (backend flask server based on openapi) - webapp (webapp including backend and frontend) diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index 9a5cc78c..40d4b09a 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ b/tools/deployment-cli-tools/harness-deployment @@ -5,7 +5,8 @@ import sys import os from ch_cli_tools.dockercompose import create_docker_compose_configuration -from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy +from ch_cli_tools.helm import create_helm_chart, deploy +from ch_cli_tools.configurationgenerator import hosts_info from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file from ch_cli_tools.preprocessing import preprocess_build_overrides From 9b4dcaa9abea51e672358b64f9e47966b8e2be0f Mon Sep 17 00:00:00 2001 From: aranega Date: Thu, 18 Apr 2024 10:11:59 -0600 Subject: [PATCH 075/210] CH-100 Fix issue with TAG policy in skaffold for docker compose --- .../compose/templates/allvalues-template.yaml | 2 +- tools/deployment-cli-tools/ch_cli_tools/skaffold.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment-configuration/compose/templates/allvalues-template.yaml b/deployment-configuration/compose/templates/allvalues-template.yaml index d69538aa..d0aa2866 100644 --- a/deployment-configuration/compose/templates/allvalues-template.yaml +++ b/deployment-configuration/compose/templates/allvalues-template.yaml @@ -8,7 +8,6 @@ to replace the secrets values we create a dict with the structure: thus with an empty secrets node and then it's mergeOverwrite the copy of the .Values we created resulting in a copy of the .Values with all secrets being "" -*/ -}} cloudharness-metadata: path: allvalues2.yaml data: | @@ -18,3 +17,4 @@ data: | {{- $tmp := mergeOverwrite $values_copy 
$new_secrets }} {{- end }} {{ $values_copy | toYaml | indent 4 }} +*/ -}} diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py index 7859d043..b78f8b9e 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py +++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py @@ -201,7 +201,7 @@ def identify_unicorn_based_main(candidates): } skaffold_conf['build']['tagPolicy'] = { 'envTemplate': { - 'template': "TAG" + 'template': '"{{.TAG}}"' } } From 10f2c0f1f4147247a71ee0a8ddacd0145954f5b0 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 19 Apr 2024 10:32:18 -0600 Subject: [PATCH 076/210] CH-100 Update configuration for db in docker compose --- .../compose/templates/auto-compose.yaml | 8 ++++---- .../compose/templates/auto-database.yaml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index ca024edd..c99023fa 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -19,8 +19,8 @@ services: - "443:443" volumes: - "/var/run/docker.sock:/var/run/docker.sock:ro" - - "./certs/:/certs/:ro" - - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" + - "./compose/traefik/certs/:/certs/:ro" + - "./compose/traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" {{- range $app_name, $app_config := .Values.apps }} {{- if has $app_name (list "argo" "events" "nfsserver") -}} @@ -76,10 +76,10 @@ services: replicas: {{ $deployment.replicas | default 1 }} resources: limits: - cpus: {{ $deployment.resources.limits.cpu | default "50m" }} + cpus: {{ $deployment.resources.limits.cpu | default "0.5" }} memory: {{ trimSuffix "i" $deployment.resources.limits.memory | default "64M" }} reservations: - cpus: {{ $deployment.resources.requests.cpu | default "25m" }} + cpus: {{ 
$deployment.resources.requests.cpu | default "0.25" }} memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }} {{- with $deployment.command }} # entrypoint: {{ cat . $deployment.args }} diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml index 569bb220..93fd22ff 100644 --- a/deployment-configuration/compose/templates/auto-database.yaml +++ b/deployment-configuration/compose/templates/auto-database.yaml @@ -13,10 +13,10 @@ deploy: resources: limits: - cpus: {{ .limits.cpu | default "1000m" }} + cpus: {{ .limits.cpu | default "0.75" }} memory: {{ trimSuffix "i" .limits.memory | default "2G" }} reservations: - cpus: {{ .requests.cpu | default "100m" }} + cpus: {{ .requests.cpu | default "0.50" }} memory: {{ trimSuffix "i" .requests.memory | default "512M" }} {{- end }} volumes: From 83d9b8a3de978d480a4e77620eca1390e975d47c Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 19 Apr 2024 11:50:21 -0600 Subject: [PATCH 077/210] CH-100 Add support to link databases with service in docker compose --- .../compose/templates/auto-compose.yaml | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index c99023fa..f284c4be 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -107,11 +107,18 @@ services: {{- range $app_config.harness.env }} - {{ .name }}={{ .value }} {{- end }} - {{- with (concat (without $app_config.harness.dependencies.hard "argo") $app_config.harness.dependencies.soft) }} + {{- with compact + (append (concat (without $app_config.harness.dependencies.hard "argo") + $app_config.harness.dependencies.soft) + (dig "database" "name" "" $app_config.harness)) }} links: {{- range . 
-}} {{- $service := .}} {{- range $name, $conf := $.Values.apps }} + {{- if hasSuffix "-db" $service }} + - {{ $service }}:{{ $service }}.{{ $.Values.domain }} + {{- break -}} + {{- end -}} {{- if eq $conf.harness.name $service }} {{- if has $name (list "events" "nfsserver") }} # - {{ $name }}:{{ $service }}.{{ $.Values.domain }} @@ -128,14 +135,16 @@ services: {{/* "compact" in the beginning is to remove empty values */}} {{- with compact (append - (without $app_config.harness.dependencies.hard "argo" ) + (without $app_config.harness.dependencies.hard "argo" "events") (dig "database" "name" "" $app_config.harness)) -}} - {{- with without $app_config.harness.dependencies.hard "argo" "events" }} depends_on: - {{- end }} {{- range . -}} {{- $service := .}} {{- range $name, $conf := $.Values.apps -}} + {{- if hasSuffix "-db" $service }} + - {{ $service }} + {{- break -}} + {{- end -}} {{- if eq $conf.harness.name $service }} {{- if has $name (list "events" "nfsserver") }} # - {{ $name }} @@ -144,6 +153,7 @@ services: {{- end }} {{- break -}} {{- end -}} + {{- end -}} {{- end }} {{- end }} From e5361ca0a462d035499f24782d30bac6d37ba4e1 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 19 Apr 2024 12:32:07 -0600 Subject: [PATCH 078/210] CH-100 Change location of traefik config files --- deployment-configuration/compose/templates/auto-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml index f284c4be..a3b46854 100644 --- a/deployment-configuration/compose/templates/auto-compose.yaml +++ b/deployment-configuration/compose/templates/auto-compose.yaml @@ -19,8 +19,8 @@ services: - "443:443" volumes: - "/var/run/docker.sock:/var/run/docker.sock:ro" - - "./compose/traefik/certs/:/certs/:ro" - - "./compose/traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" + - "./traefik/certs/:/certs/:ro" + - 
"./traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro" {{- range $app_name, $app_config := .Values.apps }} {{- if has $app_name (list "argo" "events" "nfsserver") -}} From 4c5d36905e157c740fbb6c6c72d95e839d993203 Mon Sep 17 00:00:00 2001 From: aranega Date: Fri, 19 Apr 2024 20:26:30 -0600 Subject: [PATCH 079/210] CH-100 Add specific option to expose locally a DB --- .../compose/templates/auto-database.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml index 93fd22ff..9d56ae00 100644 --- a/deployment-configuration/compose/templates/auto-database.yaml +++ b/deployment-configuration/compose/templates/auto-database.yaml @@ -9,6 +9,12 @@ {{- range $port := $db_infos.ports }} - {{ $port.port | quote }} {{- end }} + {{- with .local_expose }} + ports: + {{- range $port := $db_infos.ports }} + - 127.0.0.1:{{ $port.port }}:{{ $port.port }} + {{- end }} + {{- end }} {{- with .resources }} deploy: resources: From 606ee410b21376a6df349c90474b74bca6aa7b58 Mon Sep 17 00:00:00 2001 From: Afonso Pinto Date: Thu, 13 Jun 2024 15:59:58 +0100 Subject: [PATCH 080/210] Update accounts.md --- docs/accounts.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/accounts.md b/docs/accounts.md index 8d07ca4a..5797c719 100644 --- a/docs/accounts.md +++ b/docs/accounts.md @@ -13,7 +13,7 @@ A user account must be provided to access to the MNP secured applications. To put a gatekeeper in front of your application, set `harness/secured` to `true` in the application's values.yaml. -To assign paths and roles, set `uri_role_mapping` as you would do in the [gatekeeper configuration file resources](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/user-guide.md#configuration-options). 
+To assign paths and roles, set `uri_role_mapping` as you would do in the [gatekeeper configuration file resources](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/content/userguide/_index.md#configuration-options). Example: @@ -27,7 +27,7 @@ harness: - administrator ``` -See the [Gogatekeeper official documentation](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/user-guide.md) for more. +See the [Gogatekeeper official documentation](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/content/userguide/_index.md) for more. ## Backend development From 3f786449995289a19ecbad2645dcd00a8a7bb6c6 Mon Sep 17 00:00:00 2001 From: Afonso Pinto Date: Thu, 20 Jun 2024 11:02:00 +0100 Subject: [PATCH 081/210] Update accounts.md --- docs/accounts.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/accounts.md b/docs/accounts.md index 5797c719..dda1e83e 100644 --- a/docs/accounts.md +++ b/docs/accounts.md @@ -27,7 +27,7 @@ harness: - administrator ``` -See the [Gogatekeeper official documentation](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/content/userguide/_index.md) for more. +See the [Gogatekeeper official documentation](https://gogatekeeper.github.io/gatekeeper/userguide) for more. ## Backend development From 0305496fa8b685af2c2b2dac5254534bb096e6c8 Mon Sep 17 00:00:00 2001 From: Diogo Correia Date: Mon, 24 Jun 2024 14:34:51 +0100 Subject: [PATCH 082/210] Fix documentation typos --- README.md | 1 - docs/README.md | 2 +- docs/applications/secrets.md | 2 +- docs/build-deploy/README.md | 2 +- docs/build-deploy/environments.md | 2 +- docs/tutorials/simple-date-clock-application.adoc | 4 ++-- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 459b2929..a1eb15a2 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,6 @@ A JRE is needed to run the code generators based on openapi-generator. For more info, see [here](https://openapi-generator.tech/docs/installation). 
- ## CloudHarness command line tools To use the cli tools, install requirements first: diff --git a/docs/README.md b/docs/README.md index b6a8087f..33c6bb73 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,6 +1,6 @@ # Index of the documentation articles - [Create and run a deployment](./build-deploy/README.md) - - [Create a codefresh continuous deployment](./build-deploy/codefresh.md) + - [Create a codefresh continuous deployment](./build-deploy/ci-cd/codefresh.md) - [Configure the Helm chart](./build-deploy/helm-configuration.md) - [Set up environments](./build-deploy/environments.md) - [Work with local deployments](./build-deploy/local-deploy/README.md) diff --git a/docs/applications/secrets.md b/docs/applications/secrets.md index c7d49143..dfd72256 100644 --- a/docs/applications/secrets.md +++ b/docs/applications/secrets.md @@ -2,7 +2,7 @@ ## What secrets are -Kubernetes Secrets let you store and manage sensitive information, such as passwords, OAuth tokens, and ssh keys. Storing confidential information in a Secret is safer and more flexible than putting it verbatim in a Pod definition or in a container image. See [Secrets design document](https://github.com/kubernetes/community/blob/master/contributors/design-proposals/auth/secrets.md) for more information. +Kubernetes Secrets let you store and manage sensitive information, such as passwords, OAuth tokens, and ssh keys. Storing confidential information in a Secret is safer and more flexible than putting it verbatim in a Pod definition or in a container image. See [Secrets design document](https://github.com/kubernetes/design-proposals-archive/blob/main/auth/secrets.md) for more information. 
**CloudHarness has build-in support for application specific kubernetes secrets.** diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md index 35e5fbdc..c48892d9 100644 --- a/docs/build-deploy/README.md +++ b/docs/build-deploy/README.md @@ -94,7 +94,7 @@ To build and reploy ## Continuous deployment with Codefresh -See [here](./codefresh.md). +See [here](./ci-cd/codefresh.md). ## Relevant files and directory structure Deployment files are automatically generated with the script diff --git a/docs/build-deploy/environments.md b/docs/build-deploy/environments.md index f730b641..6112a2bd 100644 --- a/docs/build-deploy/environments.md +++ b/docs/build-deploy/environments.md @@ -6,7 +6,7 @@ respect to the production build. ## How to set the environment -The environment of the current deployment can ve set with the parameter `--env` (`-e`) +The environment of the current deployment can be set with the parameter `--env` (`-e`) of `harness-deployment`. When the environment is set, specific environment configuration files are included, potentially overriding any value in the system. diff --git a/docs/tutorials/simple-date-clock-application.adoc b/docs/tutorials/simple-date-clock-application.adoc index 8ed52a0e..b5a480e4 100644 --- a/docs/tutorials/simple-date-clock-application.adoc +++ b/docs/tutorials/simple-date-clock-application.adoc @@ -36,7 +36,7 @@ Before installing everything, please be sure you have the following tools instal == Install {ch} (if it is not yet done) {ch} is coded in Python, consequently, it's always better to create a local virtualenv dedicated to the project to avoid messing with your system dependencies. -As shown in the snippet above, we will clone the repository, change directory inside the freshly clone directory and create a virtualenv inside (for this tutorial, the Python's version used is CPython 3.10.6). 
+As shown in the snippet bellow, we will clone the repository, change directory inside the freshly clone directory and create a virtualenv inside (for this tutorial, the Python's version used is CPython 3.10.6). First step is to clone the link:{repo_url}[{ch} repository] on your system. @@ -398,7 +398,7 @@ This will add a new `datetime_controller.py` in the `backend/clockdate/controlle [IMPORTANT] You need to notice that all the controllers files (and all the files) are overridden in the `backend` directory. -To prevent files of being overridden, you need to edit the `.openapi-generator-ignore` file, that acts like a `.gitignore` file (in a way), by marking the files/directories that needs to be ignored by the generation. +To prevent files from being overridden, you need to edit the `.openapi-generator-ignore` file, in Cloud Harness template directory, which acts like a `.gitignore` file (in a way), by marking the files/directories that needs to be ignored by the generation. When we open this file, we get the following controller method: From 3330807ac38424e865c6106cdd13a6d03bdcc303 Mon Sep 17 00:00:00 2001 From: Diogo Correia Date: Tue, 25 Jun 2024 16:41:56 +0100 Subject: [PATCH 083/210] Fix grammar mistake in the developer documentation --- docs/dev.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev.md b/docs/dev.md index 113e0c86..3c080d9b 100644 --- a/docs/dev.md +++ b/docs/dev.md @@ -233,7 +233,7 @@ The skaffold configuration is generated by the `create_skaffold_configuration(.. This function also generates the skaffold entries for the Dockerfiles of the micro-services used in the application. The skaffold generation is based on the [`skaffold-template.yaml`](../deployment-configuration/skaffold-template.yaml) from the CloudHarness project located in [`deployment-configuration`](../deployment-configuration/). 
This base configuration is merged with the configuration dedicated to a specific project and which is located in the `deployment-configuration` folder of the project. -Finally, once all the requiered information are injected in the skaffold configuration dictionnary, the dictionnary is saved as a YAML file in the `deployment/skaffold.yaml` file in the project directory. +Finally, once all the requiered information is injected in the skaffold configuration dictionnary, the dictionnary is saved as a YAML file in the `deployment/skaffold.yaml` file in the project directory. #### How to extend the deployment generation From 24269dd8e9cc8e10a14650f2470988d342ec07e9 Mon Sep 17 00:00:00 2001 From: Diogo Correia Date: Thu, 27 Jun 2024 10:42:29 +0100 Subject: [PATCH 084/210] Update dead links in accounts documentation --- docs/accounts.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/accounts.md b/docs/accounts.md index 8d07ca4a..b85f9f5a 100644 --- a/docs/accounts.md +++ b/docs/accounts.md @@ -13,7 +13,7 @@ A user account must be provided to access to the MNP secured applications. To put a gatekeeper in front of your application, set `harness/secured` to `true` in the application's values.yaml. -To assign paths and roles, set `uri_role_mapping` as you would do in the [gatekeeper configuration file resources](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/user-guide.md#configuration-options). +To assign paths and roles, set `uri_role_mapping` as you would do in the [gatekeeper configuration file resources](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/content/configuration/_index.md). Example: @@ -27,7 +27,7 @@ harness: - administrator ``` -See the [Gogatekeeper official documentation](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/user-guide.md) for more. +See the [Gogatekeeper official documentation](https://github.com/gogatekeeper/gatekeeper/blob/master/docs/content/userguide/_index.md) for more. 
## Backend development From 90eedea0ee398343218e975cc8d7728123f80450 Mon Sep 17 00:00:00 2001 From: Diogo Correia Date: Thu, 27 Jun 2024 17:34:06 +0100 Subject: [PATCH 085/210] Fix previously introduced typo --- docs/tutorials/simple-date-clock-application.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tutorials/simple-date-clock-application.adoc b/docs/tutorials/simple-date-clock-application.adoc index b5a480e4..8df80aaa 100644 --- a/docs/tutorials/simple-date-clock-application.adoc +++ b/docs/tutorials/simple-date-clock-application.adoc @@ -36,7 +36,7 @@ Before installing everything, please be sure you have the following tools instal == Install {ch} (if it is not yet done) {ch} is coded in Python, consequently, it's always better to create a local virtualenv dedicated to the project to avoid messing with your system dependencies. -As shown in the snippet bellow, we will clone the repository, change directory inside the freshly clone directory and create a virtualenv inside (for this tutorial, the Python's version used is CPython 3.10.6). +As shown in the snippet below, we will clone the repository, change directory inside the freshly clone directory and create a virtualenv inside (for this tutorial, the Python's version used is CPython 3.10.6). First step is to clone the link:{repo_url}[{ch} repository] on your system. From 592bb486be5f77edf09b2e0b9d45eb21244c1096 Mon Sep 17 00:00:00 2001 From: Diogo Correia Date: Fri, 28 Jun 2024 17:08:53 +0100 Subject: [PATCH 086/210] Fix dead link to jupyter hook implementation example --- docs/jupyterhub.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/jupyterhub.md b/docs/jupyterhub.md index 709ede5f..e48e101f 100644 --- a/docs/jupyterhub.md +++ b/docs/jupyterhub.md @@ -138,7 +138,7 @@ def change_pod_manifest(self: KubeSpawner): ``` The hook function should be part of a library installable as a pip package. 
-To see a real example, refer to the main [hook implementation](../applications/jupyterhub/src/harness_jupyter/jupyterhub.py). +To see a real example, refer to the main [hook implementation](../applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py). ### Add the hook @@ -178,5 +178,3 @@ to the [values.yaml](../applications/jupyterhub/deploy/values.yaml) file. Cloudharness JupyterHub is integrated with the accounts service so enabling a shared single-sign-on with other applications in the solution. The spawner is also adapted providing a hook to allow other applications to be based on the hub spawner to run with their own configurations. - -Available \ No newline at end of file From e0ba439477a60e64a6bccebb6a57e120e1aaa32a Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Tue, 16 Jul 2024 15:56:46 +0200 Subject: [PATCH 087/210] CH-135 fix notifications build --- applications/notifications/server/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/notifications/server/setup.py b/applications/notifications/server/setup.py index cccc107f..e126c665 100644 --- a/applications/notifications/server/setup.py +++ b/applications/notifications/server/setup.py @@ -14,7 +14,7 @@ # http://pypi.python.org/pypi/setuptools REQUIRES = [ - "jinja2>=3" + "jinja2>=3", "python_dateutil>=2.6.0" ] From bad0be01957a497d633eea18258f31e67e6523ef Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Fri, 26 Jul 2024 19:00:11 +0200 Subject: [PATCH 088/210] CH-140 update python dependencies --- .../cloudharness-common/requirements.txt | 39 ------------------- libraries/cloudharness-common/setup.py | 20 ++++++---- libraries/cloudharness-utils/requirements.txt | 6 --- libraries/cloudharness-utils/setup.py | 8 ++-- libraries/models/requirements.txt | 10 ----- libraries/models/setup.py | 16 +++++--- 6 files changed, 28 insertions(+), 71 deletions(-) diff --git a/libraries/cloudharness-common/requirements.txt 
b/libraries/cloudharness-common/requirements.txt index 6450109d..e69de29b 100644 --- a/libraries/cloudharness-common/requirements.txt +++ b/libraries/cloudharness-common/requirements.txt @@ -1,39 +0,0 @@ -argo-workflows==5.0.0 -blinker==1.7.0 -cachetools==5.3.2 -certifi==2023.11.17 -cffi==1.16.0 -charset-normalizer==3.3.2 -click==8.1.7 -cryptography==42.0.0 -deprecation==2.1.0 -ecdsa==0.18.0 -idna==3.6 -importlib-metadata==7.0.1 -itsdangerous==2.1.2 -Jinja2==3.1.3 -kafka-python==2.0.2 -kubernetes==29.0.0 -MarkupSafe==2.1.3 -oauthlib==3.2.2 -oyaml==1.0 -packaging==23.2 -pyaml==23.12.0 -pyasn1==0.5.1 -pyasn1-modules==0.3.0 -pycparser==2.21 -pyhumps==3.8.0 -PyJWT==2.8.0 -python-dateutil==2.8.2 -python-jose==3.3.0 -python-keycloak==3.7.0 -PyYAML==6.0.1 -requests==2.31.0 -requests-oauthlib==1.3.1 -requests-toolbelt==1.0.0 -rsa==4.9 -sentry-sdk==1.39.2 -six==1.16.0 -urllib3==2.1.0 -Werkzeug==3.0.1 -zipp==3.17.0 diff --git a/libraries/cloudharness-common/setup.py b/libraries/cloudharness-common/setup.py index d3bee95c..8e775bc9 100644 --- a/libraries/cloudharness-common/setup.py +++ b/libraries/cloudharness-common/setup.py @@ -12,21 +12,26 @@ # http://pypi.python.org/pypi/setuptools REQUIREMENTS = [ - 'kubernetes', - 'kafka-python', - 'pyaml', + 'kubernetes >= 29.0.0', + 'PyYAML >= 6.0.1', + 'oyaml >= 1.0', 'pyjwt>=2.6.0', 'cryptography', 'requests>=2.21.0', 'sentry-sdk[flask]>=0.14.4', - 'python-keycloak', + 'python-keycloak >= 3.7.0', 'cloudharness_model', 'argo-workflows==5.0.0', - 'cachetools' + 'cachetools >= 5.3.2', + 'blinker >= 1.7.0', + 'jinja2 >= 3.1.4', + 'kafka-python >= 2.0.2', + 'requests >= 2.31.0', + 'python-dateutil >= 2.8.2', + 'sentry-sdk >= 1.39.2', ] - setup( name=NAME, version=VERSION, @@ -35,7 +40,8 @@ url="", keywords=["cloudharness", "cloud"], install_requires=REQUIREMENTS, - packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), + packages=find_packages( + exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), 
include_package_data=True, package_data={'': ['*.yaml']}, long_description="""\ diff --git a/libraries/cloudharness-utils/requirements.txt b/libraries/cloudharness-utils/requirements.txt index 0e663599..e69de29b 100644 --- a/libraries/cloudharness-utils/requirements.txt +++ b/libraries/cloudharness-utils/requirements.txt @@ -1,6 +0,0 @@ -docker -six -ruamel.yaml -oyaml -schemathesis -cloudharness_model diff --git a/libraries/cloudharness-utils/setup.py b/libraries/cloudharness-utils/setup.py index 38b81ff8..fd994d21 100644 --- a/libraries/cloudharness-utils/setup.py +++ b/libraries/cloudharness-utils/setup.py @@ -21,12 +21,13 @@ REQUIREMENTS = [ 'ruamel.yaml', - 'oyaml', + 'cloudharness_model', + 'docker', + 'ruamel.yaml', 'cloudharness_model' ] - setup( name=NAME, version=VERSION, @@ -35,7 +36,8 @@ url="", keywords=["Cloud", "Kubernetes", "Helm", "Deploy"], install_requires=REQUIREMENTS, - packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), + packages=find_packages( + exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), include_package_data=True, long_description="""\ CloudHarness utils library diff --git a/libraries/models/requirements.txt b/libraries/models/requirements.txt index 183e9115..e69de29b 100644 --- a/libraries/models/requirements.txt +++ b/libraries/models/requirements.txt @@ -1,10 +0,0 @@ -fonttools==4.43.0 -Jinja2==3.1.3 -MarkupSafe==2.1.3 -oyaml==1.0 -psutil==5.9.4 -pyhumps==3.8.0 -python-dateutil==2.8.2 -PyYAML==6.0.1 -six==1.16.0 -swagger_ui_bundle==1.1.0 \ No newline at end of file diff --git a/libraries/models/setup.py b/libraries/models/setup.py index 264ae235..946a10f3 100644 --- a/libraries/models/setup.py +++ b/libraries/models/setup.py @@ -10,11 +10,15 @@ NAME = "cloudharness_model" VERSION = "2.3.0" REQUIREMENTS = [ - "swagger-ui-bundle >= 0.0.2", - "python_dateutil >= 2.6.0", - "setuptools >= 21.0.0", - "pyhumps", - "oyaml" + "Jinja2 >= 3.1.3", + "oyaml >= 1.0", + "psutil >= 5.9.4", + "pyhumps >= 3.8.0", + 
"python-dateutil >= 2.8.2", + "PyYAML >= 6.0.1", + "six >= 1.16.0", + "swagger_ui_bundle >= 1.1.0", ] print(REQUIREMENTS) -setup(name=NAME, version=VERSION, install_requires=REQUIREMENTS, packages=find_packages(),) +setup(name=NAME, version=VERSION, + install_requires=REQUIREMENTS, packages=find_packages(),) From 11c9fe57c844628d4219c1c524c350f49238d60c Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Fri, 26 Jul 2024 19:02:27 +0200 Subject: [PATCH 089/210] CH-17 Update do Python 3.12 --- .../base-images/cloudharness-base-debian/Dockerfile | 2 +- infrastructure/base-images/cloudharness-base/Dockerfile | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/infrastructure/base-images/cloudharness-base-debian/Dockerfile b/infrastructure/base-images/cloudharness-base-debian/Dockerfile index 18975b71..5b600d8a 100644 --- a/infrastructure/base-images/cloudharness-base-debian/Dockerfile +++ b/infrastructure/base-images/cloudharness-base-debian/Dockerfile @@ -1,4 +1,4 @@ -ARG PARENT=python:3.9.10 +ARG PARENT=python:3.12 FROM ${PARENT} RUN apt-get update && apt-get install -y nfs-common && rm -rf /var/lib/apt/lists/* diff --git a/infrastructure/base-images/cloudharness-base/Dockerfile b/infrastructure/base-images/cloudharness-base/Dockerfile index 782c7d55..ac7f94ac 100644 --- a/infrastructure/base-images/cloudharness-base/Dockerfile +++ b/infrastructure/base-images/cloudharness-base/Dockerfile @@ -1,4 +1,4 @@ -ARG PARENT=python:3.9.10-alpine +ARG PARENT=python:3.12-alpine FROM ${PARENT} RUN apk update @@ -11,7 +11,7 @@ RUN apk add gcc libc-dev g++ python3-dev libffi-dev openssl-dev rust musl-dev ca RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ pip install pytest --prefer-binary - + COPY libraries/models/requirements.txt /libraries/models/requirements.txt RUN --mount=type=cache,target=/root/.cache python -m pip install --upgrade pip &&\ pip install -r /libraries/models/requirements.txt --prefer-binary From 
2809262fce6a0faaabe0aaa27718ae1d72c9385a Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Fri, 26 Jul 2024 19:02:45 +0200 Subject: [PATCH 090/210] CH-17 Update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a1eb15a2..67588d75 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ Python 3.9 must be installed. It is recommended to setup a virtual environment. With conda: ```bash -conda create --name ch python=3.9 +conda create --name ch python=3.12 conda activate ch ``` From 102faf7b0ef7d8e1f62927cd15461b50f7915039 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Fri, 26 Jul 2024 19:05:39 +0200 Subject: [PATCH 091/210] CH-66 fix default value --- deployment-configuration/value-template.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deployment-configuration/value-template.yaml b/deployment-configuration/value-template.yaml index 8702e6f6..c04439a3 100644 --- a/deployment-configuration/value-template.yaml +++ b/deployment-configuration/value-template.yaml @@ -55,7 +55,7 @@ harness: # -- Service port. port: 80 # -- Auto generated secrets key-value pairs. If no value is provided, a random hash is generated - secrets: + secrets: {} # -- Specify which services this application uses in the frontend to create proxy ingresses. e.g. 
- name: mnp-checkout use_services: [] # -- enabled sentry for automated error report @@ -136,4 +136,4 @@ harness: keepalive: payload: # -- Maximum size of payload in MB - max: \ No newline at end of file + max: From 82c254d84ec5c02a56777972cd6ad2570a883cdc Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Fri, 26 Jul 2024 19:08:56 +0200 Subject: [PATCH 092/210] CH-121 WIP: use vite on samples app --- applications/samples/frontend/.babelrc | 20 - applications/samples/frontend/.eslintignore | 1 - applications/samples/frontend/.eslintrc.cjs | 19 + applications/samples/frontend/.eslintrc.yml | 49 - applications/samples/frontend/.gitignore | 24 +- applications/samples/frontend/README.md | 31 +- applications/samples/frontend/index.html | 16 + applications/samples/frontend/package.json | 76 +- .../samples/frontend/public/favicon.png | Bin 0 -> 5899 bytes .../{src/assets/icon.png => public/logo.png} | Bin applications/samples/frontend/src/App.tsx | 11 +- .../frontend/src/components/RestTest.tsx | 9 +- .../frontend/src/components/Version.tsx | 4 +- applications/samples/frontend/src/index.css | 5 + applications/samples/frontend/src/index.ejs | 10 - applications/samples/frontend/src/index.tsx | 6 - applications/samples/frontend/src/main.tsx | 10 + .../samples/frontend/src/styles/style.less | 4 - .../samples/frontend/src/vite-env.d.ts | 1 + .../samples/frontend/tsconfig.app.json | 27 + applications/samples/frontend/tsconfig.json | 29 +- .../samples/frontend/tsconfig.node.json | 13 + applications/samples/frontend/vite.config.ts | 32 + .../samples/frontend/webpack.config.dev.js | 48 - .../samples/frontend/webpack.config.js | 124 - applications/samples/frontend/yarn.lock | 8484 +++-------------- 26 files changed, 1621 insertions(+), 7432 deletions(-) delete mode 100644 applications/samples/frontend/.babelrc delete mode 100644 applications/samples/frontend/.eslintignore create mode 100644 applications/samples/frontend/.eslintrc.cjs delete mode 100644 
applications/samples/frontend/.eslintrc.yml create mode 100644 applications/samples/frontend/index.html create mode 100644 applications/samples/frontend/public/favicon.png rename applications/samples/frontend/{src/assets/icon.png => public/logo.png} (100%) create mode 100644 applications/samples/frontend/src/index.css delete mode 100644 applications/samples/frontend/src/index.ejs delete mode 100644 applications/samples/frontend/src/index.tsx create mode 100644 applications/samples/frontend/src/main.tsx delete mode 100644 applications/samples/frontend/src/styles/style.less create mode 100644 applications/samples/frontend/src/vite-env.d.ts create mode 100644 applications/samples/frontend/tsconfig.app.json mode change 100755 => 100644 applications/samples/frontend/tsconfig.json create mode 100644 applications/samples/frontend/tsconfig.node.json create mode 100644 applications/samples/frontend/vite.config.ts delete mode 100644 applications/samples/frontend/webpack.config.dev.js delete mode 100644 applications/samples/frontend/webpack.config.js diff --git a/applications/samples/frontend/.babelrc b/applications/samples/frontend/.babelrc deleted file mode 100644 index 2b53150f..00000000 --- a/applications/samples/frontend/.babelrc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "presets": [ - "@babel/preset-env", - "@babel/preset-react" - ], - "env": { - "production":{ - "presets": ["minify"] - } - }, - "plugins": [ - "@babel/transform-regenerator", - "@babel/plugin-proposal-class-properties", - [ - "module-resolver", { - "root": ["./src"] - } - ] - ] -} \ No newline at end of file diff --git a/applications/samples/frontend/.eslintignore b/applications/samples/frontend/.eslintignore deleted file mode 100644 index 77738287..00000000 --- a/applications/samples/frontend/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -dist/ \ No newline at end of file diff --git a/applications/samples/frontend/.eslintrc.cjs b/applications/samples/frontend/.eslintrc.cjs new file mode 100644 index 
00000000..ba9dc0ed --- /dev/null +++ b/applications/samples/frontend/.eslintrc.cjs @@ -0,0 +1,19 @@ +module.exports = { + root: true, + env: { browser: true, es2020: true }, + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:react-hooks/recommended', + ], + ignorePatterns: ['dist', '.eslintrc.cjs', 'rest'], + parser: '@typescript-eslint/parser', + plugins: ['react-refresh'], + rules: { + 'react-refresh/only-export-components': [ + 'warn', + { allowConstantExport: true }, + ], + '' + } +} diff --git a/applications/samples/frontend/.eslintrc.yml b/applications/samples/frontend/.eslintrc.yml deleted file mode 100644 index 68fb53e5..00000000 --- a/applications/samples/frontend/.eslintrc.yml +++ /dev/null @@ -1,49 +0,0 @@ ---- -root: true -extends: - - eslint:recommended - - plugin:react/recommended -parser: 'babel-eslint' -parserOptions: - ecmaFeatures: - jsx: true -plugins: - - jest - - react - - react-hooks -settings: - react: - version: detect -env: - jest/globals: true - browser: true - es6: true -rules: - no-console: 0 - func-style: 2 - consistent-return: 2 - prefer-arrow-callback: - - 2 - - allowNamedFunctions: false - allowUnboundThis: false - jest/no-disabled-tests: 2 - jest/no-focused-tests: 2 - react/prop-types: 0 - react/forbid-prop-types: 0 - react/no-unused-prop-types: 0 - react-hooks/rules-of-hooks: 2 - react-hooks/exhaustive-deps: 1 - curly: 2 - no-tabs: 2 - arrow-spacing: 2 - no-unneeded-ternary: 2 - object-curly-spacing: - - 2 - - always - indent: - - 2 - - 2 - - SwitchCase: 1 -globals: - __dirname: writable - module: writable \ No newline at end of file diff --git a/applications/samples/frontend/.gitignore b/applications/samples/frontend/.gitignore index 149b5765..a547bf36 100644 --- a/applications/samples/frontend/.gitignore +++ b/applications/samples/frontend/.gitignore @@ -1,4 +1,24 @@ -wwwroot/*.js +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + 
node_modules -typings dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/applications/samples/frontend/README.md b/applications/samples/frontend/README.md index 7de3e72e..e1cdc89d 100644 --- a/applications/samples/frontend/README.md +++ b/applications/samples/frontend/README.md @@ -1 +1,30 @@ -# samples +# React + TypeScript + Vite + +This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. + +Currently, two official plugins are available: + +- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh +- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend updating the configuration to enable type aware lint rules: + +- Configure the top-level `parserOptions` property like this: + +```js +export default { + // other rules... 
+ parserOptions: { + ecmaVersion: 'latest', + sourceType: 'module', + project: ['./tsconfig.json', './tsconfig.node.json', './tsconfig.app.json'], + tsconfigRootDir: __dirname, + }, +} +``` + +- Replace `plugin:@typescript-eslint/recommended` to `plugin:@typescript-eslint/recommended-type-checked` or `plugin:@typescript-eslint/strict-type-checked` +- Optionally add `plugin:@typescript-eslint/stylistic-type-checked` +- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and add `plugin:react/recommended` & `plugin:react/jsx-runtime` to the `extends` list diff --git a/applications/samples/frontend/index.html b/applications/samples/frontend/index.html new file mode 100644 index 00000000..d0f66997 --- /dev/null +++ b/applications/samples/frontend/index.html @@ -0,0 +1,16 @@ + + + + + + + + CloudHarness sample application + + + +
+ + + + \ No newline at end of file diff --git a/applications/samples/frontend/package.json b/applications/samples/frontend/package.json index 2ebb4f2c..9ccdc4a6 100644 --- a/applications/samples/frontend/package.json +++ b/applications/samples/frontend/package.json @@ -1,60 +1,32 @@ { - "name": "samples", - "version": "1.0.0", - "description": "", - "main": "index.js", + "name": "frontend", + "private": true, + "version": "0.0.0", + "type": "module", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1", - "prebuild": "eslint . --color", - "build": "webpack --config webpack.config.js", - "build-dev": "webpack --config webpack.config.js --env mode=development", - "prestart": "eslint . --color --fix", - "start": "webpack serve --progress --env DOMAIN=http://localhost:5000 --config webpack.config.dev.js", - "start:dev": "webpack serve --progress --env.DOMAIN=https://samples.cloudharness.metacell.us --config webpack.config.dev.js", - "start:local": "webpack serve --progress --env DOMAIN=http://samples.ch.local --config webpack.config.dev.js" + "dev": "vite", + "start": "DOMAIN=http://localhost:5000 vite", + "start:dev": "DOMAIN=https://test.ch.metacell.us vite", + "start:local": "DOMAIN=http://samples.ch vite", + "build": "tsc -b && vite build", + "lint": "eslint . 
--ext ts,tsx --report-unused-disable-directives --max-warnings 0", + "preview": "vite preview" }, - "author": "", - "license": "MIT", "dependencies": { - "axios": "^1.6.0", - "react": "^16.12.0", - "react-dom": "^16.12.0", - "react-router": "^5.0.0", - "react-router-dom": "^5.0.0", - "use-clipboard-copy": "^0.1.2" + "axios": "^1.7.2", + "react": "^18.3.1", + "react-dom": "^18.3.1" }, "devDependencies": { - "@babel/core": "^7.8.3", - "@babel/plugin-proposal-class-properties": "^7.8.3", - "@babel/preset-env": "^7.8.3", - "@babel/preset-react": "^7.8.3", - "babel-eslint": "^10.1.0", - "babel-loader": "^8.0.6", - "babel-plugin-module-resolver": "^4.0.0", - "babel-preset-minify": "^0.5.1", - "clean-webpack-plugin": "^3.0.0", - "compression-webpack-plugin": "^7.1.2", - "copy-webpack-plugin": "^6.2.1", - "css-loader": "^5.2.4", - "dotenv": "^16.0.2", - "eslint": "5.16.0", - "eslint-plugin-jest": "^23.8.2", - "eslint-plugin-react": "^7.19.0", - "eslint-plugin-react-hooks": "^3.0.0", - "file-loader": "^5.0.2", - "html-loader": "^0.5.5", - "html-webpack-plugin": "^5.5.0", - "image-webpack-loader": "^8.1.0", - "less": "^3.10.3", - "less-loader": "^6.1.2", - "less-vars-to-js": "^1.3.0", - "raw-loader": "^4.0.2", - "style-loader": "^1.1.3", - "ts-loader": "^9.0.0", - "typescript": "^4.8.3", - "webpack": "^5.61.0", - "webpack-cli": "^4.6.0", - "webpack-dev-server": "^4.5.0", - "webpack-merge": "^5.7.0" + "@types/react": "^18.3.3", + "@types/react-dom": "^18.3.0", + "@typescript-eslint/eslint-plugin": "^7.15.0", + "@typescript-eslint/parser": "^7.15.0", + "@vitejs/plugin-react": "^4.3.1", + "eslint": "^8.57.0", + "eslint-plugin-react-hooks": "^4.6.2", + "eslint-plugin-react-refresh": "^0.4.7", + "typescript": "^5.2.2", + "vite": "^5.3.4" } } diff --git a/applications/samples/frontend/public/favicon.png b/applications/samples/frontend/public/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..93c13b76c7b9432da2e43fcc23c6094538c528c7 GIT binary patch 
literal 5899 zcmcIo_d6S2)Gvyn=rF1(+SnS|zrS+N<`cs9hsL z5w#LE5=8U*zVF}g-sd^@Joj_YbIuR~`tsUTa(VUAe->_n%SdnDXK;K?*-pEww8(?a-_bD z`l9nx^;rI9J+uPG8lChS#v>8Dw*Fx(Ne zm}sT^!>WJM#IYq`a_F)duBpE?Br5u`s>oi72kiVW3>(z=s&!_hnZrj8e1qb$=KuS8 z@`**Y6us|X5N7Y!)woYfy$R3{1-tS+2jSmk=S_L>2)i%Bq%>MR1U#p8KUGdJ}ps^)0?7oZ9;vLd30iV-0*KeFB zO89_?{&V;!0QaU&@=ISM9&%mu7s0}fQ(&5Mn5G>EN;~80Up*6TtWP4R7~>{Su&d7E zwuaz%d4rZYBjJ^cmr1j=)^j z<1d-h!yCVfGkKp8^0kmGR1R;u1HU=a<#)BopG8b=Gx|TC7%XKWzNG4$A21o5E-ZYh zv;G>blElisHH-VK&gZ|S>8iz>)f|qEuLPDR+O!i7uUPd$3Med&wyO0hJ;E2KnzT$U zjy0%Bv;VLTt;!NDA}T2!~63Zbgfx)?%n6MMAk9p!KZf3J-Y4MJ&~RD;aYVTBUSa#dXI8PYd-7b zFFw30H+KoX39UAI(UPeWc%{@H)vje{&R4uEP4?>r6V3b1?YGE-FMd;Lgo94{WZIK0 zpf~kYyc*dOMl;pE;a>7vr3oxIyWwt^sQ#St-!RMJ9R5*`j2_&UkQjnT05@gKla$}G z@6J0J-NIrV@<3;wJR`IMEkI{t59PdXwZ0#AO%sh+SIIIq-aW?#$fGsO8T`(`{On z1d~6tHqFNcZvKgEnM}mA=){mPPQv)W1u30WO0`BA1aPSyV z&KwJgA;EKET=*R5X)T@0PgeEUoOAor=#NO(;F@jf$ADa2b@gSLdk@WqI({P5+^ zQcC4?%>nK#8_6(o|AY>w50ioT{AuY<{$N63lVri5HlFruoBzfv@n{uPs)4wg-y%Opz$rz-|Vvkv7N|7<(JP?KSvrT%VGn zapvC)t{THB0r|gD80BK>F}nj-yHo48ZvEhdH_^DT z7-1Z)C=}CDb?Pg{@Zogp{7&u_D6!-VFTyi%qWhaitZql@^|~F|$IvnwC0v_XA3CoI z-Fj;`S6JjM%Qt1^{fpY}f>;|=On}P$CQjv3xlzJ4?LVWNezAKd>}>BxcIAIQ50x+6 z+!Tp~-L*%p*j|n6qrTAZJ&TQ%OYz%m7;Qj1d9?7Fx>SQYlrWF)XX*4sKFW0>6ARyN z#)3%MMegVO8@c5(`!%oF@`ODx59YDG@O~CIWz4yX0WC9ZsxRC^7gQ#=YqA)fsDw!k zHSo(E7n+8^YIpT_d*1)H{aKn2U22t`isaU+m)pf*rp^%R;MHFBPN^D1S5U-h0rKL%7P3}i$qyz;E`tMC%;88^S~Ky#C}fJyXQ7Xr2`+CPD!DiFUTCBW$&XJy*M!Z3~PtTJ0t z%n5ppFox?bhe9lR88y`tO!!DPUnz_qqMIaR*%rMJnt0y=%0fL zkULAOK|2u0hg3jq;({iRDoSLtkR4}!be+N|g0u3u%ojF627|a7VMP&jvbX3T{&Zvl zx$Nw~0YCTdm`tPIr=bN+Cu-%+t3EpcW!sg36ev15 zmQ~Ltwd%i${^Lvfv zgZ|!TrFGwKR&__yFOFJN4=z!+BOJM75Pz7k5Jh-(kNtM-%&v#^WW@4VyI*X zKJ2Y`W|PrvpZTr3ZRnZ;$A)Vk31kSj2?7>;_ORHGCr7WUyyuIPsg;)=`fE~H!N?6{ zHoP^kl}lP2DK!`vh$u@JZmY5bB%-9t2s44FrGlZ}PA$+`_WBc`7bEn!WJ0SVvk&9( zw5^4nxN0{Lba=z^blNw!X|JsGQlb3rzYHy_0ui!WV^N?eQM~=moYTSG2u|RE z8~fJbPlCchTe!v21#Jbyk!Kd4Z^3}TdYG)YR_6H|4B9|X`|%vY;P$|N!SCz_pFef^ 
z6Q#hw^>B(XOjDek>=L({P`U}F>E+|n_out1ezKvE+id2F(1$+bhH0lZtzRbNa-BZjsZUD| z&#Xv*BkZDf>MN>m1$i5$@7*f^MRWR9JpHqZtZVV53`F#feRoC0y5fAol1}hFC$!bc z;n=FWhi>g>f)7ggLr#{K8iQG+F)oeK%o*36yh-8QY)&^|0_QM!zO+V`RN-45OgfLp z-AiMN##YlOlLU*LOL`^#6cg(&4*?!OW!IKn!p&c+iX)|S^NLGOL> z9L0frKJv>i0tX{{?01p682=c|&tTG@gq=Qw8U|NXfm;7#-N2P6D{aH@C{t8E0}R(Y z4M0~@(sI#fU&%E8ngX&QqkXGv^f+1TPHdehGq-B2F?W`uQG#E-N>u9?PVi#-(bH(( zt&hyv>7ggKRy3VPh5DAGEeyoJKR%m0?r7a92pi01Oce=K&+VwBAtZL)Q7AOmz7GY{ zR4TlUv^cI=oc|&1IJ#xnHW+Lb67*f{y-s@|8pnld*!wo&bL$_qecLkx*0qpQ-?59# z$i21t*(9RUL-_bMu>7&4OOkeC9duCmmyGRrK9r+)!uj^&Um`*DO`jjE2*AwVWpm9c zE~(#{Gc$H>k6{Z|9M0yY>Kw|~<9!~$wv){ySjSmm-*_Ru(q&96VQfzOwZ$J5p>JHLvc-@q6XvE*4 zHJ#Ub6eYx`_u!ORHqwW>#s~&klr#cMFKU zJ>{@tM@LMS?$dY2LRlA-fvHk7Uc)@-F?r&-sbjmbNEzdR|6kLZNKFV?PYL}h?lm9l~ z0jRQa?}{wt3^4`dSwH_peX6HjOsyE0?D|!RI314Vy%pD_w(LM?`rh5V%6GTO%uVsF zpNNHeVhG(U*&X*FkM2R+_QSYSl4MU_ik{3dGtaO z4vKCOnV)Mu&So$PweHh^=4RN#Vl@*=zm?vWnHe6up3$-~W2sL0AETP~bX3R&ES&HGt3AN?&D9Zc` z2>A@j|I=-*pC;5aVtgrMH=%USn=t_G!Bg#^BXQO;W*q9i8t$eReeyeG1(V$z#ScmB zRBJ3w4^G$rV%Fw~Dm_}O_Vf*ghqEg;)=ItZA)~3u_n-M}aHh|qM$tU!DEM?@j*V97 zsS=w*dKV$Ah_An*qeIjXeGpjA*;?K(L4%^Q0PbCKPTS&z()GxYa;_e9B-Ug{?aw3y z>f1>z^DB@F@ zrR0O;>=km6$B=1BAMWRR-3`?jxu~nSTP31$1N%BcVkdSu+pCXgeF_wcVoMOuD~1#5 zCFYQ8Mb}}GET_9IDi7UI31jswhg<9pNML{gyiUu&IeGR>GUH3?@e&~wgGs%%?d;%X$5RcMUvZD2Ez`kcqNq(>* zA2l$iELoPxZ?9$mS!x6#89Z(hjZOFbh`v02XYu!?uC@s(Rnv!1O3v@|f7@M)`wVLO zPv*GWbPXmkJ}T#~@QyjSQ(2d;6Ico>az&*xycOcA^ROL5cqK6Z(KhE^|BAKmSaxxwA5;%XdOo6IBtEczAiFQ7HE|~t{+IN~wQ%_FTC#G4N**rKVMMK3ls%pOK*|SXQEZ2^nU7C3;?#ofiI_qEw^&Yj7bWKL$#hdPW5+|MoEB!9`_O8&lj; zhyrhEXi31)Sq=0VmFE~dZKU4sXSE_vPsESPeOzh?fHn0xu@E6MML}L5h>u)Y!?9`R z`vKq<{!V0BQ=i=0QR*J3v9!#)TwJlegtzqPOqI12i6BnY0o@FiLJ`)T#|9q`E&eRO zWnbsHaacZi=79+{!g#Z?GD>h|M2O{i z+lFVK!iiGFTk(~$)y$r|7$fr ( <> - +

Sample React application is working!

See api documentation here

-); + ) + -export default Main; \ No newline at end of file +export default Main; diff --git a/applications/samples/frontend/src/components/RestTest.tsx b/applications/samples/frontend/src/components/RestTest.tsx index 3fbaa933..5732a17a 100644 --- a/applications/samples/frontend/src/components/RestTest.tsx +++ b/applications/samples/frontend/src/components/RestTest.tsx @@ -1,13 +1,16 @@ -import React, { useState, useEffect } from 'react'; +import { useState, useEffect } from 'react'; import { TestApi } from '../rest/api' +import { AxiosResponse } from 'axios'; const test = new TestApi(); + + const RestTest = () => { - const [result, setResult] = useState(null); + const [result, setResult] = useState(null); useEffect(() => { - test.ping().then(r => setResult(r), () => setResult({ data: "API error"})); + test.ping().then((r: AxiosResponse) => setResult(r), () => setResult({ data: "API error"})); }, []); diff --git a/applications/samples/frontend/src/components/Version.tsx b/applications/samples/frontend/src/components/Version.tsx index 2b5f5eb3..279e0aca 100644 --- a/applications/samples/frontend/src/components/Version.tsx +++ b/applications/samples/frontend/src/components/Version.tsx @@ -1,9 +1,9 @@ -import React, { useState, useEffect } from 'react'; +import { useState, useEffect } from 'react'; const Version = () => { - const [result, setResult] = useState(null); + const [result, setResult] = useState(null); useEffect(() => { fetch("/proxy/common/api/version", { headers: { diff --git a/applications/samples/frontend/src/index.css b/applications/samples/frontend/src/index.css new file mode 100644 index 00000000..7466f905 --- /dev/null +++ b/applications/samples/frontend/src/index.css @@ -0,0 +1,5 @@ +body { + text-align: center; + background-color: '#eeeeee'; + font-family: Roboto, Helvetica, sans-serif; +} \ No newline at end of file diff --git a/applications/samples/frontend/src/index.ejs b/applications/samples/frontend/src/index.ejs deleted file mode 100644 
index 4cd05b48..00000000 --- a/applications/samples/frontend/src/index.ejs +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Samples - - -
- - \ No newline at end of file diff --git a/applications/samples/frontend/src/index.tsx b/applications/samples/frontend/src/index.tsx deleted file mode 100644 index ae31e413..00000000 --- a/applications/samples/frontend/src/index.tsx +++ /dev/null @@ -1,6 +0,0 @@ -import React from 'react'; -import ReactDOM from 'react-dom'; - -import App from './App'; - -ReactDOM.render(, document.getElementById('root')); diff --git a/applications/samples/frontend/src/main.tsx b/applications/samples/frontend/src/main.tsx new file mode 100644 index 00000000..3d7150da --- /dev/null +++ b/applications/samples/frontend/src/main.tsx @@ -0,0 +1,10 @@ +import React from 'react' +import ReactDOM from 'react-dom/client' +import App from './App.tsx' +import './index.css' + +ReactDOM.createRoot(document.getElementById('root')!).render( + + + , +) diff --git a/applications/samples/frontend/src/styles/style.less b/applications/samples/frontend/src/styles/style.less deleted file mode 100644 index b1bece6a..00000000 --- a/applications/samples/frontend/src/styles/style.less +++ /dev/null @@ -1,4 +0,0 @@ -body { - text-align: center; - background-color: '#eeeeee'; -} \ No newline at end of file diff --git a/applications/samples/frontend/src/vite-env.d.ts b/applications/samples/frontend/src/vite-env.d.ts new file mode 100644 index 00000000..11f02fe2 --- /dev/null +++ b/applications/samples/frontend/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/applications/samples/frontend/tsconfig.app.json b/applications/samples/frontend/tsconfig.app.json new file mode 100644 index 00000000..d739292a --- /dev/null +++ b/applications/samples/frontend/tsconfig.app.json @@ -0,0 +1,27 @@ +{ + "compilerOptions": { + "composite": true, + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "target": "ES2020", + "useDefineForClassFields": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + 
"allowImportingTsExtensions": true, + "resolveJsonModule": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true + }, + "include": ["src"] +} diff --git a/applications/samples/frontend/tsconfig.json b/applications/samples/frontend/tsconfig.json old mode 100755 new mode 100644 index 21ec6c41..ea9d0cd8 --- a/applications/samples/frontend/tsconfig.json +++ b/applications/samples/frontend/tsconfig.json @@ -1,22 +1,11 @@ { - "compilerOptions": { - "outDir": "./dist/", - "sourceMap": true, - "noImplicitAny": false, - "module": "commonjs", - "target": "es2017", - "stripInternal":true, - "skipLibCheck": true, - "alwaysStrict":false, - "forceConsistentCasingInFileNames": true, - "noImplicitReturns": false, - "strict": false, - "noUnusedLocals": false, - "jsx": "react", - "types": [], - "esModuleInterop": true - }, - "exclude": [ - "node_modules" + "files": [], + "references": [ + { + "path": "./tsconfig.app.json" + }, + { + "path": "./tsconfig.node.json" + } ] -} \ No newline at end of file +} diff --git a/applications/samples/frontend/tsconfig.node.json b/applications/samples/frontend/tsconfig.node.json new file mode 100644 index 00000000..3afdd6e3 --- /dev/null +++ b/applications/samples/frontend/tsconfig.node.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "composite": true, + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "skipLibCheck": true, + "module": "ESNext", + "moduleResolution": "bundler", + "allowSyntheticDefaultImports": true, + "strict": true, + "noEmit": true + }, + "include": ["vite.config.ts"] +} diff --git a/applications/samples/frontend/vite.config.ts b/applications/samples/frontend/vite.config.ts new file mode 100644 index 00000000..71663236 --- /dev/null +++ b/applications/samples/frontend/vite.config.ts @@ -0,0 +1,32 @@ +import { defineConfig, loadEnv 
} from 'vite' +import react from '@vitejs/plugin-react' + + + +// https://vitejs.dev/config/ +export default defineConfig(({ mode }) => { + // Load env file based on `mode` in the current working directory. + // Set the third parameter to '' to load all env regardless of the `VITE_` prefix. + const env = loadEnv(mode, process.cwd(), '') + + const theDomain = env && env.DOMAIN ? env.DOMAIN : 'localhost:5000'; + + console.log('Dev server address: ', theDomain); + + const proxyTarget = theDomain; + const replaceHost = (uri: string, appName: string) => (uri.includes("samples") && uri.replace("samples", appName + '.' + theDomain)) || uri; + + + return { + plugins: [react()], + server: { + port: 9000, + proxy: { + '/api/': { + target: replaceHost( proxyTarget, 'samples'), + secure: false, + changeOrigin: true, + } + } +}}} +) diff --git a/applications/samples/frontend/webpack.config.dev.js b/applications/samples/frontend/webpack.config.dev.js deleted file mode 100644 index a36b5bd5..00000000 --- a/applications/samples/frontend/webpack.config.dev.js +++ /dev/null @@ -1,48 +0,0 @@ -const { merge } = require('webpack-merge'); -const common = require('./webpack.config.js'); - -var path = require('path'); - -const PORT = 9000; - - -module.exports = env => { - - const theDomain = env && env.DOMAIN ? env.DOMAIN : 'localhost:5000'; - - console.log('Dev server address: ', theDomain); - - const proxyTarget = theDomain; - const replaceHost = (uri, appName) => (uri.includes("samples") && uri.replace("samples", appName + '.' 
+ theDomain)) || uri; - if (!env.port) { - env.devPort = PORT; - } - - - const devServer = { - static: [{ - directory: path.resolve(__dirname, 'dist'), - publicPath: '/', - }], - compress: true, - https: env.DOMAIN.includes("https"), - port: Number(env.devPort), - historyApiFallback: true, - proxy: { - '/api/': { - target: replaceHost( proxyTarget, 'samples'), - secure: false, - changeOrigin: true, - } - }, - }; - - return merge( - common(env), - { - mode: 'development', - devtool: 'source-map', - devServer, - } - ) -}; diff --git a/applications/samples/frontend/webpack.config.js b/applications/samples/frontend/webpack.config.js deleted file mode 100644 index 69f80cb9..00000000 --- a/applications/samples/frontend/webpack.config.js +++ /dev/null @@ -1,124 +0,0 @@ -const path = require("path"); -const HtmlWebpackPlugin = require("html-webpack-plugin"); -const CompressionPlugin = require("compression-webpack-plugin"); -const { CleanWebpackPlugin } = require("clean-webpack-plugin"); -const CopyWebpackPlugin = require("copy-webpack-plugin"); - -const copyPaths = [ - { from: path.resolve(__dirname, "src/assets"), to: "assets" }, -]; - -module.exports = function webpacking(envVariables) { - let env = envVariables; - if (!env) { - env = {}; - } - if (!env.mode) { - env.mode = "production"; - } - - console.log("####################"); - console.log("####################"); - console.log("BUILD bundle with parameters:"); - console.log(env); - console.log("####################"); - console.log("####################"); - - const { mode } = env; - const devtool = "source-map"; - - const output = { - path: path.resolve(__dirname, "dist"), - filename: "js/[name].[contenthash].js", - publicPath: "/" - }; - - const module = { - rules: [ - { - test: /\.(js|jsx)$/, - exclude: /node_modules/, - loader: "babel-loader", - }, - { - test: /\.ts(x?)$/, - include: path.resolve(__dirname, 'src'), - use: [ - { - loader: "ts-loader", - options: { - transpileOnly: true, - }, - } - ] - }, - { - 
test: /\.(css)$/, - use: [ - { - loader: "style-loader", - }, - { - loader: "css-loader", - }, - ], - }, - { - test: /\.less$/, - use: [ - { - loader: "style-loader", - }, - { - loader: "css-loader", - }, - { - loader: "less-loader", - options: { - lessOptions: { - strictMath: true, - }, - }, - }, - ], - }, - { - test: /\.(png|jpg|gif|eot|woff|woff2|svg|ttf)$/, - use: [ - "file-loader", - { - loader: "image-webpack-loader", - options: { - bypassOnDebug: true, // webpack@1.x - disable: true, // webpack@2.x and newer - }, - }, - ], - }, - ], - }; - - const resolve = { - extensions: ["*", ".js", ".json", ".ts", ".tsx", ".jsx"], - symlinks: false, - }; - - const plugins = [ - new CleanWebpackPlugin(), - new CopyWebpackPlugin({ patterns: copyPaths }), - new CompressionPlugin(), - new HtmlWebpackPlugin({ - template: "src/index.ejs", - favicon: path.join(__dirname, "src/assets/icon.png"), - }), - ]; - - return { - mode, - devtool, - output, - module, - resolve, - plugins, - }; -}; diff --git a/applications/samples/frontend/yarn.lock b/applications/samples/frontend/yarn.lock index 44bbec56..eae974cc 100644 --- a/applications/samples/frontend/yarn.lock +++ b/applications/samples/frontend/yarn.lock @@ -3,7394 +3,1710 @@ "@ampproject/remapping@^2.2.0": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" - integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== + version "2.3.0" + resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": - version "7.23.5" 
- resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" - integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== +"@babel/code-frame@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz" + integrity sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA== dependencies: - "@babel/highlight" "^7.23.4" - chalk "^2.4.2" + "@babel/highlight" "^7.24.7" + picocolors "^1.0.0" -"@babel/compat-data@^7.22.6", "@babel/compat-data@^7.23.3", "@babel/compat-data@^7.23.5": - version "7.23.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" - integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== +"@babel/compat-data@^7.24.8": + version "7.24.9" + resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.9.tgz" + integrity sha512-e701mcfApCJqMMueQI0Fb68Amflj83+dvAvHawoBpAz+GDjCIyGHzNwnefjsWJ3xiYAqqiQFoWbspGYBdb2/ng== -"@babel/core@^7.8.3": - version "7.23.7" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.7.tgz#4d8016e06a14b5f92530a13ed0561730b5c6483f" - integrity sha512-+UpDgowcmqe36d4NwqvKsyPMlOLNGMsfMmQ5WGCu+siCe3t3dfe9njrzGfdN4qq+bcNUt0+Vw6haRxBOycs4dw== +"@babel/core@^7.24.5": + version "7.24.9" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.24.9.tgz#dc07c9d307162c97fa9484ea997ade65841c7c82" + integrity sha512-5e3FI4Q3M3Pbr21+5xJwCv6ZT6KmGkI0vw3Tozy5ODAQFTIWe37iT8Cr7Ice2Ntb+M3iSKCEWMB1MBgKrW3whg== dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-compilation-targets" "^7.23.6" - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helpers" "^7.23.7" - "@babel/parser" "^7.23.6" - "@babel/template" "^7.22.15" - 
"@babel/traverse" "^7.23.7" - "@babel/types" "^7.23.6" + "@babel/code-frame" "^7.24.7" + "@babel/generator" "^7.24.9" + "@babel/helper-compilation-targets" "^7.24.8" + "@babel/helper-module-transforms" "^7.24.9" + "@babel/helpers" "^7.24.8" + "@babel/parser" "^7.24.8" + "@babel/template" "^7.24.7" + "@babel/traverse" "^7.24.8" + "@babel/types" "^7.24.9" convert-source-map "^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" json5 "^2.2.3" semver "^6.3.1" -"@babel/generator@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e" - integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw== +"@babel/generator@^7.24.8", "@babel/generator@^7.24.9": + version "7.24.10" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.24.10.tgz" + integrity sha512-o9HBZL1G2129luEUlG1hB4N/nlYNWHnpwlND9eOMclRqqu1YDy2sSYVCFUZwl8I1Gxh+QSRrP2vD7EpUmFVXxg== dependencies: - "@babel/types" "^7.23.6" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" + "@babel/types" "^7.24.9" + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.25" jsesc "^2.5.1" -"@babel/helper-annotate-as-pure@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" - integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz#5426b109cf3ad47b91120f8328d8ab1be8b0b956" - integrity 
sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw== - dependencies: - "@babel/types" "^7.22.15" - -"@babel/helper-compilation-targets@^7.22.15", "@babel/helper-compilation-targets@^7.22.6", "@babel/helper-compilation-targets@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz#4d79069b16cbcf1461289eccfbbd81501ae39991" - integrity sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ== +"@babel/helper-compilation-targets@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.8.tgz" + integrity sha512-oU+UoqCHdp+nWVDkpldqIQL/i/bvAv53tRqLG/s+cOXxe66zOYLU7ar/Xs3LdmBihrUMEUhwu6dMZwbNOYDwvw== dependencies: - "@babel/compat-data" "^7.23.5" - "@babel/helper-validator-option" "^7.23.5" - browserslist "^4.22.2" + "@babel/compat-data" "^7.24.8" + "@babel/helper-validator-option" "^7.24.8" + browserslist "^4.23.1" lru-cache "^5.1.1" semver "^6.3.1" -"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.22.15": - version "7.23.7" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.23.7.tgz#b2e6826e0e20d337143655198b79d58fdc9bd43d" - integrity sha512-xCoqR/8+BoNnXOY7RVSgv6X+o7pmT5q1d+gGcRlXYkI+9B31glE4jeejhKVpA04O1AtzOt7OSQ6VYKP5FcRl9g== +"@babel/helper-environment-visitor@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz" + integrity sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ== dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - 
"@babel/helper-member-expression-to-functions" "^7.23.0" - "@babel/helper-optimise-call-expression" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.20" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - semver "^6.3.1" + "@babel/types" "^7.24.7" -"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.15", "@babel/helper-create-regexp-features-plugin@^7.22.5": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz#5ee90093914ea09639b01c711db0d6775e558be1" - integrity sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w== +"@babel/helper-function-name@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz" + integrity sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA== dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - regexpu-core "^5.3.1" - semver "^6.3.1" + "@babel/template" "^7.24.7" + "@babel/types" "^7.24.7" -"@babel/helper-define-polyfill-provider@^0.4.4": - version "0.4.4" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.4.tgz#64df615451cb30e94b59a9696022cffac9a10088" - integrity sha512-QcJMILQCu2jm5TFPGA3lCpJJTeEP+mqeXooG/NZbg/h5FTFi6V0+99ahlRsW8/kRLyb24LZVCCiclDedhLKcBA== - dependencies: - "@babel/helper-compilation-targets" "^7.22.6" - "@babel/helper-plugin-utils" "^7.22.5" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - -"@babel/helper-define-polyfill-provider@^0.5.0": - version "0.5.0" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.5.0.tgz#465805b7361f461e86c680f1de21eaf88c25901b" - integrity 
sha512-NovQquuQLAQ5HuyjCz7WQP9MjRj7dx++yspwiyUiGl9ZyadHRSql1HZh5ogRd8W8w6YM6EQ/NTB8rgjLt5W65Q== +"@babel/helper-hoist-variables@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz" + integrity sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ== dependencies: - "@babel/helper-compilation-targets" "^7.22.6" - "@babel/helper-plugin-utils" "^7.22.5" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - -"@babel/helper-environment-visitor@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" - integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== - -"@babel/helper-function-name@^7.22.5", "@babel/helper-function-name@^7.23.0": - version "7.23.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" - integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== - dependencies: - "@babel/template" "^7.22.15" - "@babel/types" "^7.23.0" + "@babel/types" "^7.24.7" -"@babel/helper-hoist-variables@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" - integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== +"@babel/helper-module-imports@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz" + integrity sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA== dependencies: - "@babel/types" "^7.22.5" + "@babel/traverse" "^7.24.7" + 
"@babel/types" "^7.24.7" -"@babel/helper-member-expression-to-functions@^7.22.15", "@babel/helper-member-expression-to-functions@^7.23.0": - version "7.23.0" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz#9263e88cc5e41d39ec18c9a3e0eced59a3e7d366" - integrity sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA== +"@babel/helper-module-transforms@^7.24.9": + version "7.24.9" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.9.tgz" + integrity sha512-oYbh+rtFKj/HwBQkFlUzvcybzklmVdVV3UU+mN7n2t/q3yGHbuVdNxyFvSBO1tfvjyArpHNcWMAzsSPdyI46hw== dependencies: - "@babel/types" "^7.23.0" + "@babel/helper-environment-visitor" "^7.24.7" + "@babel/helper-module-imports" "^7.24.7" + "@babel/helper-simple-access" "^7.24.7" + "@babel/helper-split-export-declaration" "^7.24.7" + "@babel/helper-validator-identifier" "^7.24.7" -"@babel/helper-module-imports@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" - integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== - dependencies: - "@babel/types" "^7.22.15" +"@babel/helper-plugin-utils@^7.24.7": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz" + integrity sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg== -"@babel/helper-module-transforms@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" - integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== +"@babel/helper-simple-access@^7.24.7": + 
version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz" + integrity sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg== dependencies: - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-module-imports" "^7.22.15" - "@babel/helper-simple-access" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/helper-validator-identifier" "^7.22.20" - -"@babel/helper-optimise-call-expression@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" - integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== + "@babel/traverse" "^7.24.7" + "@babel/types" "^7.24.7" + +"@babel/helper-split-export-declaration@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz" + integrity sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.24.7" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" - integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== +"@babel/helper-string-parser@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz" + 
integrity sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ== -"@babel/helper-remap-async-to-generator@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz#7b68e1cb4fa964d2996fd063723fb48eca8498e0" - integrity sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-wrap-function" "^7.22.20" - -"@babel/helper-replace-supers@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz#e37d367123ca98fe455a9887734ed2e16eb7a793" - integrity sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw== - dependencies: - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-member-expression-to-functions" "^7.22.15" - "@babel/helper-optimise-call-expression" "^7.22.5" - -"@babel/helper-simple-access@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" - integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== - dependencies: - "@babel/types" "^7.22.5" +"@babel/helper-validator-identifier@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz" + integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w== -"@babel/helper-skip-transparent-expression-wrappers@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" - integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== - dependencies: - "@babel/types" "^7.22.5" +"@babel/helper-validator-option@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz" + integrity sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q== -"@babel/helper-split-export-declaration@^7.22.6": - version "7.22.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" - integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== - dependencies: - "@babel/types" "^7.22.5" - -"@babel/helper-string-parser@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" - integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== - -"@babel/helper-validator-identifier@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" - integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== - -"@babel/helper-validator-option@^7.22.15", "@babel/helper-validator-option@^7.23.5": - version "7.23.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" - integrity 
sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== - -"@babel/helper-wrap-function@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz#15352b0b9bfb10fc9c76f79f6342c00e3411a569" - integrity sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw== +"@babel/helpers@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.8.tgz" + integrity sha512-gV2265Nkcz7weJJfvDoAEVzC1e2OTDpkGbEsebse8koXUJUXPsCMi7sRo/+SPMuMZ9MtUPnGwITTnQnU5YjyaQ== dependencies: - "@babel/helper-function-name" "^7.22.5" - "@babel/template" "^7.22.15" - "@babel/types" "^7.22.19" - -"@babel/helpers@^7.23.7": - version "7.23.8" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.8.tgz#fc6b2d65b16847fd50adddbd4232c76378959e34" - integrity sha512-KDqYz4PiOWvDFrdHLPhKtCThtIcKVy6avWD2oG4GEvyQ+XDZwHD4YQd+H2vNMnq2rkdxsDkU82T+Vk8U/WXHRQ== - dependencies: - "@babel/template" "^7.22.15" - "@babel/traverse" "^7.23.7" - "@babel/types" "^7.23.6" - -"@babel/highlight@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" - integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A== + "@babel/template" "^7.24.7" + "@babel/types" "^7.24.8" + +"@babel/highlight@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz" + integrity sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw== dependencies: - "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-validator-identifier" "^7.24.7" chalk "^2.4.2" js-tokens "^4.0.0" + picocolors "^1.0.0" -"@babel/parser@^7.22.15", "@babel/parser@^7.23.6", "@babel/parser@^7.7.0": - version "7.23.6" - resolved 
"https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" - integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== +"@babel/parser@^7.1.0", "@babel/parser@^7.20.7", "@babel/parser@^7.24.7", "@babel/parser@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.24.8.tgz" + integrity sha512-WzfbgXOkGzZiXXCqk43kKwZjzwx4oulxZi3nq2TYL9mOjQv6kYwul9mz6ID36njuL7Xkp6nJEfok848Zj10j/w== + +"@babel/plugin-transform-react-jsx-self@^7.24.5": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.24.7.tgz#66bff0248ea0b549972e733516ffad577477bdab" + integrity sha512-fOPQYbGSgH0HUp4UJO4sMBFjY6DuWq+2i8rixyUMb3CdGixs/gccURvYOAhajBdKDoGajFr3mUq5rH3phtkGzw== + dependencies: + "@babel/helper-plugin-utils" "^7.24.7" + +"@babel/plugin-transform-react-jsx-source@^7.24.1": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.24.7.tgz#1198aab2548ad19582013815c938d3ebd8291ee3" + integrity sha512-J2z+MWzZHVOemyLweMqngXrgGC42jQ//R0KdxqkIz/OrbVIIlhFI3WigZ5fO+nwFvBlncr4MGapd8vTyc7RPNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.24.7" + +"@babel/template@^7.24.7": + version "7.24.7" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz" + integrity sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig== + dependencies: + "@babel/code-frame" "^7.24.7" + "@babel/parser" "^7.24.7" + "@babel/types" "^7.24.7" + +"@babel/traverse@^7.24.7", "@babel/traverse@^7.24.8": + version "7.24.8" + resolved "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.8.tgz" + integrity sha512-t0P1xxAPzEDcEPmjprAQq19NWum4K0EQPjMwZQZbHt+GiZqvjCHjj755Weq1YRPVzBI+3zSfvScfpnuIecVFJQ== + dependencies: + "@babel/code-frame" "^7.24.7" + "@babel/generator" 
"^7.24.8" + "@babel/helper-environment-visitor" "^7.24.7" + "@babel/helper-function-name" "^7.24.7" + "@babel/helper-hoist-variables" "^7.24.7" + "@babel/helper-split-export-declaration" "^7.24.7" + "@babel/parser" "^7.24.8" + "@babel/types" "^7.24.8" + debug "^4.3.1" + globals "^11.1.0" -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.23.3.tgz#5cd1c87ba9380d0afb78469292c954fee5d2411a" - integrity sha512-iRkKcCqb7iGnq9+3G6rZ+Ciz5VywC4XNRHe57lKM+jOeYAoR0lVqdeeDRfh0tQcTfw/+vBhHn926FmQhLtlFLQ== +"@babel/types@^7.0.0", "@babel/types@^7.20.7", "@babel/types@^7.24.7", "@babel/types@^7.24.8", "@babel/types@^7.24.9": + version "7.24.9" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.24.9.tgz" + integrity sha512-xm8XrMKz0IlUdocVbYJe0Z9xEgidU7msskG8BbhnTPK/HZ2z/7FP7ykqPgrUH+C+r414mNfNWam1f2vqOjqjYQ== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-string-parser" "^7.24.8" + "@babel/helper-validator-identifier" "^7.24.7" + to-fast-properties "^2.0.0" -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.23.3.tgz#f6652bb16b94f8f9c20c50941e16e9756898dc5d" - integrity sha512-WwlxbfMNdVEpQjZmK5mhm7oSwD3dS6eU+Iwsi4Knl9wAletWem7kaRsGOG+8UEbRyqxY4SS5zvtfXwX+jMxUwQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - "@babel/plugin-transform-optional-chaining" "^7.23.3" - -"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.23.7": - version "7.23.7" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.23.7.tgz#516462a95d10a9618f197d39ad291a9b47ae1d7b" - integrity sha512-LlRT7HgaifEpQA1ZgLVOIJZZFVPWN5iReq/7/JixwBtwcoeVGDBD53ZV28rrsLYOZs1Y/EHhA8N/Z6aazHR8cw== +"@esbuild/aix-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" + integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== + +"@esbuild/android-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" + integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== + +"@esbuild/android-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" + integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== + +"@esbuild/android-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" + integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== + +"@esbuild/darwin-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" + integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== + +"@esbuild/darwin-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" + integrity 
sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== + +"@esbuild/freebsd-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" + integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== + +"@esbuild/freebsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" + integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== + +"@esbuild/linux-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" + integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== + +"@esbuild/linux-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" + integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== + +"@esbuild/linux-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" + integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== + +"@esbuild/linux-loong64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" + integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== + +"@esbuild/linux-mips64el@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" + integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== + +"@esbuild/linux-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" + integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== + +"@esbuild/linux-riscv64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" + integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== + +"@esbuild/linux-s390x@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" + integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== + +"@esbuild/linux-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" + integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== + +"@esbuild/netbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" + integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== + +"@esbuild/openbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" + integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== + 
+"@esbuild/sunos-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" + integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== + +"@esbuild/win32-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" + integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== + +"@esbuild/win32-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" + integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== + +"@esbuild/win32-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" + integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== + +"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": + version "4.4.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" + integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-plugin-utils" "^7.22.5" + eslint-visitor-keys "^3.3.0" -"@babel/plugin-proposal-class-properties@^7.8.3": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" - integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== - dependencies: - 
"@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": - version "7.21.0-placeholder-for-preset-env.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" - integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" +"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.6.1": + version "4.11.0" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.11.0.tgz#b0ffd0312b4a3fd2d6f77237e7248a5ad3a680ae" + integrity sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A== -"@babel/plugin-syntax-class-properties@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== +"@eslint/eslintrc@^2.1.4": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" + integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + ajv "^6.12.4" + debug "^4.3.2" 
+ espree "^9.6.0" + globals "^13.19.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" +"@eslint/js@8.57.0": + version "8.57.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f" + integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g== -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== +"@humanwhocodes/config-array@^0.11.14": + version "0.11.14" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.14.tgz#d78e481a039f7566ecc9660b4ea7fe6b1fec442b" + integrity sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@humanwhocodes/object-schema" "^2.0.2" + debug "^4.3.1" + minimatch "^3.0.5" -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" 
+"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== -"@babel/plugin-syntax-import-assertions@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz#9c05a7f592982aff1a2768260ad84bcd3f0c77fc" - integrity sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +"@humanwhocodes/object-schema@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== -"@babel/plugin-syntax-import-attributes@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.23.3.tgz#992aee922cf04512461d7dae3ff6951b90a2dc06" - integrity sha512-pawnE0P9g10xgoP7yKr6CK63K2FMsTE+FZidZO/1PwRdzmAPVs+HS1mAURUsgaoxammTJvULUdIkEK0gOcU2tA== +"@jridgewell/gen-mapping@^0.3.5": + version "0.3.5" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz" + integrity sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" -"@babel/plugin-syntax-import-meta@^7.10.4": - version "7.10.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== -"@babel/plugin-syntax-jsx@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz#8f2e4f8a9b5f9aa16067e142c1ac9cd9f810f473" - integrity sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": - version "7.10.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== +"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.4" + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" -"@babel/plugin-syntax-numeric-separator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + integrity 
sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== +"@nodelib/fs.walk@^1.2.3", "@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" +"@rollup/rollup-android-arm-eabi@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.19.0.tgz#3d9fd50164b94964f5de68c3c4ce61933b3a338d" + integrity sha512-JlPfZ/C7yn5S5p0yKk7uhHTTnFlvTgLetl2VxqE518QgyM7C9bSfFTYvB/Q/ftkq0RIPY4ySxTz+/wKJ/dXC0w== + +"@rollup/rollup-android-arm64@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.19.0.tgz#e1a6d4bca2eb08c84fd996a4bf896ce4b6f4014c" + integrity sha512-RDxUSY8D1tWYfn00DDi5myxKgOk6RvWPxhmWexcICt/MEC6yEMr4HNCu1sXXYLw8iAsg0D44NuU+qNq7zVWCrw== + +"@rollup/rollup-darwin-arm64@4.19.0": + version "4.19.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.19.0.tgz#0a3fffea69489a24a96079af414b0be78df8abbc" + integrity sha512-emvKHL4B15x6nlNTBMtIaC9tLPRpeA5jMvRLXVbl/W9Ie7HhkrE7KQjvgS9uxgatL1HmHWDXk5TTS4IaNJxbAA== + +"@rollup/rollup-darwin-x64@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.19.0.tgz#13fbdb15f58f090871b0ffff047ece06ad6ad74c" + integrity sha512-fO28cWA1dC57qCd+D0rfLC4VPbh6EOJXrreBmFLWPGI9dpMlER2YwSPZzSGfq11XgcEpPukPTfEVFtw2q2nYJg== + +"@rollup/rollup-linux-arm-gnueabihf@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.19.0.tgz#e9d9219ddf6f6e946e2ee322198af12466d2c868" + integrity sha512-2Rn36Ubxdv32NUcfm0wB1tgKqkQuft00PtM23VqLuCUR4N5jcNWDoV5iBC9jeGdgS38WK66ElncprqgMUOyomw== + +"@rollup/rollup-linux-arm-musleabihf@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.19.0.tgz#4ba804a00b5e793196a622f6977e05f23e01f59a" + integrity sha512-gJuzIVdq/X1ZA2bHeCGCISe0VWqCoNT8BvkQ+BfsixXwTOndhtLUpOg0A1Fcx/+eA6ei6rMBzlOz4JzmiDw7JQ== + +"@rollup/rollup-linux-arm64-gnu@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.19.0.tgz#d871e3f41de759a6db27fc99235b782ba47c15cc" + integrity sha512-0EkX2HYPkSADo9cfeGFoQ7R0/wTKb7q6DdwI4Yn/ULFE1wuRRCHybxpl2goQrx4c/yzK3I8OlgtBu4xvted0ug== + +"@rollup/rollup-linux-arm64-musl@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.19.0.tgz#6e63f7ad4cc51bd2c693a2826fd279de9eaa05b5" + integrity sha512-GlIQRj9px52ISomIOEUq/IojLZqzkvRpdP3cLgIE1wUWaiU5Takwlzpz002q0Nxxr1y2ZgxC2obWxjr13lvxNQ== + +"@rollup/rollup-linux-powerpc64le-gnu@4.19.0": + version "4.19.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.19.0.tgz#1540b284d91c440bc9fa7a1714cfb71a5597e94d" + integrity sha512-N6cFJzssruDLUOKfEKeovCKiHcdwVYOT1Hs6dovDQ61+Y9n3Ek4zXvtghPPelt6U0AH4aDGnDLb83uiJMkWYzQ== + +"@rollup/rollup-linux-riscv64-gnu@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.19.0.tgz#70ae58103b5bc7ba2e2235738b51d97022c8ef92" + integrity sha512-2DnD3mkS2uuam/alF+I7M84koGwvn3ZVD7uG+LEWpyzo/bq8+kKnus2EVCkcvh6PlNB8QPNFOz6fWd5N8o1CYg== + +"@rollup/rollup-linux-s390x-gnu@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.19.0.tgz#579ca5f271421a961d3c73d221202c79e02ff03a" + integrity sha512-D6pkaF7OpE7lzlTOFCB2m3Ngzu2ykw40Nka9WmKGUOTS3xcIieHe82slQlNq69sVB04ch73thKYIWz/Ian8DUA== + +"@rollup/rollup-linux-x64-gnu@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.19.0.tgz#f0282d761b8b4e7b92b236813475248e37231849" + integrity sha512-HBndjQLP8OsdJNSxpNIN0einbDmRFg9+UQeZV1eiYupIRuZsDEoeGU43NQsS34Pp166DtwQOnpcbV/zQxM+rWA== + +"@rollup/rollup-linux-x64-musl@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.19.0.tgz#65da807ac66c505ad14b76f1e5976006cb67dd5f" + integrity sha512-HxfbvfCKJe/RMYJJn0a12eiOI9OOtAUF4G6ozrFUK95BNyoJaSiBjIOHjZskTUffUrB84IPKkFG9H9nEvJGW6A== + +"@rollup/rollup-win32-arm64-msvc@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.19.0.tgz#1eed24b91f421c2eea8bb7ca8889ba0c867e1780" + integrity sha512-HxDMKIhmcguGTiP5TsLNolwBUK3nGGUEoV/BO9ldUBoMLBssvh4J0X8pf11i1fTV7WShWItB1bKAKjX4RQeYmg== + +"@rollup/rollup-win32-ia32-msvc@4.19.0": + version "4.19.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.19.0.tgz#1ed93c9cdc84e185359797a686f4d1576afcea58" + integrity sha512-xItlIAZZaiG/u0wooGzRsx11rokP4qyc/79LkAOdznGRAbOFc+SfEdfUOszG1odsHNgwippUJavag/+W/Etc6Q== + +"@rollup/rollup-win32-x64-msvc@4.19.0": + version "4.19.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.19.0.tgz#baf9b65023ea2ecc5e6ec68f787a0fecfd8ee84c" + integrity sha512-xNo5fV5ycvCCKqiZcpB65VMR11NJB+StnxHz20jdqRAktfdfzhgjTiJ2doTDQE/7dqGaV5I7ZGqKpgph6lCIag== + +"@types/babel__core@^7.20.5": + version "7.20.5" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.5.tgz#3df15f27ba85319caa07ba08d0721889bb39c017" + integrity sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA== + dependencies: + "@babel/parser" "^7.20.7" + "@babel/types" "^7.20.7" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.8" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.8.tgz#f836c61f48b1346e7d2b0d93c6dacc5b9535d3ab" + integrity sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.4" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.4.tgz#5672513701c1b2199bc6dad636a9d7491586766f" + integrity sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*": + version "7.20.6" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.20.6.tgz#8dc9f0ae0f202c08d8d4dab648912c8d6038e3f7" + integrity 
sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg== + dependencies: + "@babel/types" "^7.20.7" + +"@types/estree@1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" +"@types/prop-types@*": + version "15.7.12" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.12.tgz#12bb1e2be27293c1406acb6af1c3f3a1481d98c6" + integrity sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q== + +"@types/react-dom@^18.3.0": + version "18.3.0" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.3.0.tgz#0cbc818755d87066ab6ca74fbedb2547d74a82b0" + integrity sha512-EhwApuTmMBmXuFOikhQLIBUn6uFg81SwLMOAUgodJF14SOBOCMdU04gDoYi0WOJJHD144TL32z4yDqCW3dnkQg== + dependencies: + "@types/react" "*" + +"@types/react@*", "@types/react@^18.3.3": + version "18.3.3" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.3.3.tgz#9679020895318b0915d7a3ab004d92d33375c45f" + integrity sha512-hti/R0pS0q1/xx+TsI73XIqk26eBsISZ2R0wUijXIngRK9R/e7Xw/cXVxQK7R5JjW+SV4zGcn5hXjudkN/pLIw== + dependencies: + "@types/prop-types" "*" + csstype "^3.0.2" + +"@typescript-eslint/eslint-plugin@^7.15.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.17.0.tgz#c8ed1af1ad2928ede5cdd207f7e3090499e1f77b" + integrity 
sha512-pyiDhEuLM3PuANxH7uNYan1AaFs5XE0zw1hq69JBvGvE7gSuEoQl1ydtEe/XQeoC3GQxLXyOVa5kNOATgM638A== + dependencies: + "@eslint-community/regexpp" "^4.10.0" + "@typescript-eslint/scope-manager" "7.17.0" + "@typescript-eslint/type-utils" "7.17.0" + "@typescript-eslint/utils" "7.17.0" + "@typescript-eslint/visitor-keys" "7.17.0" + graphemer "^1.4.0" + ignore "^5.3.1" + natural-compare "^1.4.0" + ts-api-utils "^1.3.0" + +"@typescript-eslint/parser@^7.15.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.17.0.tgz#be8e32c159190cd40a305a2121220eadea5a88e7" + integrity sha512-puiYfGeg5Ydop8eusb/Hy1k7QmOU6X3nvsqCgzrB2K4qMavK//21+PzNE8qeECgNOIoertJPUC1SpegHDI515A== + dependencies: + "@typescript-eslint/scope-manager" "7.17.0" + "@typescript-eslint/types" "7.17.0" + "@typescript-eslint/typescript-estree" "7.17.0" + "@typescript-eslint/visitor-keys" "7.17.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.17.0.tgz#e072d0f914662a7bfd6c058165e3c2b35ea26b9d" + integrity sha512-0P2jTTqyxWp9HiKLu/Vemr2Rg1Xb5B7uHItdVZ6iAenXmPo4SZ86yOPCJwMqpCyaMiEHTNqizHfsbmCFT1x9SA== + dependencies: + "@typescript-eslint/types" "7.17.0" + "@typescript-eslint/visitor-keys" "7.17.0" + +"@typescript-eslint/type-utils@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.17.0.tgz#c5da78feb134c9c9978cbe89e2b1a589ed22091a" + integrity sha512-XD3aaBt+orgkM/7Cei0XNEm1vwUxQ958AOLALzPlbPqb8C1G8PZK85tND7Jpe69Wualri81PLU+Zc48GVKIMMA== + dependencies: + "@typescript-eslint/typescript-estree" "7.17.0" + "@typescript-eslint/utils" "7.17.0" + debug "^4.3.4" + ts-api-utils "^1.3.0" + +"@typescript-eslint/types@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.17.0.tgz#7ce8185bdf06bc3494e73d143dbf3293111b9cff" + integrity 
sha512-a29Ir0EbyKTKHnZWbNsrc/gqfIBqYPwj3F2M+jWE/9bqfEHg0AMtXzkbUkOG6QgEScxh2+Pz9OXe11jHDnHR7A== + +"@typescript-eslint/typescript-estree@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.17.0.tgz#dcab3fea4c07482329dd6107d3c6480e228e4130" + integrity sha512-72I3TGq93t2GoSBWI093wmKo0n6/b7O4j9o8U+f65TVD0FS6bI2180X5eGEr8MA8PhKMvYe9myZJquUT2JkCZw== + dependencies: + "@typescript-eslint/types" "7.17.0" + "@typescript-eslint/visitor-keys" "7.17.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + minimatch "^9.0.4" + semver "^7.6.0" + ts-api-utils "^1.3.0" + +"@typescript-eslint/utils@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.17.0.tgz#815cd85b9001845d41b699b0ce4f92d6dfb84902" + integrity sha512-r+JFlm5NdB+JXc7aWWZ3fKSm1gn0pkswEwIYsrGPdsT2GjsRATAKXiNtp3vgAAO1xZhX8alIOEQnNMl3kbTgJw== + dependencies: + "@eslint-community/eslint-utils" "^4.4.0" + "@typescript-eslint/scope-manager" "7.17.0" + "@typescript-eslint/types" "7.17.0" + "@typescript-eslint/typescript-estree" "7.17.0" + +"@typescript-eslint/visitor-keys@7.17.0": + version "7.17.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.17.0.tgz#680465c734be30969e564b4647f38d6cdf49bfb0" + integrity sha512-RVGC9UhPOCsfCdI9pU++K4nD7to+jTcMIbXTSOcrLqUEW6gF2pU1UUbYJKc9cvcRSK1UDeMJ7pdMxf4bhMpV/A== + dependencies: + "@typescript-eslint/types" "7.17.0" + eslint-visitor-keys "^3.4.3" + +"@ungap/structured-clone@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" + integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== +"@vitejs/plugin-react@^4.3.1": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@vitejs/plugin-react/-/plugin-react-4.3.1.tgz#d0be6594051ded8957df555ff07a991fb618b48e" + integrity sha512-m/V2syj5CuVnaxcUJOQRel/Wr31FFXRFlnOoq1TVtkCxsY5veGMTEmpWHndrhB2U8ScHtCQB1e+4hWYExQc6Lg== dependencies: - "@babel/helper-plugin-utils" "^7.14.5" + "@babel/core" "^7.24.5" + "@babel/plugin-transform-react-jsx-self" "^7.24.5" + "@babel/plugin-transform-react-jsx-source" "^7.24.1" + "@types/babel__core" "^7.20.5" + react-refresh "^0.14.2" -"@babel/plugin-syntax-top-level-await@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== -"@babel/plugin-syntax-unicode-sets-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" - integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" +acorn@^8.9.0: + version "8.12.1" + resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" + integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== -"@babel/plugin-transform-arrow-functions@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.23.3.tgz#94c6dcfd731af90f27a79509f9ab7fb2120fc38b" - integrity sha512-NzQcQrzaQPkaEwoTm4Mhyl8jI1huEL/WWIEvudjTCMJ9aBZNpsJbMASx7EQECtQQPS/DcnFpo0FIh3LvEO9cxQ== +ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" -"@babel/plugin-transform-async-generator-functions@^7.23.7": - version "7.23.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.23.7.tgz#3aa0b4f2fa3788b5226ef9346cf6d16ec61f99cd" - integrity sha512-PdxEpL71bJp1byMG0va5gwQcXHxuEYC/BgI/e88mGTtohbZN28O5Yit0Plkkm/dBzCF/BxmbNcses1RH1T+urA== - dependencies: - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-remap-async-to-generator" "^7.22.20" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-transform-async-to-generator@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.23.3.tgz#d1f513c7a8a506d43f47df2bf25f9254b0b051fa" - integrity sha512-A7LFsKi4U4fomjqXJlZg/u0ft/n8/7n7lpffUP/ZULx/DtV9SGlNKZolHH6PE8Xl1ngCc0M11OaeZptXVkfKSw== - dependencies: - "@babel/helper-module-imports" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" - 
"@babel/helper-remap-async-to-generator" "^7.22.20" - -"@babel/plugin-transform-block-scoped-functions@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.23.3.tgz#fe1177d715fb569663095e04f3598525d98e8c77" - integrity sha512-vI+0sIaPIO6CNuM9Kk5VmXcMVRiOpDh7w2zZt9GXzmE/9KD70CUEVhvPR/etAeNK/FAEkhxQtXOzVF3EuRL41A== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -"@babel/plugin-transform-block-scoping@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.4.tgz#b2d38589531c6c80fbe25e6b58e763622d2d3cf5" - integrity sha512-0QqbP6B6HOh7/8iNR4CQU2Th/bbRtBp4KS9vcaZd1fZ0wSh5Fyssg0UCIHwxh+ka+pNDREbVLQnHCMHKZfPwfw== +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + color-convert "^1.9.0" -"@babel/plugin-transform-class-properties@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.23.3.tgz#35c377db11ca92a785a718b6aa4e3ed1eb65dc48" - integrity sha512-uM+AN8yCIjDPccsKGlw271xjJtGii+xQIF/uMPS8H15L12jZTsLfF4o5vNO7d/oUguOyfdikHGc/yi9ge4SGIg== +ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.15" - 
"@babel/helper-plugin-utils" "^7.22.5" + color-convert "^2.0.1" -"@babel/plugin-transform-class-static-block@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.23.4.tgz#2a202c8787a8964dd11dfcedf994d36bfc844ab5" - integrity sha512-nsWu/1M+ggti1SOALj3hfx5FXzAY06fwPJsUZD4/A5e1bWi46VUIWtD+kOX6/IdhXGsXBWllLFDSnqSCdUNydQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - -"@babel/plugin-transform-classes@^7.23.8": - version "7.23.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.23.8.tgz#d08ae096c240347badd68cdf1b6d1624a6435d92" - integrity sha512-yAYslGsY1bX6Knmg46RjiCiNSwJKv2IUC8qOdYKqMMr0491SXFhcHqOdRDeCRohOOIzwN/90C6mQ9qAKgrP7dg== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-compilation-targets" "^7.23.6" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.20" - "@babel/helper-split-export-declaration" "^7.22.6" - globals "^11.1.0" +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -"@babel/plugin-transform-computed-properties@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.23.3.tgz#652e69561fcc9d2b50ba4f7ac7f60dcf65e86474" - integrity sha512-dTj83UVTLw/+nbiHqQSFdwO9CbTtwq1DsDqm3CUEtDrZNET5rT5E6bIdTlOftDTDLMYxvxHNEYO4B9SLl8SLZw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/template" "^7.22.15" +array-union@^2.1.0: 
+ version "2.1.0" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -"@babel/plugin-transform-destructuring@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz#8c9ee68228b12ae3dff986e56ed1ba4f3c446311" - integrity sha512-n225npDqjDIr967cMScVKHXJs7rout1q+tt50inyBCPkyZ8KxeI6d+GIbSBTT/w/9WdlWDOej3V9HE5Lgk57gw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== -"@babel/plugin-transform-dotall-regex@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.23.3.tgz#3f7af6054882ede89c378d0cf889b854a993da50" - integrity sha512-vgnFYDHAKzFaTVp+mneDsIEbnJ2Np/9ng9iviHw3P/KVcgONxpNULEW/51Z/BaFojG2GI2GwwXck5uV1+1NOYQ== +axios@^1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.2.tgz#b625db8a7051fbea61c35a3cbb3a1daa7b9c7621" + integrity sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + follow-redirects "^1.15.6" + form-data "^4.0.0" + proxy-from-env "^1.1.0" -"@babel/plugin-transform-duplicate-keys@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.23.3.tgz#664706ca0a5dfe8d066537f99032fc1dc8b720ce" - integrity sha512-RrqQ+BQmU3Oyav3J+7/myfvRCq7Tbz+kKLLshUmMwNlDHExbGL7ARhajvoBJEvc+fCguPPu887N+3RRXBVKZUA== - dependencies: - 
"@babel/helper-plugin-utils" "^7.22.5" +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -"@babel/plugin-transform-dynamic-import@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.23.4.tgz#c7629e7254011ac3630d47d7f34ddd40ca535143" - integrity sha512-V6jIbLhdJK86MaLh4Jpghi8ho5fGzt3imHOBu/x0jlBaPYqDoWz4RDXjmMOfnh+JWNaQleEAByZLV0QzBT4YQQ== +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" + balanced-match "^1.0.0" + concat-map "0.0.1" -"@babel/plugin-transform-exponentiation-operator@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.23.3.tgz#ea0d978f6b9232ba4722f3dbecdd18f450babd18" - integrity sha512-5fhCsl1odX96u7ILKHBj4/Y8vipoqwsJMh4csSA8qFfxrZDEA4Ssku2DyNvMJSmZNOEBT750LfFPbtrnTP90BQ== +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + balanced-match "^1.0.0" -"@babel/plugin-transform-export-namespace-from@^7.23.4": - version "7.23.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.23.4.tgz#084c7b25e9a5c8271e987a08cf85807b80283191" - integrity sha512-GzuSBcKkx62dGzZI1WVgTWvkkz84FZO5TC5T8dl/Tht/rAla6Dg/Mz9Yhypg+ezVACf/rgDuQt3kbWEv7LdUDQ== +braces@^3.0.3: + version "3.0.3" + resolved "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + fill-range "^7.1.1" -"@babel/plugin-transform-for-of@^7.23.6": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.23.6.tgz#81c37e24171b37b370ba6aaffa7ac86bcb46f94e" - integrity sha512-aYH4ytZ0qSuBbpfhuofbg/e96oQ7U2w1Aw/UQmKT+1l39uEhUPoFS3fHevDc1G0OvewyDudfMKY1OulczHzWIw== +browserslist@^4.23.1: + version "4.23.2" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.23.2.tgz" + integrity sha512-qkqSyistMYdxAcw+CzbZwlBy8AGmS/eEWs+sEV5TnLRGDOL+C5M2EnH6tlZyg0YoAxGJAFKh61En9BR941GnHA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + caniuse-lite "^1.0.30001640" + electron-to-chromium "^1.4.820" + node-releases "^2.0.14" + update-browserslist-db "^1.1.0" -"@babel/plugin-transform-function-name@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.23.3.tgz#8f424fcd862bf84cb9a1a6b42bc2f47ed630f8dc" - integrity sha512-I1QXp1LxIvt8yLaib49dRW5Okt7Q4oaxao6tFVKS/anCdEOMtYwWVKoiOA1p34GOWIZjUK0E+zCp7+l1pfQyiw== - dependencies: - "@babel/helper-compilation-targets" "^7.22.15" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-json-strings@^7.23.4": - version "7.23.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.23.4.tgz#a871d9b6bd171976efad2e43e694c961ffa3714d" - integrity sha512-81nTOqM1dMwZ/aRXQ59zVubN9wHGqk6UtqRK+/q+ciXmRy8fSolhGVvG09HHRGo4l6fr/c4ZhXUQH0uFW7PZbg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-json-strings" "^7.8.3" +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -"@babel/plugin-transform-literals@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.23.3.tgz#8214665f00506ead73de157eba233e7381f3beb4" - integrity sha512-wZ0PIXRxnwZvl9AYpqNUxpZ5BiTGrYt7kueGQ+N5FiQ7RCOD4cm8iShd6S6ggfVIWaJf2EMk8eRzAh52RfP4rQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +caniuse-lite@^1.0.30001640: + version "1.0.30001643" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001643.tgz" + integrity sha512-ERgWGNleEilSrHM6iUz/zJNSQTP8Mr21wDWpdgvRwcTXGAq6jMtOUPP4dqFPTdKqZ2wKTdtB+uucZ3MRpAUSmg== -"@babel/plugin-transform-logical-assignment-operators@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.23.4.tgz#e599f82c51d55fac725f62ce55d3a0886279ecb5" - integrity sha512-Mc/ALf1rmZTP4JKKEhUwiORU+vcfarFVLfcFiolKUo6sewoxSEgl36ak5t+4WamRsNr6nzjZXQjM35WsU+9vbg== +chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color 
"^5.3.0" -"@babel/plugin-transform-member-expression-literals@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.23.3.tgz#e37b3f0502289f477ac0e776b05a833d853cabcc" - integrity sha512-sC3LdDBDi5x96LA+Ytekz2ZPk8i/Ck+DEuDbRAll5rknJ5XRTSaPKEYwomLcs1AA8wg9b3KjIQRsnApj+q51Ag== +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + ansi-styles "^4.1.0" + supports-color "^7.1.0" -"@babel/plugin-transform-modules-amd@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.23.3.tgz#e19b55436a1416829df0a1afc495deedfae17f7d" - integrity sha512-vJYQGxeKM4t8hYCKVBlZX/gtIY2I7mRGFNcm85sgXGMTBcoV3QdVtdpbcWEbzbfUIUZKwvgFT82mRvaQIebZzw== +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helper-plugin-utils" "^7.22.5" + color-name "1.1.3" -"@babel/plugin-transform-modules-commonjs@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.3.tgz#661ae831b9577e52be57dd8356b734f9700b53b4" - integrity sha512-aVS0F65LKsdNOtcz6FRCpE4OgsP2OFnW46qNxNIX9h3wuzaNcSQsJysuMwqSibC98HPrf2vCgtxKNwS0DAlgcA== - dependencies: - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-simple-access" "^7.22.5" - -"@babel/plugin-transform-modules-systemjs@^7.23.3": - version "7.23.3" 
- resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.23.3.tgz#fa7e62248931cb15b9404f8052581c302dd9de81" - integrity sha512-ZxyKGTkF9xT9YJuKQRo19ewf3pXpopuYQd8cDXqNzc3mUNbOME0RKMoZxviQk74hwzfQsEe66dE92MaZbdHKNQ== - dependencies: - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.20" - -"@babel/plugin-transform-modules-umd@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.23.3.tgz#5d4395fccd071dfefe6585a4411aa7d6b7d769e9" - integrity sha512-zHsy9iXX2nIsCBFPud3jKn1IRPWg3Ing1qOZgeKV39m1ZgIdpJqvlWVeiHBZC6ITRG0MfskhYe9cLgntfSFPIg== +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: - "@babel/helper-module-transforms" "^7.23.3" - "@babel/helper-plugin-utils" "^7.22.5" + color-name "~1.1.4" -"@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" - integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== -"@babel/plugin-transform-new-target@^7.23.3": - version "7.23.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.23.3.tgz#5491bb78ed6ac87e990957cea367eab781c4d980" - integrity sha512-YJ3xKqtJMAT5/TIZnpAR3I+K+WaDowYbN3xyxI8zxx/Gsypwf9B9h0VB+1Nh6ACAAPRS5NSRje0uVv5i79HYGQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -"@babel/plugin-transform-nullish-coalescing-operator@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.23.4.tgz#45556aad123fc6e52189ea749e33ce090637346e" - integrity sha512-jHE9EVVqHKAQx+VePv5LLGHjmHSJR76vawFPTdlxR/LVJPfOEGxREQwQfjuZEOPTwG92X3LINSh3M40Rv4zpVA== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + delayed-stream "~1.0.0" -"@babel/plugin-transform-numeric-separator@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.23.4.tgz#03d08e3691e405804ecdd19dd278a40cca531f29" - integrity sha512-mps6auzgwjRrwKEZA05cOwuDc9FAzoyFS4ZsG/8F43bTLf/TgkJg7QXOrPO1JO599iA3qgK9MXdMGOEC8O1h6Q== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" + integrity 
sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -"@babel/plugin-transform-object-rest-spread@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.23.4.tgz#2b9c2d26bf62710460bdc0d1730d4f1048361b83" - integrity sha512-9x9K1YyeQVw0iOXJlIzwm8ltobIIv7j2iLyP2jIhEbqPRQ7ScNgwQufU2I0Gq11VjyG4gI4yMXt2VFags+1N3g== - dependencies: - "@babel/compat-data" "^7.23.3" - "@babel/helper-compilation-targets" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.23.3" - -"@babel/plugin-transform-object-super@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.23.3.tgz#81fdb636dcb306dd2e4e8fd80db5b2362ed2ebcd" - integrity sha512-BwQ8q0x2JG+3lxCVFohg+KbQM7plfpBwThdW9A6TMtWwLsbDA01Ek2Zb/AgDN39BiZsExm4qrXxjk+P1/fzGrA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.20" +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== -"@babel/plugin-transform-optional-catch-binding@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.23.4.tgz#318066de6dacce7d92fa244ae475aa8d91778017" - integrity sha512-XIq8t0rJPHf6Wvmbn9nFxU6ao4c7WhghTR5WyV8SrJfUFzyxhCm4nhC+iAp3HFhbAKLfYpgzhJ6t4XCtVwqO5A== +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" -"@babel/plugin-transform-optional-chaining@^7.23.3", "@babel/plugin-transform-optional-chaining@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.23.4.tgz#6acf61203bdfc4de9d4e52e64490aeb3e52bd017" - integrity sha512-ZU8y5zWOfjM5vZ+asjgAPwDaBjJzgufjES89Rs4Lpq63O300R/kOz30WCLo6BxxX6QVEilwSlpClnG5cZaikTA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-transform-parameters@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz#83ef5d1baf4b1072fa6e54b2b0999a7b2527e2af" - integrity sha512-09lMt6UsUb3/34BbECKVbVwrT9bO6lILWln237z7sLaWnMsTi7Yc9fhX5DLpkJzAGfaReXI22wP41SZmnAA3Vw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +csstype@^3.0.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== -"@babel/plugin-transform-private-methods@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.23.3.tgz#b2d7a3c97e278bfe59137a978d53b2c2e038c0e4" - integrity sha512-UzqRcRtWsDMTLrRWFvUBDwmw06tCQH9Rl1uAjfh6ijMSmGYQ+fpdB+cnqRC8EMh5tuuxSv0/TejGL+7vyj+50g== +debug@^4.1.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.5" + resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.3.5.tgz#e83444eceb9fedd4a1da56d671ae2446a01a6e1e" + integrity sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg== dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + ms "2.1.2" -"@babel/plugin-transform-private-property-in-object@^7.23.4": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.23.4.tgz#3ec711d05d6608fd173d9b8de39872d8dbf68bf5" - integrity sha512-9G3K1YqTq3F4Vt88Djx1UZ79PDyj+yKRnUy7cZGSMe+a7jkwD259uKKuUzQlPkGam7R+8RJwh5z4xO27fA1o2A== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-create-class-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-transform-property-literals@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.23.3.tgz#54518f14ac4755d22b92162e4a852d308a560875" - integrity sha512-jR3Jn3y7cZp4oEWPFAlRsSWjxKe4PZILGBSd4nis1TsC5qeSpb+nrtihJuDhNI7QHiVbUaiXa0X2RZY3/TI6Nw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== -"@babel/plugin-transform-react-display-name@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.23.3.tgz#70529f034dd1e561045ad3c8152a267f0d7b6200" - integrity sha512-GnvhtVfA2OAtzdX58FJxU19rhoGeQzyVndw3GgtdECQvQFXPEZIOVULHVZGAYmOgmqjXpVpfocAbSjh99V/Fqw== - dependencies: - 
"@babel/helper-plugin-utils" "^7.22.5" +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -"@babel/plugin-transform-react-jsx-development@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" - integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: - "@babel/plugin-transform-react-jsx" "^7.22.5" + path-type "^4.0.0" -"@babel/plugin-transform-react-jsx@^7.22.15", "@babel/plugin-transform-react-jsx@^7.22.5": - version "7.23.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.23.4.tgz#393f99185110cea87184ea47bcb4a7b0c2e39312" - integrity sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-module-imports" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-jsx" "^7.23.3" - "@babel/types" "^7.23.4" - -"@babel/plugin-transform-react-pure-annotations@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.23.3.tgz#fabedbdb8ee40edf5da96f3ecfc6958e3783b93c" - integrity sha512-qMFdSS+TUhB7Q/3HVPnEdYJDQIk57jkntAwSuz9xfSE4n+3I+vHYCli3HoHawN1Z3RfCz/y1zXA/JXjG6cVImQ== +doctrine@^3.0.0: + version "3.0.0" + resolved 
"https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" + esutils "^2.0.2" -"@babel/plugin-transform-regenerator@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.23.3.tgz#141afd4a2057298602069fce7f2dc5173e6c561c" - integrity sha512-KP+75h0KghBMcVpuKisx3XTu9Ncut8Q8TuvGO4IhY+9D5DFEckQefOuIsB/gQ2tG71lCke4NMrtIPS8pOj18BQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - regenerator-transform "^0.15.2" +electron-to-chromium@^1.4.820: + version "1.5.2" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.2.tgz" + integrity sha512-kc4r3U3V3WLaaZqThjYz/Y6z8tJe+7K0bbjUVo3i+LWIypVdMx5nXCkwRe6SWbY6ILqLdc1rKcKmr3HoH7wjSQ== -"@babel/plugin-transform-reserved-words@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.23.3.tgz#4130dcee12bd3dd5705c587947eb715da12efac8" - integrity sha512-QnNTazY54YqgGxwIexMZva9gqbPa15t/x9VS+0fsEFWplwVpXYZivtgl43Z1vMpc1bdPP2PP8siFeVcnFvA3Cg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +esbuild@^0.21.3: + version "0.21.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" + integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== + optionalDependencies: + "@esbuild/aix-ppc64" "0.21.5" + "@esbuild/android-arm" "0.21.5" + "@esbuild/android-arm64" "0.21.5" + "@esbuild/android-x64" "0.21.5" + "@esbuild/darwin-arm64" "0.21.5" + "@esbuild/darwin-x64" "0.21.5" + "@esbuild/freebsd-arm64" "0.21.5" + "@esbuild/freebsd-x64" "0.21.5" + "@esbuild/linux-arm" "0.21.5" + "@esbuild/linux-arm64" "0.21.5" + 
"@esbuild/linux-ia32" "0.21.5" + "@esbuild/linux-loong64" "0.21.5" + "@esbuild/linux-mips64el" "0.21.5" + "@esbuild/linux-ppc64" "0.21.5" + "@esbuild/linux-riscv64" "0.21.5" + "@esbuild/linux-s390x" "0.21.5" + "@esbuild/linux-x64" "0.21.5" + "@esbuild/netbsd-x64" "0.21.5" + "@esbuild/openbsd-x64" "0.21.5" + "@esbuild/sunos-x64" "0.21.5" + "@esbuild/win32-arm64" "0.21.5" + "@esbuild/win32-ia32" "0.21.5" + "@esbuild/win32-x64" "0.21.5" + +escalade@^3.1.2: + version "3.1.2" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz" + integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== -"@babel/plugin-transform-shorthand-properties@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.23.3.tgz#97d82a39b0e0c24f8a981568a8ed851745f59210" - integrity sha512-ED2fgqZLmexWiN+YNFX26fx4gh5qHDhn1O2gvEhreLW2iI63Sqm4llRLCXALKrCnbN4Jy0VcMQZl/SAzqug/jg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== -"@babel/plugin-transform-spread@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.23.3.tgz#41d17aacb12bde55168403c6f2d6bdca563d362c" - integrity sha512-VvfVYlrlBVu+77xVTOAoxQ6mZbnIq5FM0aGBSFEcIh03qHf+zNqA4DC/3XMUozTg7bZV3e3mZQ0i13VB6v5yUg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity 
sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== -"@babel/plugin-transform-sticky-regex@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.23.3.tgz#dec45588ab4a723cb579c609b294a3d1bd22ff04" - integrity sha512-HZOyN9g+rtvnOU3Yh7kSxXrKbzgrm5X4GncPY1QOquu7epga5MxKHVpYu2hvQnry/H+JjckSYRb93iNfsioAGg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +eslint-plugin-react-hooks@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz#c829eb06c0e6f484b3fbb85a97e57784f328c596" + integrity sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ== -"@babel/plugin-transform-template-literals@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.23.3.tgz#5f0f028eb14e50b5d0f76be57f90045757539d07" - integrity sha512-Flok06AYNp7GV2oJPZZcP9vZdszev6vPBkHLwxwSpaIqx75wn6mUd3UFWsSsA0l8nXAKkyCmL/sR02m8RYGeHg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +eslint-plugin-react-refresh@^0.4.7: + version "0.4.9" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.9.tgz#bf870372b353b12e1e6fb7fc41b282d9cbc8d93d" + integrity sha512-QK49YrBAo5CLNLseZ7sZgvgTy21E6NEw22eZqc4teZfH8pxV3yXc9XXOYfUI6JNpw7mfHNkAeWtBxrTyykB6HA== -"@babel/plugin-transform-typeof-symbol@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.23.3.tgz#9dfab97acc87495c0c449014eb9c547d8966bca4" - integrity sha512-4t15ViVnaFdrPC74be1gXBSMzXk3B4Us9lP7uLRQHTFpV5Dvt33pn+2MyyNxmN3VTTm3oTrZVMUmuw3oBnQ2oQ== +eslint-scope@^7.2.2: + version "7.2.2" + resolved 
"https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" + integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== dependencies: - "@babel/helper-plugin-utils" "^7.22.5" + esrecurse "^4.3.0" + estraverse "^5.2.0" -"@babel/plugin-transform-unicode-escapes@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.23.3.tgz#1f66d16cab01fab98d784867d24f70c1ca65b925" - integrity sha512-OMCUx/bU6ChE3r4+ZdylEqAjaQgHAgipgW8nsCfu5pGqDcFytVd91AwRvUJSBZDz0exPGgnjoqhgRYLRjFZc9Q== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" + integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== + +eslint@^8.57.0: + version "8.57.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.57.0.tgz#c786a6fd0e0b68941aaf624596fb987089195668" + integrity sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.6.1" + "@eslint/eslintrc" "^2.1.4" + "@eslint/js" "8.57.0" + "@humanwhocodes/config-array" "^0.11.14" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + "@ungap/structured-clone" "^1.2.0" + ajv "^6.12.4" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.2.2" + eslint-visitor-keys "^3.4.3" + espree "^9.6.1" + esquery "^1.4.2" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + graphemer 
"^1.4.0" + ignore "^5.2.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + is-path-inside "^3.0.3" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" -"@babel/plugin-transform-unicode-property-regex@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.23.3.tgz#19e234129e5ffa7205010feec0d94c251083d7ad" - integrity sha512-KcLIm+pDZkWZQAFJ9pdfmh89EwVfmNovFBcXko8szpBeF8z68kWIPeKlmSOkT9BXJxs2C0uk+5LxoxIv62MROA== +espree@^9.6.0, espree@^9.6.1: + version "9.6.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" + integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + acorn "^8.9.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.4.1" -"@babel/plugin-transform-unicode-regex@^7.23.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.23.3.tgz#26897708d8f42654ca4ce1b73e96140fbad879dc" - integrity sha512-wMHpNA4x2cIA32b/ci3AfwNgheiva2W0WUKWTK7vBHBhDKfPsc5cFGNWm69WBqpwd86u1qwZ9PWevKqm1A3yAw== +esquery@^1.4.2: + version "1.6.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.6.0.tgz#91419234f804d852a82dceec3e16cdc22cf9dae7" + integrity sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + estraverse "^5.1.0" -"@babel/plugin-transform-unicode-sets-regex@^7.23.3": - version "7.23.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.23.3.tgz#4fb6f0a719c2c5859d11f6b55a050cc987f3799e" - integrity sha512-W7lliA/v9bNR83Qc3q1ip9CQMZ09CcHDbHfbLRDNuAhn1Mvkr1ZNF7hPmztMQvtTGVLJ9m8IZqWsTkXOml8dbw== +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.15" - "@babel/helper-plugin-utils" "^7.22.5" + estraverse "^5.2.0" -"@babel/preset-env@^7.8.3": - version "7.23.8" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.23.8.tgz#7d6f8171ea7c221ecd28059e65ad37c20e441e3e" - integrity sha512-lFlpmkApLkEP6woIKprO6DO60RImpatTQKtz4sUcDjVcK8M8mQ4sZsuxaTMNOZf0sqAq/ReYW1ZBHnOQwKpLWA== - dependencies: - "@babel/compat-data" "^7.23.5" - "@babel/helper-compilation-targets" "^7.23.6" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-option" "^7.23.5" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.23.3" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.23.3" - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.23.7" - "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.23.3" - "@babel/plugin-syntax-import-attributes" "^7.23.3" - "@babel/plugin-syntax-import-meta" "^7.10.4" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - 
"@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" - "@babel/plugin-transform-arrow-functions" "^7.23.3" - "@babel/plugin-transform-async-generator-functions" "^7.23.7" - "@babel/plugin-transform-async-to-generator" "^7.23.3" - "@babel/plugin-transform-block-scoped-functions" "^7.23.3" - "@babel/plugin-transform-block-scoping" "^7.23.4" - "@babel/plugin-transform-class-properties" "^7.23.3" - "@babel/plugin-transform-class-static-block" "^7.23.4" - "@babel/plugin-transform-classes" "^7.23.8" - "@babel/plugin-transform-computed-properties" "^7.23.3" - "@babel/plugin-transform-destructuring" "^7.23.3" - "@babel/plugin-transform-dotall-regex" "^7.23.3" - "@babel/plugin-transform-duplicate-keys" "^7.23.3" - "@babel/plugin-transform-dynamic-import" "^7.23.4" - "@babel/plugin-transform-exponentiation-operator" "^7.23.3" - "@babel/plugin-transform-export-namespace-from" "^7.23.4" - "@babel/plugin-transform-for-of" "^7.23.6" - "@babel/plugin-transform-function-name" "^7.23.3" - "@babel/plugin-transform-json-strings" "^7.23.4" - "@babel/plugin-transform-literals" "^7.23.3" - "@babel/plugin-transform-logical-assignment-operators" "^7.23.4" - "@babel/plugin-transform-member-expression-literals" "^7.23.3" - "@babel/plugin-transform-modules-amd" "^7.23.3" - "@babel/plugin-transform-modules-commonjs" "^7.23.3" - "@babel/plugin-transform-modules-systemjs" "^7.23.3" - "@babel/plugin-transform-modules-umd" "^7.23.3" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.22.5" - "@babel/plugin-transform-new-target" "^7.23.3" - "@babel/plugin-transform-nullish-coalescing-operator" "^7.23.4" - 
"@babel/plugin-transform-numeric-separator" "^7.23.4" - "@babel/plugin-transform-object-rest-spread" "^7.23.4" - "@babel/plugin-transform-object-super" "^7.23.3" - "@babel/plugin-transform-optional-catch-binding" "^7.23.4" - "@babel/plugin-transform-optional-chaining" "^7.23.4" - "@babel/plugin-transform-parameters" "^7.23.3" - "@babel/plugin-transform-private-methods" "^7.23.3" - "@babel/plugin-transform-private-property-in-object" "^7.23.4" - "@babel/plugin-transform-property-literals" "^7.23.3" - "@babel/plugin-transform-regenerator" "^7.23.3" - "@babel/plugin-transform-reserved-words" "^7.23.3" - "@babel/plugin-transform-shorthand-properties" "^7.23.3" - "@babel/plugin-transform-spread" "^7.23.3" - "@babel/plugin-transform-sticky-regex" "^7.23.3" - "@babel/plugin-transform-template-literals" "^7.23.3" - "@babel/plugin-transform-typeof-symbol" "^7.23.3" - "@babel/plugin-transform-unicode-escapes" "^7.23.3" - "@babel/plugin-transform-unicode-property-regex" "^7.23.3" - "@babel/plugin-transform-unicode-regex" "^7.23.3" - "@babel/plugin-transform-unicode-sets-regex" "^7.23.3" - "@babel/preset-modules" "0.1.6-no-external-plugins" - babel-plugin-polyfill-corejs2 "^0.4.7" - babel-plugin-polyfill-corejs3 "^0.8.7" - babel-plugin-polyfill-regenerator "^0.5.4" - core-js-compat "^3.31.0" - semver "^6.3.1" +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== -"@babel/preset-modules@0.1.6-no-external-plugins": - version "0.1.6-no-external-plugins" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a" - integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/types" "^7.4.4" - esutils 
"^2.0.2" +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -"@babel/preset-react@^7.8.3": - version "7.23.3" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.23.3.tgz#f73ca07e7590f977db07eb54dbe46538cc015709" - integrity sha512-tbkHOS9axH6Ysf2OUEqoSZ6T3Fa2SrNH6WTWSPBboxKzdxNc9qOICeLXkNG0ZEwbQ1HY8liwOce4aN/Ceyuq6w== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-option" "^7.22.15" - "@babel/plugin-transform-react-display-name" "^7.23.3" - "@babel/plugin-transform-react-jsx" "^7.22.15" - "@babel/plugin-transform-react-jsx-development" "^7.22.5" - "@babel/plugin-transform-react-pure-annotations" "^7.23.3" - -"@babel/regjsgen@^0.8.0": - version "0.8.0" - resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" - integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== - -"@babel/runtime@^7.1.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.8.4": - version "7.23.8" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.8.tgz#8ee6fe1ac47add7122902f257b8ddf55c898f650" - integrity sha512-Y7KbAP984rn1VGMbGqKmBLio9V7y5Je9GvU4rQPCPinCyNfUcToxIXl06d59URp/F3LwinvODxab5N/G6qggkw== - dependencies: - regenerator-runtime "^0.14.0" +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -"@babel/template@^7.22.15": - version "7.22.15" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" - integrity 
sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== - dependencies: - "@babel/code-frame" "^7.22.13" - "@babel/parser" "^7.22.15" - "@babel/types" "^7.22.15" - -"@babel/traverse@^7.23.7", "@babel/traverse@^7.7.0": - version "7.23.7" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.7.tgz#9a7bf285c928cb99b5ead19c3b1ce5b310c9c305" - integrity sha512-tY3mM8rH9jM0YHFGyfC0/xf+SB5eKUu7HPj7/k3fpi9dAlsMc5YbQvDi0Sh2QTPXqMhyaAtzAr807TIyfQrmyg== +fast-glob@^3.2.9: + version "3.3.2" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz" + integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== dependencies: - "@babel/code-frame" "^7.23.5" - "@babel/generator" "^7.23.6" - "@babel/helper-environment-visitor" "^7.22.20" - "@babel/helper-function-name" "^7.23.0" - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.23.6" - "@babel/types" "^7.23.6" - debug "^4.3.1" - globals "^11.1.0" + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" -"@babel/types@^7.22.15", "@babel/types@^7.22.19", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.4", "@babel/types@^7.23.6", "@babel/types@^7.4.4", "@babel/types@^7.7.0": - version "7.23.6" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd" - integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg== - dependencies: - "@babel/helper-string-parser" "^7.23.4" - "@babel/helper-validator-identifier" "^7.22.20" - to-fast-properties "^2.0.0" - -"@discoveryjs/json-ext@^0.5.0": - version "0.5.7" - resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" - integrity 
sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== - -"@gar/promisify@^1.0.1": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" - integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw== - -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" - integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@^3.1.0": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" - integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== - -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - -"@jridgewell/source-map@^0.3.3": - version "0.3.5" - resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.5.tgz#a3bb4d5c6825aab0d281268f47f6ad5853431e91" - integrity sha512-UTYAUj/wviwdsMfzoSJspJxbkH5o1snzwX0//0ENX1u/55kkZZkcTZP6u9bwKGkv+dkk9at4m1Cpt0uY80kcpQ== - dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": - version "1.4.15" - resolved 
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" - integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== - -"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.21" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.21.tgz#5dc1df7b3dc4a6209e503a924e1ca56097a2bb15" - integrity sha512-SRfKmRe1KvYnxjEMtxEr+J4HIeMX5YBg/qhRHpxEIGjhX1rshcHlnFUE9K0GazhVKWM7B+nARSkV8LuvJdJ5/g== - dependencies: - "@jridgewell/resolve-uri" "^3.1.0" - "@jridgewell/sourcemap-codec" "^1.4.14" - -"@leichtgewicht/ip-codec@^2.0.1": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" - integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - 
-"@npmcli/fs@^1.0.0": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.1.1.tgz#72f719fe935e687c56a4faecf3c03d06ba593257" - integrity sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ== - dependencies: - "@gar/promisify" "^1.0.1" - semver "^7.3.5" - -"@npmcli/move-file@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674" - integrity sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg== - dependencies: - mkdirp "^1.0.4" - rimraf "^3.0.2" - -"@sindresorhus/is@^0.7.0": - version "0.7.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd" - integrity sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow== - -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - -"@types/body-parser@*": - version "1.19.5" - resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.5.tgz#04ce9a3b677dc8bd681a17da1ab9835dc9d3ede4" - integrity sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg== - dependencies: - "@types/connect" "*" - "@types/node" "*" - -"@types/bonjour@^3.5.9": - version "3.5.13" - resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.13.tgz#adf90ce1a105e81dd1f9c61fdc5afda1bfb92956" - integrity sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ== - dependencies: - "@types/node" "*" - -"@types/connect-history-api-fallback@^1.3.5": - version "1.5.4" - resolved 
"https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.4.tgz#7de71645a103056b48ac3ce07b3520b819c1d5b3" - integrity sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw== - dependencies: - "@types/express-serve-static-core" "*" - "@types/node" "*" - -"@types/connect@*": - version "3.4.38" - resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.38.tgz#5ba7f3bc4fbbdeaff8dded952e5ff2cc53f8d858" - integrity sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug== - dependencies: - "@types/node" "*" - -"@types/eslint-scope@^3.7.3": - version "3.7.7" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" - integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.56.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.56.2.tgz#1c72a9b794aa26a8b94ad26d5b9aa51c8a6384bb" - integrity sha512-uQDwm1wFHmbBbCZCqAlq6Do9LYwByNZHWzXppSnay9SuwJ+VRbjkbLABer54kcPnMSlG6Fdiy2yaFXm/z9Z5gw== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - -"@types/estree@*", "@types/estree@^1.0.0": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" - integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== - -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": - version "4.17.41" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.41.tgz#5077defa630c2e8d28aa9ffc2c01c157c305bef6" - integrity sha512-OaJ7XLaelTgrvlZD8/aa0vvvxZdUmlCn6MtWeB7TkiKW70BQLc9XEPpDLPdbo52ZhXUCrznlWdCHWxJWtdyajA== - dependencies: - 
"@types/node" "*" - "@types/qs" "*" - "@types/range-parser" "*" - "@types/send" "*" - -"@types/express@*", "@types/express@^4.17.13": - version "4.17.21" - resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.21.tgz#c26d4a151e60efe0084b23dc3369ebc631ed192d" - integrity sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ== - dependencies: - "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.33" - "@types/qs" "*" - "@types/serve-static" "*" - -"@types/glob@^7.1.1": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" - integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== - dependencies: - "@types/minimatch" "*" - "@types/node" "*" - -"@types/html-minifier-terser@^6.0.0": - version "6.1.0" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" - integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== - -"@types/http-errors@*": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.4.tgz#7eb47726c391b7345a6ec35ad7f4de469cf5ba4f" - integrity sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA== - -"@types/http-proxy@^1.17.8": - version "1.17.14" - resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.14.tgz#57f8ccaa1c1c3780644f8a94f9c6b5000b5e2eec" - integrity sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w== - dependencies: - "@types/node" "*" - -"@types/json-schema@*", "@types/json-schema@^7.0.3", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": - version "7.0.15" - resolved 
"https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" - integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== - -"@types/mime@*": - version "3.0.4" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.4.tgz#2198ac274de6017b44d941e00261d5bc6a0e0a45" - integrity sha512-iJt33IQnVRkqeqC7PzBHPTC6fDlRNRW8vjrgqtScAhrmMwe8c4Eo7+fUGTa+XdWrpEgpyKWMYmi2dIwMAYRzPw== - -"@types/mime@^1": - version "1.3.5" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.5.tgz#1ef302e01cf7d2b5a0fa526790c9123bf1d06690" - integrity sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w== - -"@types/minimatch@*": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" - integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== - -"@types/node-forge@^1.3.0": - version "1.3.11" - resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" - integrity sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ== - dependencies: - "@types/node" "*" - -"@types/node@*": - version "20.11.5" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.5.tgz#be10c622ca7fcaa3cf226cf80166abc31389d86e" - integrity sha512-g557vgQjUUfN76MZAN/dt1z3dzcUsimuysco0KeluHgrPdJXkP/XdAURgyO2W9fZWHRtRBiVKzKn8vyOAwlG+w== - dependencies: - undici-types "~5.26.4" - -"@types/qs@*": - version "6.9.11" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.11.tgz#208d8a30bc507bd82e03ada29e4732ea46a6bbda" - integrity sha512-oGk0gmhnEJK4Yyk+oI7EfXsLayXatCWPHary1MtcmbAifkobT9cM9yutG/hZKIseOU0MqbIwQ/u2nn/Gb+ltuQ== - -"@types/range-parser@*": - version "1.2.7" - resolved 
"https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.7.tgz#50ae4353eaaddc04044279812f52c8c65857dbcb" - integrity sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ== - -"@types/retry@0.12.0": - version "0.12.0" - resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" - integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== - -"@types/send@*": - version "0.17.4" - resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.4.tgz#6619cd24e7270793702e4e6a4b958a9010cfc57a" - integrity sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA== - dependencies: - "@types/mime" "^1" - "@types/node" "*" - -"@types/serve-index@^1.9.1": - version "1.9.4" - resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.4.tgz#e6ae13d5053cb06ed36392110b4f9a49ac4ec898" - integrity sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug== - dependencies: - "@types/express" "*" - -"@types/serve-static@*", "@types/serve-static@^1.13.10": - version "1.15.5" - resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.5.tgz#15e67500ec40789a1e8c9defc2d32a896f05b033" - integrity sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ== - dependencies: - "@types/http-errors" "*" - "@types/mime" "*" - "@types/node" "*" - -"@types/sockjs@^0.3.33": - version "0.3.36" - resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.36.tgz#ce322cf07bcc119d4cbf7f88954f3a3bd0f67535" - integrity sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q== - dependencies: - "@types/node" "*" - -"@types/source-list-map@*": - version "0.1.6" - resolved 
"https://registry.yarnpkg.com/@types/source-list-map/-/source-list-map-0.1.6.tgz#164e169dd061795b50b83c19e4d3be09f8d3a454" - integrity sha512-5JcVt1u5HDmlXkwOD2nslZVllBBc7HDuOICfiZah2Z0is8M8g+ddAEawbmd3VjedfDHBzxCaXLs07QEmb7y54g== - -"@types/tapable@^1": - version "1.0.12" - resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.12.tgz#bc2cab12e87978eee89fb21576b670350d6d86ab" - integrity sha512-bTHG8fcxEqv1M9+TD14P8ok8hjxoOCkfKc8XXLaaD05kI7ohpeI956jtDOD3XHKBQrlyPughUtzm1jtVhHpA5Q== - -"@types/uglify-js@*": - version "3.17.4" - resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.17.4.tgz#3c70021f08023e5a760ce133d22966f200e1d31c" - integrity sha512-Hm/T0kV3ywpJyMGNbsItdivRhYNCQQf1IIsYsXnoVPES4t+FMLyDe0/K+Ea7ahWtMtSNb22ZdY7MIyoD9rqARg== - dependencies: - source-map "^0.6.1" - -"@types/webpack-sources@*": - version "3.2.3" - resolved "https://registry.yarnpkg.com/@types/webpack-sources/-/webpack-sources-3.2.3.tgz#b667bd13e9fa15a9c26603dce502c7985418c3d8" - integrity sha512-4nZOdMwSPHZ4pTEZzSp0AsTM4K7Qmu40UKW4tJDiOVs20UzYF9l+qUe4s0ftfN0pin06n+5cWWDJXH+sbhAiDw== - dependencies: - "@types/node" "*" - "@types/source-list-map" "*" - source-map "^0.7.3" - -"@types/webpack@^4.4.31": - version "4.41.38" - resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.38.tgz#5a40ac81bdd052bf405e8bdcf3e1236f6db6dc26" - integrity sha512-oOW7E931XJU1mVfCnxCVgv8GLFL768pDO5u2Gzk82i8yTIgX6i7cntyZOkZYb/JtYM8252SN9bQp9tgkVDSsRw== - dependencies: - "@types/node" "*" - "@types/tapable" "^1" - "@types/uglify-js" "*" - "@types/webpack-sources" "*" - anymatch "^3.0.0" - source-map "^0.6.0" - -"@types/ws@^8.5.5": - version "8.5.10" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.10.tgz#4acfb517970853fa6574a3a6886791d04a396787" - integrity sha512-vmQSUcfalpIq0R9q7uTo2lXs6eGIpt9wtnLdMv9LVpIjCA/+ufZRozlVoVelIYixx1ugCBKDhn89vnsEGOCx9A== - dependencies: - "@types/node" "*" - -"@typescript-eslint/experimental-utils@^2.5.0": - version "2.34.0" - 
resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-2.34.0.tgz#d3524b644cdb40eebceca67f8cf3e4cc9c8f980f" - integrity sha512-eS6FTkq+wuMJ+sgtuNTtcqavWXqsflWcfBnlYhg/nS4aZ1leewkXGbvBhaapn1q6qf4M71bsR1tez5JTRMuqwA== - dependencies: - "@types/json-schema" "^7.0.3" - "@typescript-eslint/typescript-estree" "2.34.0" - eslint-scope "^5.0.0" - eslint-utils "^2.0.0" - -"@typescript-eslint/typescript-estree@2.34.0": - version "2.34.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz#14aeb6353b39ef0732cc7f1b8285294937cf37d5" - integrity sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg== - dependencies: - debug "^4.1.1" - eslint-visitor-keys "^1.1.0" - glob "^7.1.6" - is-glob "^4.0.1" - lodash "^4.17.15" - semver "^7.3.2" - tsutils "^3.17.1" - -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" - integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== - dependencies: - "@webassemblyjs/helper-numbers" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - -"@webassemblyjs/floating-point-hex-parser@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" - integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== - -"@webassemblyjs/helper-api-error@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" - integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== - 
-"@webassemblyjs/helper-buffer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" - integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== - -"@webassemblyjs/helper-numbers@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" - integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== - dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/helper-wasm-bytecode@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" - integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== - -"@webassemblyjs/helper-wasm-section@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" - integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - -"@webassemblyjs/ieee754@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" - integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.11.6": - version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" - integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" - integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== - -"@webassemblyjs/wasm-edit@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" - integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/helper-wasm-section" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-opt" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - "@webassemblyjs/wast-printer" "1.11.6" - -"@webassemblyjs/wasm-gen@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" - integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wasm-opt@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" - integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== - dependencies: - 
"@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" - integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wast-printer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" - integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@xtuc/long" "4.2.2" - -"@webpack-cli/configtest@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@webpack-cli/configtest/-/configtest-1.2.0.tgz#7b20ce1c12533912c3b217ea68262365fa29a6f5" - integrity sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg== - -"@webpack-cli/info@^1.5.0": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@webpack-cli/info/-/info-1.5.0.tgz#6c78c13c5874852d6e2dd17f08a41f3fe4c261b1" - integrity sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ== - dependencies: - envinfo "^7.7.3" - -"@webpack-cli/serve@^1.7.0": - version "1.7.0" - resolved "https://registry.yarnpkg.com/@webpack-cli/serve/-/serve-1.7.0.tgz#e1993689ac42d2b16e9194376cfb6753f6254db1" - integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q== - 
-"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== - -acorn-jsx@^5.0.0: - version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn@^6.0.7: - version "6.4.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" - integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== - -acorn@^8.7.1, acorn@^8.8.2: - version "8.11.3" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" - integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== - -aggregate-error@^3.0.0: - version 
"3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" - integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== - dependencies: - clean-stack "^2.0.0" - indent-string "^4.0.0" - -ajv-formats@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== - dependencies: - ajv "^8.0.0" - -ajv-keywords@^3.5.2: - version "3.5.2" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" - integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" - integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== - dependencies: - fast-deep-equal "^3.1.3" - -ajv@^6.10.2, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.9.1: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^8.0.0, ajv@^8.9.0: - version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" - integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - 
-ansi-escapes@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" - integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== - -ansi-html-community@^0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" - integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== - -ansi-regex@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" - integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== - -ansi-regex@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.1.tgz#164daac87ab2d6f6db3a29875e2d1766582dabed" - integrity sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g== - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-styles@^3.2.0, ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - 
-anymatch@^3.0.0, anymatch@~3.1.2: - version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" - integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -arch@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" - integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== - -archive-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/archive-type/-/archive-type-4.0.0.tgz#f92e72233056dfc6969472749c267bdb046b1d70" - integrity sha512-zV4Ky0v1F8dBrdYElwTvQhweQ0P7Kwc1aluqJsYtOBP01jXcWCyW2IEfI1YiqsG+Iy7ZR+o5LF1N+PGECBxHWA== - dependencies: - file-type "^4.2.0" - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -array-buffer-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" - integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== - dependencies: - call-bind "^1.0.2" - is-array-buffer "^3.0.1" - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - -array-includes@^3.1.6: - version "3.1.7" - resolved 
"https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.7.tgz#8cd2e01b26f7a3086cbc87271593fe921c62abda" - integrity sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - is-string "^1.0.7" - -array-union@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - integrity sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng== - dependencies: - array-uniq "^1.0.1" - -array-union@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== - -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - integrity sha512-MNha4BWQ6JbwhFhj03YK552f7cb3AzoE8SzeljgChvL1dl3IcvggXVz1DilzySZkCja+CXuZbdW7yATchWn8/Q== - -array.prototype.flat@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz#1476217df8cff17d72ee8f3ba06738db5b387d18" - integrity sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - es-shim-unscopables "^1.0.0" - -array.prototype.flatmap@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz#c9a7c6831db8e719d6ce639190146c24bbd3e527" - integrity sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract 
"^1.22.1" - es-shim-unscopables "^1.0.0" - -array.prototype.tosorted@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/array.prototype.tosorted/-/array.prototype.tosorted-1.1.2.tgz#620eff7442503d66c799d95503f82b475745cefd" - integrity sha512-HuQCHOlk1Weat5jzStICBCd83NxiIMwqDg/dHEsoefabn/hJRj5pVdWcPUSpRrwhwxZOsQassMpgN/xRYFBMIg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - es-shim-unscopables "^1.0.0" - get-intrinsic "^1.2.1" - -arraybuffer.prototype.slice@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz#98bd561953e3e74bb34938e77647179dfe6e9f12" - integrity sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw== - dependencies: - array-buffer-byte-length "^1.0.0" - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - is-array-buffer "^3.0.2" - is-shared-array-buffer "^1.0.2" - -ast-types@0.9.6: - version "0.9.6" - resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.6.tgz#102c9e9e9005d3e7e3829bf0c4fa24ee862ee9b9" - integrity sha512-qEdtR2UH78yyHX/AUNfXmJTlM48XoFZKBdwi1nzkI1mJL21cmbu0cvjxjpkXJ5NENMq42H+hNs8VLJcqXLerBQ== - -astral-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" - integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== - -asynciterator.prototype@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz#8c5df0514936cdd133604dfcc9d3fb93f09b2b62" - integrity sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg== - dependencies: - has-symbols "^1.0.3" - -asynckit@^0.4.0: - version "0.4.0" - resolved 
"https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - -available-typed-arrays@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" - integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== - -axios@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" - integrity sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg== - dependencies: - follow-redirects "^1.15.0" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - -babel-eslint@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232" - integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg== - dependencies: - "@babel/code-frame" "^7.0.0" - "@babel/parser" "^7.7.0" - "@babel/traverse" "^7.7.0" - "@babel/types" "^7.7.0" - eslint-visitor-keys "^1.0.0" - resolve "^1.12.0" - -babel-helper-evaluate-path@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-helper-evaluate-path/-/babel-helper-evaluate-path-0.5.0.tgz#a62fa9c4e64ff7ea5cea9353174ef023a900a67c" - integrity sha512-mUh0UhS607bGh5wUMAQfOpt2JX2ThXMtppHRdRU1kL7ZLRWIXxoV2UIV1r2cAeeNeU1M5SB5/RSUgUxrK8yOkA== - -babel-helper-flip-expressions@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-helper-flip-expressions/-/babel-helper-flip-expressions-0.4.3.tgz#3696736a128ac18bc25254b5f40a22ceb3c1d3fd" - integrity sha512-rSrkRW4YQ2ETCWww9gbsWk4N0x1BOtln349Tk0dlCS90oT68WMLyGR7WvaMp3eAnsVrCqdUtC19lo1avyGPejA== - -babel-helper-is-nodes-equiv@^0.0.1: - version "0.0.1" 
- resolved "https://registry.yarnpkg.com/babel-helper-is-nodes-equiv/-/babel-helper-is-nodes-equiv-0.0.1.tgz#34e9b300b1479ddd98ec77ea0bbe9342dfe39684" - integrity sha512-ri/nsMFVRqXn7IyT5qW4/hIAGQxuYUFHa3qsxmPtbk6spZQcYlyDogfVpNm2XYOslH/ULS4VEJGUqQX5u7ACQw== - -babel-helper-is-void-0@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-helper-is-void-0/-/babel-helper-is-void-0-0.4.3.tgz#7d9c01b4561e7b95dbda0f6eee48f5b60e67313e" - integrity sha512-07rBV0xPRM3TM5NVJEOQEkECX3qnHDjaIbFvWYPv+T1ajpUiVLiqTfC+MmiZxY5KOL/Ec08vJdJD9kZiP9UkUg== - -babel-helper-mark-eval-scopes@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-helper-mark-eval-scopes/-/babel-helper-mark-eval-scopes-0.4.3.tgz#d244a3bef9844872603ffb46e22ce8acdf551562" - integrity sha512-+d/mXPP33bhgHkdVOiPkmYoeXJ+rXRWi7OdhwpyseIqOS8CmzHQXHUp/+/Qr8baXsT0kjGpMHHofHs6C3cskdA== - -babel-helper-remove-or-void@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-helper-remove-or-void/-/babel-helper-remove-or-void-0.4.3.tgz#a4f03b40077a0ffe88e45d07010dee241ff5ae60" - integrity sha512-eYNceYtcGKpifHDir62gHJadVXdg9fAhuZEXiRQnJJ4Yi4oUTpqpNY//1pM4nVyjjDMPYaC2xSf0I+9IqVzwdA== - -babel-helper-to-multiple-sequence-expressions@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-helper-to-multiple-sequence-expressions/-/babel-helper-to-multiple-sequence-expressions-0.5.0.tgz#a3f924e3561882d42fcf48907aa98f7979a4588d" - integrity sha512-m2CvfDW4+1qfDdsrtf4dwOslQC3yhbgyBFptncp4wvtdrDHqueW7slsYv4gArie056phvQFhT2nRcGS4bnm6mA== - -babel-loader@^8.0.6: - version "8.3.0" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.3.0.tgz#124936e841ba4fe8176786d6ff28add1f134d6a8" - integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== - dependencies: - find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-minify-builtins@^0.5.0: - version 
"0.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-builtins/-/babel-plugin-minify-builtins-0.5.0.tgz#31eb82ed1a0d0efdc31312f93b6e4741ce82c36b" - integrity sha512-wpqbN7Ov5hsNwGdzuzvFcjgRlzbIeVv1gMIlICbPj0xkexnfoIDe7q+AZHMkQmAE/F9R5jkrB6TLfTegImlXag== - -babel-plugin-minify-constant-folding@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-constant-folding/-/babel-plugin-minify-constant-folding-0.5.0.tgz#f84bc8dbf6a561e5e350ff95ae216b0ad5515b6e" - integrity sha512-Vj97CTn/lE9hR1D+jKUeHfNy+m1baNiJ1wJvoGyOBUx7F7kJqDZxr9nCHjO/Ad+irbR3HzR6jABpSSA29QsrXQ== - dependencies: - babel-helper-evaluate-path "^0.5.0" - -babel-plugin-minify-dead-code-elimination@^0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-dead-code-elimination/-/babel-plugin-minify-dead-code-elimination-0.5.2.tgz#f386ceec77a80cc4e76022a04c21b7d68e0aa5eb" - integrity sha512-krq9Lwi0QIzyAlcNBXTL4usqUvevB4BzktdEsb8srcXC1AaYqRJiAQw6vdKdJSaXbz6snBvziGr6ch/aoRCfpA== - dependencies: - babel-helper-evaluate-path "^0.5.0" - babel-helper-mark-eval-scopes "^0.4.3" - babel-helper-remove-or-void "^0.4.3" - lodash "^4.17.11" - -babel-plugin-minify-flip-comparisons@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-flip-comparisons/-/babel-plugin-minify-flip-comparisons-0.4.3.tgz#00ca870cb8f13b45c038b3c1ebc0f227293c965a" - integrity sha512-8hNwgLVeJzpeLVOVArag2DfTkbKodzOHU7+gAZ8mGBFGPQHK6uXVpg3jh5I/F6gfi5Q5usWU2OKcstn1YbAV7A== - dependencies: - babel-helper-is-void-0 "^0.4.3" - -babel-plugin-minify-guarded-expressions@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-guarded-expressions/-/babel-plugin-minify-guarded-expressions-0.4.4.tgz#818960f64cc08aee9d6c75bec6da974c4d621135" - integrity sha512-RMv0tM72YuPPfLT9QLr3ix9nwUIq+sHT6z8Iu3sLbqldzC1Dls8DPCywzUIzkTx9Zh1hWX4q/m9BPoPed9GOfA== - dependencies: - babel-helper-evaluate-path "^0.5.0" - 
babel-helper-flip-expressions "^0.4.3" - -babel-plugin-minify-infinity@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-infinity/-/babel-plugin-minify-infinity-0.4.3.tgz#dfb876a1b08a06576384ef3f92e653ba607b39ca" - integrity sha512-X0ictxCk8y+NvIf+bZ1HJPbVZKMlPku3lgYxPmIp62Dp8wdtbMLSekczty3MzvUOlrk5xzWYpBpQprXUjDRyMA== - -babel-plugin-minify-mangle-names@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-mangle-names/-/babel-plugin-minify-mangle-names-0.5.1.tgz#3dfba7f4e649ff37a767542ea0d1093bee3bb155" - integrity sha512-8KMichAOae2FHlipjNDTo2wz97MdEb2Q0jrn4NIRXzHH7SJ3c5TaNNBkeTHbk9WUsMnqpNUx949ugM9NFWewzw== - dependencies: - babel-helper-mark-eval-scopes "^0.4.3" - -babel-plugin-minify-numeric-literals@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-numeric-literals/-/babel-plugin-minify-numeric-literals-0.4.3.tgz#8e4fd561c79f7801286ff60e8c5fd9deee93c0bc" - integrity sha512-5D54hvs9YVuCknfWywq0eaYDt7qYxlNwCqW9Ipm/kYeS9gYhJd0Rr/Pm2WhHKJ8DC6aIlDdqSBODSthabLSX3A== - -babel-plugin-minify-replace@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-replace/-/babel-plugin-minify-replace-0.5.0.tgz#d3e2c9946c9096c070efc96761ce288ec5c3f71c" - integrity sha512-aXZiaqWDNUbyNNNpWs/8NyST+oU7QTpK7J9zFEFSA0eOmtUNMU3fczlTTTlnCxHmq/jYNFEmkkSG3DDBtW3Y4Q== - -babel-plugin-minify-simplify@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/babel-plugin-minify-simplify/-/babel-plugin-minify-simplify-0.5.1.tgz#f21613c8b95af3450a2ca71502fdbd91793c8d6a" - integrity sha512-OSYDSnoCxP2cYDMk9gxNAed6uJDiDz65zgL6h8d3tm8qXIagWGMLWhqysT6DY3Vs7Fgq7YUDcjOomhVUb+xX6A== - dependencies: - babel-helper-evaluate-path "^0.5.0" - babel-helper-flip-expressions "^0.4.3" - babel-helper-is-nodes-equiv "^0.0.1" - babel-helper-to-multiple-sequence-expressions "^0.5.0" - -babel-plugin-minify-type-constructors@^0.4.3: - version "0.4.3" - resolved 
"https://registry.yarnpkg.com/babel-plugin-minify-type-constructors/-/babel-plugin-minify-type-constructors-0.4.3.tgz#1bc6f15b87f7ab1085d42b330b717657a2156500" - integrity sha512-4ADB0irJ/6BeXWHubjCJmrPbzhxDgjphBMjIjxCc25n4NGJ00NsYqwYt+F/OvE9RXx8KaSW7cJvp+iZX436tnQ== - dependencies: - babel-helper-is-void-0 "^0.4.3" - -babel-plugin-module-resolver@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/babel-plugin-module-resolver/-/babel-plugin-module-resolver-4.1.0.tgz#22a4f32f7441727ec1fbf4967b863e1e3e9f33e2" - integrity sha512-MlX10UDheRr3lb3P0WcaIdtCSRlxdQsB1sBqL7W0raF070bGl1HQQq5K3T2vf2XAYie+ww+5AKC/WrkjRO2knA== - dependencies: - find-babel-config "^1.2.0" - glob "^7.1.6" - pkg-up "^3.1.0" - reselect "^4.0.0" - resolve "^1.13.1" - -babel-plugin-polyfill-corejs2@^0.4.7: - version "0.4.8" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.8.tgz#dbcc3c8ca758a290d47c3c6a490d59429b0d2269" - integrity sha512-OtIuQfafSzpo/LhnJaykc0R/MMnuLSSVjVYy9mHArIZ9qTCSZ6TpWCuEKZYVoN//t8HqBNScHrOtCrIK5IaGLg== - dependencies: - "@babel/compat-data" "^7.22.6" - "@babel/helper-define-polyfill-provider" "^0.5.0" - semver "^6.3.1" - -babel-plugin-polyfill-corejs3@^0.8.7: - version "0.8.7" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.7.tgz#941855aa7fdaac06ed24c730a93450d2b2b76d04" - integrity sha512-KyDvZYxAzkC0Aj2dAPyDzi2Ym15e5JKZSK+maI7NAwSqofvuFglbSsxE7wUOvTg9oFVnHMzVzBKcqEb4PJgtOA== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.4" - core-js-compat "^3.33.1" - -babel-plugin-polyfill-regenerator@^0.5.4: - version "0.5.5" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.5.tgz#8b0c8fc6434239e5d7b8a9d1f832bb2b0310f06a" - integrity sha512-OJGYZlhLqBh2DDHeqAxWB1XIvr49CxiJ2gIt61/PU55CQK4Z58OzMqjDe1zwQdQk+rBYsRc+1rJmdajM3gimHg== - dependencies: - 
"@babel/helper-define-polyfill-provider" "^0.5.0" - -babel-plugin-transform-inline-consecutive-adds@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-inline-consecutive-adds/-/babel-plugin-transform-inline-consecutive-adds-0.4.3.tgz#323d47a3ea63a83a7ac3c811ae8e6941faf2b0d1" - integrity sha512-8D104wbzzI5RlxeVPYeQb9QsUyepiH1rAO5hpPpQ6NPRgQLpIVwkS/Nbx944pm4K8Z+rx7CgjPsFACz/VCBN0Q== - -babel-plugin-transform-member-expression-literals@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-member-expression-literals/-/babel-plugin-transform-member-expression-literals-6.9.4.tgz#37039c9a0c3313a39495faac2ff3a6b5b9d038bf" - integrity sha512-Xq9/Rarpj+bjOZSl1nBbZYETsNEDDJSrb6Plb1sS3/36FukWFLLRysgecva5KZECjUJTrJoQqjJgtWToaflk5Q== - -babel-plugin-transform-merge-sibling-variables@^6.9.5: - version "6.9.5" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-merge-sibling-variables/-/babel-plugin-transform-merge-sibling-variables-6.9.5.tgz#0b2faa9e027ef47d4e7502f77cd1a7f3a6dfbc7b" - integrity sha512-xj/KrWi6/uP+DrD844h66Qh2cZN++iugEIgH8QcIxhmZZPNP6VpOE9b4gP2FFW39xDAY43kCmYMM6U0QNKN8fw== - -babel-plugin-transform-minify-booleans@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-minify-booleans/-/babel-plugin-transform-minify-booleans-6.9.4.tgz#acbb3e56a3555dd23928e4b582d285162dd2b198" - integrity sha512-9pW9ePng6DZpzGPalcrULuhSCcauGAbn8AeU3bE34HcDkGm8Ldt0ysjGkyb64f0K3T5ilV4mriayOVv5fg0ASA== - -babel-plugin-transform-property-literals@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-property-literals/-/babel-plugin-transform-property-literals-6.9.4.tgz#98c1d21e255736573f93ece54459f6ce24985d39" - integrity sha512-Pf8JHTjTPxecqVyL6KSwD/hxGpoTZjiEgV7nCx0KFQsJYM0nuuoCajbg09KRmZWeZbJ5NGTySABYv8b/hY1eEA== - dependencies: - esutils "^2.0.2" - -babel-plugin-transform-regexp-constructors@^0.4.3: - version "0.4.3" - resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-regexp-constructors/-/babel-plugin-transform-regexp-constructors-0.4.3.tgz#58b7775b63afcf33328fae9a5f88fbd4fb0b4965" - integrity sha512-JjymDyEyRNhAoNFp09y/xGwYVYzT2nWTGrBrWaL6eCg2m+B24qH2jR0AA8V8GzKJTgC8NW6joJmc6nabvWBD/g== - -babel-plugin-transform-remove-console@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-remove-console/-/babel-plugin-transform-remove-console-6.9.4.tgz#b980360c067384e24b357a588d807d3c83527780" - integrity sha512-88blrUrMX3SPiGkT1GnvVY8E/7A+k6oj3MNvUtTIxJflFzXTw1bHkuJ/y039ouhFMp2prRn5cQGzokViYi1dsg== - -babel-plugin-transform-remove-debugger@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-remove-debugger/-/babel-plugin-transform-remove-debugger-6.9.4.tgz#42b727631c97978e1eb2d199a7aec84a18339ef2" - integrity sha512-Kd+eTBYlXfwoFzisburVwrngsrz4xh9I0ppoJnU/qlLysxVBRgI4Pj+dk3X8F5tDiehp3hhP8oarRMT9v2Z3lw== - -babel-plugin-transform-remove-undefined@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-remove-undefined/-/babel-plugin-transform-remove-undefined-0.5.0.tgz#80208b31225766c630c97fa2d288952056ea22dd" - integrity sha512-+M7fJYFaEE/M9CXa0/IRkDbiV3wRELzA1kKQFCJ4ifhrzLKn/9VCCgj9OFmYWwBd8IB48YdgPkHYtbYq+4vtHQ== - dependencies: - babel-helper-evaluate-path "^0.5.0" - -babel-plugin-transform-simplify-comparison-operators@^6.9.4: - version "6.9.4" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-simplify-comparison-operators/-/babel-plugin-transform-simplify-comparison-operators-6.9.4.tgz#f62afe096cab0e1f68a2d753fdf283888471ceb9" - integrity sha512-GLInxhGAQWJ9YIdjwF6dAFlmh4U+kN8pL6Big7nkDzHoZcaDQOtBm28atEhQJq6m9GpAovbiGEShKqXv4BSp0A== - -babel-plugin-transform-undefined-to-void@^6.9.4: - version "6.9.4" - resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-undefined-to-void/-/babel-plugin-transform-undefined-to-void-6.9.4.tgz#be241ca81404030678b748717322b89d0c8fe280" - integrity sha512-D2UbwxawEY1xVc9svYAUZQM2xarwSNXue2qDIx6CeV2EuMGaes/0su78zlIDIAgE7BvnMw4UpmSo9fDy+znghg== - -babel-preset-minify@^0.5.1: - version "0.5.2" - resolved "https://registry.yarnpkg.com/babel-preset-minify/-/babel-preset-minify-0.5.2.tgz#4d5be8b1c21d126ac403a3fd002d8b5fb7bb3c34" - integrity sha512-v4GL+kk0TfovbRIKZnC3HPbu2cAGmPAby7BsOmuPdMJfHV+4FVdsGXTH/OOGQRKYdjemBuL1+MsE6mobobhe9w== - dependencies: - babel-plugin-minify-builtins "^0.5.0" - babel-plugin-minify-constant-folding "^0.5.0" - babel-plugin-minify-dead-code-elimination "^0.5.2" - babel-plugin-minify-flip-comparisons "^0.4.3" - babel-plugin-minify-guarded-expressions "^0.4.4" - babel-plugin-minify-infinity "^0.4.3" - babel-plugin-minify-mangle-names "^0.5.1" - babel-plugin-minify-numeric-literals "^0.4.3" - babel-plugin-minify-replace "^0.5.0" - babel-plugin-minify-simplify "^0.5.1" - babel-plugin-minify-type-constructors "^0.4.3" - babel-plugin-transform-inline-consecutive-adds "^0.4.3" - babel-plugin-transform-member-expression-literals "^6.9.4" - babel-plugin-transform-merge-sibling-variables "^6.9.5" - babel-plugin-transform-minify-booleans "^6.9.4" - babel-plugin-transform-property-literals "^6.9.4" - babel-plugin-transform-regexp-constructors "^0.4.3" - babel-plugin-transform-remove-console "^6.9.4" - babel-plugin-transform-remove-debugger "^6.9.4" - babel-plugin-transform-remove-undefined "^0.5.0" - babel-plugin-transform-simplify-comparison-operators "^6.9.4" - babel-plugin-transform-undefined-to-void "^6.9.4" - lodash "^4.17.11" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -base64-js@^1.3.1: 
- version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -batch@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw== - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - -bin-build@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bin-build/-/bin-build-3.0.0.tgz#c5780a25a8a9f966d8244217e6c1f5082a143861" - integrity sha512-jcUOof71/TNAI2uM5uoUaDq2ePcVBQ3R/qhxAz1rX7UfvduAL/RXD3jXzvn8cVcDJdGVkiR1shal3OH0ImpuhA== - dependencies: - decompress "^4.0.0" - download "^6.2.2" - execa "^0.7.0" - p-map-series "^1.0.0" - tempfile "^2.0.0" - -bin-check@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bin-check/-/bin-check-4.1.0.tgz#fc495970bdc88bb1d5a35fc17e65c4a149fc4a49" - integrity sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA== - dependencies: - execa "^0.7.0" - executable "^4.1.0" - -bin-version-check@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/bin-version-check/-/bin-version-check-4.0.0.tgz#7d819c62496991f80d893e6e02a3032361608f71" - integrity sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ== - dependencies: - bin-version "^3.0.0" - semver "^5.6.0" - semver-truncate "^1.1.2" - -bin-version@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/bin-version/-/bin-version-3.1.0.tgz#5b09eb280752b1bd28f0c9db3f96f2f43b6c0839" - integrity 
sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ== - dependencies: - execa "^1.0.0" - find-versions "^3.0.0" - -bin-wrapper@^4.0.0, bin-wrapper@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bin-wrapper/-/bin-wrapper-4.1.0.tgz#99348f2cf85031e3ef7efce7e5300aeaae960605" - integrity sha512-hfRmo7hWIXPkbpi0ZltboCMVrU+0ClXR/JgbCKKjlDjQf6igXa7OwdqNcFWQZPZTgiY7ZpzE3+LjjkLiTN2T7Q== - dependencies: - bin-check "^4.1.0" - bin-version-check "^4.0.0" - download "^7.1.0" - import-lazy "^3.1.0" - os-filter-obj "^2.0.0" - pify "^4.0.1" - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bl@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" - integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== - dependencies: - readable-stream "^2.3.5" - safe-buffer "^5.1.1" - -body-parser@1.20.1: - version "1.20.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" - integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.11.0" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -bonjour-service@^1.0.11: - version "1.2.1" - resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.2.1.tgz#eb41b3085183df3321da1264719fbada12478d02" - integrity sha512-oSzCS2zV14bh2kji6vNe7vrpJYCHGvcZnlffFQ1MEoX/WOeQ/teD8SYWKR942OI3INjq8OMNJlbPK5LLLUxFDw== - dependencies: - 
fast-deep-equal "^3.1.3" - multicast-dns "^7.2.5" - -boolbase@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" - integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browserslist@^4.14.5, browserslist@^4.22.2: - version "4.22.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" - integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== - dependencies: - caniuse-lite "^1.0.30001565" - electron-to-chromium "^1.4.601" - node-releases "^2.0.14" - update-browserslist-db "^1.0.13" - -buffer-alloc-unsafe@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" - integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== - -buffer-alloc@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" - integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== - dependencies: - buffer-alloc-unsafe 
"^1.1.0" - buffer-fill "^1.0.0" - -buffer-crc32@~0.2.3: - version "0.2.13" - resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" - integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== - -buffer-fill@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" - integrity sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ== - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -buffer@^5.2.1: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw== - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -cacache@^15.0.5: - version "15.3.0" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.3.0.tgz#dc85380fb2f556fe3dda4c719bfa0ec875a7f1eb" - integrity sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ== - dependencies: - "@npmcli/fs" "^1.0.0" - "@npmcli/move-file" "^1.0.1" - chownr "^2.0.0" - 
fs-minipass "^2.0.0" - glob "^7.1.4" - infer-owner "^1.0.4" - lru-cache "^6.0.0" - minipass "^3.1.1" - minipass-collect "^1.0.2" - minipass-flush "^1.0.5" - minipass-pipeline "^1.2.2" - mkdirp "^1.0.3" - p-map "^4.0.0" - promise-inflight "^1.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.0.2" - unique-filename "^1.1.1" - -cacheable-request@^2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d" - integrity sha512-vag0O2LKZ/najSoUwDbVlnlCFvhBE/7mGTY2B5FgCBDcRD+oVV1HYTOwM6JZfMg/hIcM6IwnTZ1uQQL5/X3xIQ== - dependencies: - clone-response "1.0.2" - get-stream "3.0.0" - http-cache-semantics "3.8.1" - keyv "3.0.0" - lowercase-keys "1.0.0" - normalize-url "2.0.1" - responselike "1.0.2" - -call-bind@^1.0.0, call-bind@^1.0.2, call-bind@^1.0.4, call-bind@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" - integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== - dependencies: - function-bind "^1.1.2" - get-intrinsic "^1.2.1" - set-function-length "^1.1.1" - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camel-case@3.0.x: - version "3.0.0" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-3.0.0.tgz#ca3c3688a4e9cf3a4cda777dc4dcbc713249cf73" - integrity sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w== - dependencies: - no-case "^2.2.0" - upper-case "^1.1.1" - -camel-case@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" - integrity 
sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== - dependencies: - pascal-case "^3.1.2" - tslib "^2.0.3" - -caniuse-lite@^1.0.30001565: - version "1.0.30001579" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001579.tgz#45c065216110f46d6274311a4b3fcf6278e0852a" - integrity sha512-u5AUVkixruKHJjw/pj9wISlcMpgFWzSrczLZbrqBSxukQixmg0SJ5sZTpvaFvxU0HoQKd4yoyAogyrAz9pzJnA== - -caw@^2.0.0, caw@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/caw/-/caw-2.0.1.tgz#6c3ca071fc194720883c2dc5da9b074bfc7e9e95" - integrity sha512-Cg8/ZSBEa8ZVY9HspcGUYaK63d/bN7rqS3CYCzEGUxuYv6UlmcjzDUz2fCFFHyTvUW5Pk0I+3hkA3iXlIj6guA== - dependencies: - get-proxy "^2.0.0" - isurl "^1.0.0-alpha5" - tunnel-agent "^0.6.0" - url-to-options "^1.0.1" - -chalk@^2.1.0, chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^4.1.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chardet@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" - integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== - -chokidar@^3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity 
sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -chownr@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" - integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== - -chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== - -clean-css@4.2.x: - version "4.2.4" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.4.tgz#733bf46eba4e607c6891ea57c24a989356831178" - integrity sha512-EJUDT7nDVFDvaQgAo2G/PJvxmp1o/c6iXLbswsBbUFXi1Nr+AjA2cKmfbKDMjMvzEe75g3P6JkaDDAKk96A85A== - dependencies: - source-map "~0.6.0" - -clean-css@^5.2.2: - version "5.3.3" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.3.tgz#b330653cd3bd6b75009cc25c714cae7b93351ccd" - integrity sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg== - dependencies: - source-map "~0.6.0" - -clean-stack@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" - integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== - -clean-webpack-plugin@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/clean-webpack-plugin/-/clean-webpack-plugin-3.0.0.tgz#a99d8ec34c1c628a4541567aa7b457446460c62b" - integrity 
sha512-MciirUH5r+cYLGCOL5JX/ZLzOZbVr1ot3Fw+KcvbhUb6PM+yycqd9ZhIlcigQ5gl+XhppNmw3bEFuaaMNyLj3A== - dependencies: - "@types/webpack" "^4.4.31" - del "^4.1.1" - -cli-cursor@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" - integrity sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw== - dependencies: - restore-cursor "^2.0.0" - -cli-width@^2.0.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" - integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== - -clipboard-copy@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/clipboard-copy/-/clipboard-copy-3.2.0.tgz#3c5b8651d3512dcfad295d77a9eb09e7fac8d5fb" - integrity sha512-vooFaGFL6ulEP1liiaWFBmmfuPm3cY3y7T9eB83ZTnYc/oFeAKsq3NcDrOkBC8XaauEE8zHQwI7k0+JSYiVQSQ== - -clone-deep@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== - dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" - -clone-response@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== - dependencies: - mimic-response "^1.0.0" - -clone@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" - integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== - -color-convert@^1.9.0: - version "1.9.3" - resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -colorette@^2.0.10, colorette@^2.0.14: - version "2.0.20" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" - integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== - -combined-stream@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@2.17.x: - version "2.17.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.17.1.tgz#bd77ab7de6de94205ceacc72f1716d29f20a77bf" - integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg== - -commander@^2.20.0, 
commander@^2.8.1: - version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@^7.0.0, commander@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -commander@^8.3.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - -commander@~2.19.0: - version "2.19.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a" - integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg== - -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== - -compressible@~2.0.16: - version "2.0.18" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression-webpack-plugin@^7.1.2: - version "7.1.2" - resolved "https://registry.yarnpkg.com/compression-webpack-plugin/-/compression-webpack-plugin-7.1.2.tgz#f9a1ba84d4879693e29726f6884b382940876597" - integrity sha512-9DKNW6ILLjx+bNBoviHDgLx6swBhWWH9ApClC9sTH2NoFfQM47BapQfovCm9zjD9v1uZwInF5a925FB9ErGQeQ== - dependencies: - schema-utils "^3.0.0" 
- serialize-javascript "^5.0.1" - -compression@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -config-chain@^1.1.11: - version "1.1.13" - resolved "https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.13.tgz#fad0795aa6a6cdaff9ed1b68e9dff94372c232f4" - integrity sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ== - dependencies: - ini "^1.3.4" - proto-list "~1.2.1" - -connect-history-api-fallback@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" - integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== - -content-disposition@0.5.4, content-disposition@^0.5.2: - version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" - integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== - 
-convert-source-map@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" - integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== - -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - -copy-anything@^2.0.1: - version "2.0.6" - resolved "https://registry.yarnpkg.com/copy-anything/-/copy-anything-2.0.6.tgz#092454ea9584a7b7ad5573062b2a87f5900fc480" - integrity sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw== - dependencies: - is-what "^3.14.1" - -copy-webpack-plugin@^6.2.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-6.4.1.tgz#138cd9b436dbca0a6d071720d5414848992ec47e" - integrity sha512-MXyPCjdPVx5iiWyl40Va3JGh27bKzOTNY3NjUTrosD2q7dR/cLD0013uqJ3BpFbUjyONINjb6qI7nDIJujrMbA== - dependencies: - cacache "^15.0.5" - fast-glob "^3.2.4" - find-cache-dir "^3.3.1" - glob-parent "^5.1.1" - globby "^11.0.1" - loader-utils "^2.0.0" - normalize-path "^3.0.0" - p-limit "^3.0.2" - schema-utils "^3.0.0" - serialize-javascript "^5.0.1" - webpack-sources "^1.4.3" - -core-js-compat@^3.31.0, core-js-compat@^3.33.1: - version "3.35.0" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.35.0.tgz#c149a3d1ab51e743bc1da61e39cb51f461a41873" - integrity 
sha512-5blwFAddknKeNgsjBzilkdQ0+YK8L1PfqPYq40NOYMYFSS38qj+hpTcLLWwpIwA2A5bje/x5jmVn2tzUMg9IVw== - dependencies: - browserslist "^4.22.2" - -core-util-is@~1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -cross-spawn@^5.0.1: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - integrity sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A== - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -cross-spawn@^6.0.0, cross-spawn@^6.0.5: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - -cross-spawn@^7.0.0, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -css-loader@^5.2.4: - version "5.2.7" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-5.2.7.tgz#9b9f111edf6fb2be5dc62525644cbc9c232064ae" - integrity sha512-Q7mOvpBNBG7YrVGMxRxcBJZFL75o+cH2abNASdibkj/fffYD8qWbInZrD0S9ccI6vZclF3DsHE7njGlLtaHbhg== - dependencies: - icss-utils "^5.1.0" - loader-utils "^2.0.0" - postcss "^8.2.15" - postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" - postcss-modules-scope "^3.0.0" - 
postcss-modules-values "^4.0.0" - postcss-value-parser "^4.1.0" - schema-utils "^3.0.0" - semver "^7.3.5" - -css-select@^4.1.3: - version "4.3.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" - integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== - dependencies: - boolbase "^1.0.0" - css-what "^6.0.1" - domhandler "^4.3.1" - domutils "^2.8.0" - nth-check "^2.0.1" - -css-tree@^1.1.2, css-tree@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== - dependencies: - mdn-data "2.0.14" - source-map "^0.6.1" - -css-what@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" - integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== - -cssesc@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" - integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== - -csso@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" - integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== - dependencies: - css-tree "^1.1.2" - -cwebp-bin@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/cwebp-bin/-/cwebp-bin-7.0.1.tgz#cb1303bf43f645ba5b2ece342773c4a93574d4f4" - integrity sha512-Ko5ADY74/dbfd8xG0+f+MUP9UKjCe1TG4ehpW0E5y4YlPdwDJlGrSzSR4/Yonxpm9QmZE1RratkIxFlKeyo3FA== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.1" - -debug@2.6.9: - version "2.6.9" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -decode-uri-component@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" - integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== - -decompress-response@^3.2.0, decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" - integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== - dependencies: - mimic-response "^1.0.0" - -decompress-tar@^4.0.0, decompress-tar@^4.1.0, decompress-tar@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-tar/-/decompress-tar-4.1.1.tgz#718cbd3fcb16209716e70a26b84e7ba4592e5af1" - integrity sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ== - dependencies: - file-type "^5.2.0" - is-stream "^1.1.0" - tar-stream "^1.5.2" - -decompress-tarbz2@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz#3082a5b880ea4043816349f378b56c516be1a39b" - integrity sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A== - dependencies: - decompress-tar "^4.1.0" - file-type "^6.1.0" - is-stream "^1.1.0" - seek-bzip 
"^1.0.5" - unbzip2-stream "^1.0.9" - -decompress-targz@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-targz/-/decompress-targz-4.1.1.tgz#c09bc35c4d11f3de09f2d2da53e9de23e7ce1eee" - integrity sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w== - dependencies: - decompress-tar "^4.1.1" - file-type "^5.2.0" - is-stream "^1.1.0" - -decompress-unzip@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/decompress-unzip/-/decompress-unzip-4.0.1.tgz#deaaccdfd14aeaf85578f733ae8210f9b4848f69" - integrity sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw== - dependencies: - file-type "^3.8.0" - get-stream "^2.2.0" - pify "^2.3.0" - yauzl "^2.4.2" - -decompress@^4.0.0, decompress@^4.2.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/decompress/-/decompress-4.2.1.tgz#007f55cc6a62c055afa37c07eb6a4ee1b773f118" - integrity sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ== - dependencies: - decompress-tar "^4.0.0" - decompress-tarbz2 "^4.0.0" - decompress-targz "^4.0.0" - decompress-unzip "^4.0.1" - graceful-fs "^4.1.10" - make-dir "^1.0.0" - pify "^2.3.0" - strip-dirs "^2.0.0" - -deep-is@~0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -default-gateway@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" - integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== - dependencies: - execa "^5.0.0" - -define-data-property@^1.0.1, define-data-property@^1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" - integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== - dependencies: - get-intrinsic "^1.2.1" - gopd "^1.0.1" - has-property-descriptors "^1.0.0" - -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== - -define-properties@^1.1.3, define-properties@^1.2.0, define-properties@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" - integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== - dependencies: - define-data-property "^1.0.1" - has-property-descriptors "^1.0.0" - object-keys "^1.1.1" - -del@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" - integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== - dependencies: - "@types/glob" "^7.1.1" - globby "^6.1.0" - is-path-cwd "^2.0.0" - is-path-in-cwd "^2.0.0" - p-map "^2.0.0" - pify "^4.0.1" - rimraf "^2.6.3" - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - -depd@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -depd@~1.1.2: - 
version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== - -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - -detect-node@^2.0.4: - version "2.1.0" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -dns-packet@^5.2.2: - version "5.6.1" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.1.tgz#ae888ad425a9d1478a0674256ab866de1012cf2f" - integrity sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw== - dependencies: - "@leichtgewicht/ip-codec" "^2.0.1" - -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -dom-converter@^0.2.0: - version "0.2.0" - 
resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - -dom-serializer@^1.0.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" - integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.2.0" - entities "^2.0.0" - -domelementtype@^2.0.1, domelementtype@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" - integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== - -domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" - integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== - dependencies: - domelementtype "^2.2.0" - -domutils@^2.5.2, domutils@^2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" - integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== - dependencies: - dom-serializer "^1.0.1" - domelementtype "^2.2.0" - domhandler "^4.2.0" - -dot-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" - integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -dotenv@^16.0.2: - version "16.3.1" - resolved 
"https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" - integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== - -download@^6.2.2: - version "6.2.5" - resolved "https://registry.yarnpkg.com/download/-/download-6.2.5.tgz#acd6a542e4cd0bb42ca70cfc98c9e43b07039714" - integrity sha512-DpO9K1sXAST8Cpzb7kmEhogJxymyVUd5qz/vCOSyvwtp2Klj2XcDt5YUuasgxka44SxF0q5RriKIwJmQHG2AuA== - dependencies: - caw "^2.0.0" - content-disposition "^0.5.2" - decompress "^4.0.0" - ext-name "^5.0.0" - file-type "5.2.0" - filenamify "^2.0.0" - get-stream "^3.0.0" - got "^7.0.0" - make-dir "^1.0.0" - p-event "^1.0.0" - pify "^3.0.0" - -download@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/download/-/download-7.1.0.tgz#9059aa9d70b503ee76a132897be6dec8e5587233" - integrity sha512-xqnBTVd/E+GxJVrX5/eUJiLYjCGPwMpdL+jGhGU57BvtcA7wwhtHVbXBeUk51kOpW3S7Jn3BQbN9Q1R1Km2qDQ== - dependencies: - archive-type "^4.0.0" - caw "^2.0.1" - content-disposition "^0.5.2" - decompress "^4.2.0" - ext-name "^5.0.0" - file-type "^8.1.0" - filenamify "^2.0.0" - get-stream "^3.0.0" - got "^8.3.1" - make-dir "^1.2.0" - p-event "^2.1.0" - pify "^3.0.0" - -duplexer3@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" - integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -electron-to-chromium@^1.4.601: - version "1.4.639" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.639.tgz#c6f9cc685f9efb2980d2cfc95a27f8142c9adf28" - integrity 
sha512-CkKf3ZUVZchr+zDpAlNLEEy2NJJ9T64ULWaDgy3THXXlPVPkLu3VOs9Bac44nebVtdwl2geSj6AxTtGDOxoXhg== - -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== - -emojis-list@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -end-of-stream@^1.0.0, end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -enhanced-resolve@^5.0.0, enhanced-resolve@^5.15.0: - version "5.15.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" - integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== - dependencies: - graceful-fs "^4.2.4" - tapable "^2.2.0" - -entities@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" - integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== - -envinfo@^7.7.3: - version "7.11.0" - resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.11.0.tgz#c3793f44284a55ff8c82faf1ffd91bc6478ea01f" 
- integrity sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg== - -errno@^0.1.1: - version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" - integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== - dependencies: - prr "~1.0.1" - -es-abstract@^1.22.1: - version "1.22.3" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.3.tgz#48e79f5573198de6dee3589195727f4f74bc4f32" - integrity sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA== - dependencies: - array-buffer-byte-length "^1.0.0" - arraybuffer.prototype.slice "^1.0.2" - available-typed-arrays "^1.0.5" - call-bind "^1.0.5" - es-set-tostringtag "^2.0.1" - es-to-primitive "^1.2.1" - function.prototype.name "^1.1.6" - get-intrinsic "^1.2.2" - get-symbol-description "^1.0.0" - globalthis "^1.0.3" - gopd "^1.0.1" - has-property-descriptors "^1.0.0" - has-proto "^1.0.1" - has-symbols "^1.0.3" - hasown "^2.0.0" - internal-slot "^1.0.5" - is-array-buffer "^3.0.2" - is-callable "^1.2.7" - is-negative-zero "^2.0.2" - is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" - is-string "^1.0.7" - is-typed-array "^1.1.12" - is-weakref "^1.0.2" - object-inspect "^1.13.1" - object-keys "^1.1.1" - object.assign "^4.1.4" - regexp.prototype.flags "^1.5.1" - safe-array-concat "^1.0.1" - safe-regex-test "^1.0.0" - string.prototype.trim "^1.2.8" - string.prototype.trimend "^1.0.7" - string.prototype.trimstart "^1.0.7" - typed-array-buffer "^1.0.0" - typed-array-byte-length "^1.0.0" - typed-array-byte-offset "^1.0.0" - typed-array-length "^1.0.4" - unbox-primitive "^1.0.2" - which-typed-array "^1.1.13" - -es-iterator-helpers@^1.0.12: - version "1.0.15" - resolved "https://registry.yarnpkg.com/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz#bd81d275ac766431d19305923707c3efd9f1ae40" - integrity 
sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g== - dependencies: - asynciterator.prototype "^1.0.0" - call-bind "^1.0.2" - define-properties "^1.2.1" - es-abstract "^1.22.1" - es-set-tostringtag "^2.0.1" - function-bind "^1.1.1" - get-intrinsic "^1.2.1" - globalthis "^1.0.3" - has-property-descriptors "^1.0.0" - has-proto "^1.0.1" - has-symbols "^1.0.3" - internal-slot "^1.0.5" - iterator.prototype "^1.1.2" - safe-array-concat "^1.0.1" - -es-module-lexer@^1.2.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.4.1.tgz#41ea21b43908fe6a287ffcbe4300f790555331f5" - integrity sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w== - -es-set-tostringtag@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz#11f7cc9f63376930a5f20be4915834f4bc74f9c9" - integrity sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q== - dependencies: - get-intrinsic "^1.2.2" - has-tostringtag "^1.0.0" - hasown "^2.0.0" - -es-shim-unscopables@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz#1f6942e71ecc7835ed1c8a83006d8771a63a3763" - integrity sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw== - dependencies: - hasown "^2.0.0" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -es6-templates@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/es6-templates/-/es6-templates-0.2.3.tgz#5cb9ac9fb1ded6eb1239342b81d792bbb4078ee4" 
- integrity sha512-sziUVwcvQ+lOsrTyUY0Q11ilAPj+dy7AQ1E1MgSaHTaaAFTffaa08QSlGNU61iyVaroyb6nYdBV6oD7nzn6i8w== - dependencies: - recast "~0.11.12" - through "~2.3.6" - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -eslint-plugin-jest@^23.8.2: - version "23.20.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-23.20.0.tgz#e1d69c75f639e99d836642453c4e75ed22da4099" - integrity sha512-+6BGQt85OREevBDWCvhqj1yYA4+BFK4XnRZSGJionuEYmcglMZYLNNBBemwzbqUAckURaHdJSBcjHPyrtypZOw== - dependencies: - "@typescript-eslint/experimental-utils" "^2.5.0" - -eslint-plugin-react-hooks@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-3.0.0.tgz#9e80c71846eb68dd29c3b21d832728aa66e5bd35" - integrity sha512-EjxTHxjLKIBWFgDJdhKKzLh5q+vjTFrqNZX36uIxWS4OfyXe5DawqPj3U5qeJ1ngLwatjzQnmR0Lz0J0YH3kxw== - -eslint-plugin-react@^7.19.0: - version "7.33.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz#69ee09443ffc583927eafe86ffebb470ee737608" - integrity sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw== - dependencies: - 
array-includes "^3.1.6" - array.prototype.flatmap "^1.3.1" - array.prototype.tosorted "^1.1.1" - doctrine "^2.1.0" - es-iterator-helpers "^1.0.12" - estraverse "^5.3.0" - jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.1.2" - object.entries "^1.1.6" - object.fromentries "^2.0.6" - object.hasown "^1.1.2" - object.values "^1.1.6" - prop-types "^15.8.1" - resolve "^2.0.0-next.4" - semver "^6.3.1" - string.prototype.matchall "^4.0.8" - -eslint-scope@5.1.1, eslint-scope@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-scope@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" - integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-utils@^1.3.1: - version "1.4.3" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f" - integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q== - dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-utils@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" - integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== - dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity 
sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== - -eslint@5.16.0: - version "5.16.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-5.16.0.tgz#a1e3ac1aae4a3fbd8296fcf8f7ab7314cbb6abea" - integrity sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg== - dependencies: - "@babel/code-frame" "^7.0.0" - ajv "^6.9.1" - chalk "^2.1.0" - cross-spawn "^6.0.5" - debug "^4.0.1" - doctrine "^3.0.0" - eslint-scope "^4.0.3" - eslint-utils "^1.3.1" - eslint-visitor-keys "^1.0.0" - espree "^5.0.1" - esquery "^1.0.1" - esutils "^2.0.2" - file-entry-cache "^5.0.1" - functional-red-black-tree "^1.0.1" - glob "^7.1.2" - globals "^11.7.0" - ignore "^4.0.6" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - inquirer "^6.2.2" - js-yaml "^3.13.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.3.0" - lodash "^4.17.11" - minimatch "^3.0.4" - mkdirp "^0.5.1" - natural-compare "^1.4.0" - optionator "^0.8.2" - path-is-inside "^1.0.2" - progress "^2.0.0" - regexpp "^2.0.1" - semver "^5.5.1" - strip-ansi "^4.0.0" - strip-json-comments "^2.0.1" - table "^5.2.3" - text-table "^0.2.0" - -espree@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-5.0.1.tgz#5d6526fa4fc7f0788a5cf75b15f30323e2f81f7a" - integrity sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A== - dependencies: - acorn "^6.0.7" - acorn-jsx "^5.0.0" - eslint-visitor-keys "^1.0.0" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esprima@~3.1.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" - integrity 
sha512-AWwVMNxwhN8+NIPQzAQZCm7RkLC4RbM3B1OobMuyp3i+w73X57KCKaVIxaRZb+DYCojq7rspo+fmuQfAboyhFg== - -esquery@^1.0.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" - integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.1.0, esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity 
sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== - -exec-buffer@^3.0.0, exec-buffer@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/exec-buffer/-/exec-buffer-3.2.0.tgz#b1686dbd904c7cf982e652c1f5a79b1e5573082b" - integrity sha512-wsiD+2Tp6BWHoVv3B+5Dcx6E7u5zky+hUwOHjuH2hKSLR3dvRmX8fk8UD8uqQixHs4Wk6eDmiegVrMPjKj7wpA== - dependencies: - execa "^0.7.0" - p-finally "^1.0.0" - pify "^3.0.0" - rimraf "^2.5.4" - tempfile "^2.0.0" - -execa@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" - integrity sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw== - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" - integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== - dependencies: - cross-spawn "^7.0.0" - get-stream "^5.0.0" - human-signals "^1.1.1" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.0" - 
onetime "^5.1.0" - signal-exit "^3.0.2" - strip-final-newline "^2.0.0" - -execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -executable@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/executable/-/executable-4.1.1.tgz#41532bff361d3e57af4d763b70582db18f5d133c" - integrity sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg== - dependencies: - pify "^2.2.0" - -express@^4.17.3: - version "4.18.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" - integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.1" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.11.0" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -ext-list@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/ext-list/-/ext-list-2.2.2.tgz#0b98e64ed82f5acf0f2931babf69212ef52ddd37" - integrity 
sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA== - dependencies: - mime-db "^1.28.0" - -ext-name@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ext-name/-/ext-name-5.0.0.tgz#70781981d183ee15d13993c8822045c506c8f0a6" - integrity sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ== - dependencies: - ext-list "^2.0.0" - sort-keys-length "^1.0.0" - -external-editor@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" - integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== - dependencies: - chardet "^0.7.0" - iconv-lite "^0.4.24" - tmp "^0.0.33" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.0.3, fast-glob@^3.2.4, fast-glob@^3.2.9: - version "3.3.2" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129" - integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@~2.0.6: - version "2.0.6" - resolved 
"https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fast-xml-parser@^4.1.3: - version "4.3.3" - resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.3.tgz#aeaf5778392329f17168c40c51bcbfec8ff965be" - integrity sha512-coV/D1MhrShMvU6D0I+VAK3umz6hUaxxhL0yp/9RjfiYUfAv14rDhGQL+PLForhMdr0wq3PiV07WtkkNjJjNHg== - dependencies: - strnum "^1.0.5" - -fastest-levenshtein@^1.0.12: - version "1.0.16" - resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5" - integrity sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg== - -fastparse@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9" - integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ== - -fastq@^1.6.0: - version "1.16.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.16.0.tgz#83b9a9375692db77a822df081edb6a9cf6839320" - integrity sha512-ifCoaXsDrsdkWTtiNJX5uzHDsrck5TzfKKDcuFFTIrrc/BS076qgEIfoIy1VeZqViznfKiysPYTh/QeHtnIsYA== - dependencies: - reusify "^1.0.4" - -faye-websocket@^0.11.3: - version "0.11.4" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" - integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== - dependencies: - websocket-driver ">=0.5.1" - -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" - integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== - dependencies: - pend 
"~1.2.0" - -figures@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" - integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA== - dependencies: - escape-string-regexp "^1.0.5" - -file-entry-cache@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" - integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== - dependencies: - flat-cache "^2.0.1" - -file-loader@^5.0.2: - version "5.1.0" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-5.1.0.tgz#cb56c070efc0e40666424309bd0d9e45ac6f2bb8" - integrity sha512-u/VkLGskw3Ue59nyOwUwXI/6nuBCo7KBkniB/l7ICwr/7cPNGsL1WCXUp3GB0qgOOKU1TiP49bv4DZF/LJqprg== - dependencies: - loader-utils "^1.4.0" - schema-utils "^2.5.0" - -file-type@5.2.0, file-type@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-5.2.0.tgz#2ddbea7c73ffe36368dfae49dc338c058c2b8ad6" - integrity sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ== - -file-type@^10.4.0, file-type@^10.5.0: - version "10.11.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-10.11.0.tgz#2961d09e4675b9fb9a3ee6b69e9cd23f43fd1890" - integrity sha512-uzk64HRpUZyTGZtVuvrjP0FYxzQrBf4rojot6J65YMEbwBLB0CWm0CLojVpwpmFmxcE/lkvYICgfcGozbBq6rw== - -file-type@^12.0.0: - version "12.4.2" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-12.4.2.tgz#a344ea5664a1d01447ee7fb1b635f72feb6169d9" - integrity sha512-UssQP5ZgIOKelfsaB5CuGAL+Y+q7EmONuiwF3N5HAH0t27rvrttgi6Ra9k/+DVaY9UF6+ybxu5pOXLUdA8N7Vg== - -file-type@^3.8.0: - version "3.9.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-3.9.0.tgz#257a078384d1db8087bc449d107d52a52672b9e9" - integrity 
sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA== - -file-type@^4.2.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-4.4.0.tgz#1b600e5fca1fbdc6e80c0a70c71c8dba5f7906c5" - integrity sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ== - -file-type@^6.1.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-6.2.0.tgz#e50cd75d356ffed4e306dc4f5bcf52a79903a919" - integrity sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg== - -file-type@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-8.1.0.tgz#244f3b7ef641bbe0cca196c7276e4b332399f68c" - integrity sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ== - -filename-reserved-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz#abf73dfab735d045440abfea2d91f389ebbfa229" - integrity sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ== - -filenamify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/filenamify/-/filenamify-2.1.0.tgz#88faf495fb1b47abfd612300002a16228c677ee9" - integrity sha512-ICw7NTT6RsDp2rnYKVd8Fu4cr6ITzGy3+u4vUujPkabyaz+03F24NWEX7fs5fp+kBonlaqPH8fAO2NM+SXt/JA== - dependencies: - filename-reserved-regex "^2.0.0" - strip-outer "^1.0.0" - trim-repeated "^1.0.0" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@1.2.0: - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -find-babel-config@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/find-babel-config/-/find-babel-config-1.2.0.tgz#a9b7b317eb5b9860cda9d54740a8c8337a2283a2" - integrity sha512-jB2CHJeqy6a820ssiqwrKMeyC6nNdmrcgkKWJWmpoxpE8RKciYJXCcXRq1h2AzCo5I5BJeN2tkGEO3hLTuePRA== - dependencies: - json5 "^0.5.1" - path-exists "^3.0.0" - -find-cache-dir@^3.3.1: - version "3.3.2" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -find-versions@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/find-versions/-/find-versions-3.2.0.tgz#10297f98030a786829681690545ef659ed1d254e" - integrity sha512-P8WRou2S+oe222TOCHitLy8zj+SIsVJh52VP4lvXkaFVnOFFdoWv1H1Jjvel1aI6NCFOAaeAVm8qrI0odiLcww== - dependencies: - semver-regex "^2.0.0" - 
-flat-cache@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" - integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== - dependencies: - flatted "^2.0.0" - rimraf "2.6.3" - write "1.0.3" - -flat@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" - integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== - -flatted@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" - integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== - -follow-redirects@^1.0.0, follow-redirects@^1.15.0: - version "1.15.5" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" - integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw== - -for-each@^0.3.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" - integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== - dependencies: - is-callable "^1.1.3" - -form-data@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" - integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - -forwarded@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity 
sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== - -from2@^2.1.1: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g== - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-minipass@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" - integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== - dependencies: - minipass "^3.0.0" - -fs-monkey@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.5.tgz#fe450175f0db0d7ea758102e1d84096acb925788" - integrity sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -function-bind@^1.1.1, function-bind@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" - integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== - -function.prototype.name@^1.1.5, function.prototype.name@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" - integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - functions-have-names "^1.2.3" - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== - -functions-have-names@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" - integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: - version "1.2.2" - resolved 
"https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" - integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== - dependencies: - function-bind "^1.1.2" - has-proto "^1.0.1" - has-symbols "^1.0.3" - hasown "^2.0.0" - -get-proxy@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/get-proxy/-/get-proxy-2.1.0.tgz#349f2b4d91d44c4d4d4e9cba2ad90143fac5ef93" - integrity sha512-zmZIaQTWnNQb4R4fJUEp/FC51eZsc6EkErspy3xtIYStaq8EB/hDIWipxsal+E8rz0qD7f2sL/NA9Xee4RInJw== - dependencies: - npm-conf "^1.1.0" - -get-stream@3.0.0, get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== - -get-stream@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-2.3.1.tgz#5f38f93f346009666ee0150a054167f91bdd95de" - integrity sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA== - dependencies: - object-assign "^4.0.1" - pinkie-promise "^2.0.0" - -get-stream@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - -get-stream@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity 
sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -gifsicle@^5.0.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/gifsicle/-/gifsicle-5.3.0.tgz#499713c6f1e89ebbc3630da3a74fdb4697913b4e" - integrity sha512-FJTpgdj1Ow/FITB7SVza5HlzXa+/lqEY0tHQazAJbuAdvyJtkH4wIdsR2K414oaTwRXHFLLF+tYbipj+OpYg+Q== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - execa "^5.0.0" - -glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-to-regexp@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" - integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== - -glob@^7.0.3, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^11.1.0, globals@^11.7.0: - version "11.12.0" - resolved 
"https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globalthis@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== - dependencies: - define-properties "^1.1.3" - -globby@^10.0.0: - version "10.0.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-10.0.2.tgz#277593e745acaa4646c3ab411289ec47a0392543" - integrity sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg== - dependencies: - "@types/glob" "^7.1.1" - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.0.3" - glob "^7.1.3" - ignore "^5.1.1" - merge2 "^1.2.3" - slash "^3.0.0" - -globby@^11.0.1: - version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -globby@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" - integrity sha512-KVbFv2TQtbzCoxAnfD6JcHZTYCzyliEaaeM/gH8qQdkKr5s0OP9scEgvdcngyk7AVdY6YVW/TJHd+lQ/Df3Daw== - dependencies: - array-union "^1.0.1" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -gopd@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" - integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== - dependencies: - get-intrinsic 
"^1.1.3" - -got@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a" - integrity sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw== - dependencies: - decompress-response "^3.2.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-plain-obj "^1.1.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - p-cancelable "^0.3.0" - p-timeout "^1.1.1" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - url-parse-lax "^1.0.0" - url-to-options "^1.0.1" - -got@^8.3.1: - version "8.3.2" - resolved "https://registry.yarnpkg.com/got/-/got-8.3.2.tgz#1d23f64390e97f776cac52e5b936e5f514d2e937" - integrity sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw== - dependencies: - "@sindresorhus/is" "^0.7.0" - cacheable-request "^2.1.1" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - into-stream "^3.1.0" - is-retry-allowed "^1.1.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - mimic-response "^1.0.0" - p-cancelable "^0.4.0" - p-timeout "^2.0.1" - pify "^3.0.0" - safe-buffer "^5.1.1" - timed-out "^4.0.1" - url-parse-lax "^3.0.0" - url-to-options "^1.0.1" - -graceful-fs@^4.1.10, graceful-fs@^4.1.2, graceful-fs@^4.2.2, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: - version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -handle-thing@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" - integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== - -has-bigints@^1.0.1, has-bigints@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" - integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz#52ba30b6c5ec87fd89fa574bc1c39125c6f65340" - integrity sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg== - dependencies: - get-intrinsic "^1.2.2" - -has-proto@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" - integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== - -has-symbol-support-x@^1.4.1: - version "1.4.2" - resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" - integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== - -has-symbols@^1.0.2, has-symbols@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-to-string-tag-x@^1.2.0: - 
version "1.4.1" - resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" - integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== - dependencies: - has-symbol-support-x "^1.4.1" - -has-tostringtag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -hasown@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" - integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== - dependencies: - function-bind "^1.1.2" - -he@1.2.x, he@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -history@^4.9.0: - version "4.10.1" - resolved "https://registry.yarnpkg.com/history/-/history-4.10.1.tgz#33371a65e3a83b267434e2b3f3b1b4c58aad4cf3" - integrity sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew== - dependencies: - "@babel/runtime" "^7.1.2" - loose-envify "^1.2.0" - resolve-pathname "^3.0.0" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - value-equal "^1.0.1" - -hoist-non-react-statics@^3.1.0: - version "3.3.2" - resolved "https://registry.yarnpkg.com/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz#ece0acaf71d62c2969c2ec59feff42a4b1a85b45" - integrity sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw== - dependencies: - react-is "^16.7.0" - -hpack.js@^2.1.6: - version "2.1.6" - resolved 
"https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ== - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-entities@^2.3.2: - version "2.4.0" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" - integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== - -html-loader@^0.5.5: - version "0.5.5" - resolved "https://registry.yarnpkg.com/html-loader/-/html-loader-0.5.5.tgz#6356dbeb0c49756d8ebd5ca327f16ff06ab5faea" - integrity sha512-7hIW7YinOYUpo//kSYcPB6dCKoceKLmOwjEMmhIobHuWGDVl0Nwe4l68mdG/Ru0wcUxQjVMEoZpkalZ/SE7zog== - dependencies: - es6-templates "^0.2.3" - fastparse "^1.1.1" - html-minifier "^3.5.8" - loader-utils "^1.1.0" - object-assign "^4.1.1" - -html-minifier-terser@^6.0.2: - version "6.1.0" - resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" - integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== - dependencies: - camel-case "^4.1.2" - clean-css "^5.2.2" - commander "^8.3.0" - he "^1.2.0" - param-case "^3.0.4" - relateurl "^0.2.7" - terser "^5.10.0" - -html-minifier@^3.5.8: - version "3.5.21" - resolved "https://registry.yarnpkg.com/html-minifier/-/html-minifier-3.5.21.tgz#d0040e054730e354db008463593194015212d20c" - integrity sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA== - dependencies: - camel-case "3.0.x" - clean-css "4.2.x" - commander "2.17.x" - he "1.2.x" - param-case "2.1.x" - relateurl "0.2.x" - uglify-js "3.4.x" - -html-webpack-plugin@^5.5.0: - version "5.6.0" - resolved 
"https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.6.0.tgz#50a8fa6709245608cb00e811eacecb8e0d7b7ea0" - integrity sha512-iwaY4wzbe48AfKLZ/Cc8k0L+FKG6oSNRaZ8x5A/T/IVDGyXcbHncM9TdDa93wn0FsSm82FhTKW7f3vS61thXAw== - dependencies: - "@types/html-minifier-terser" "^6.0.0" - html-minifier-terser "^6.0.2" - lodash "^4.17.21" - pretty-error "^4.0.0" - tapable "^2.0.0" - -htmlparser2@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" - integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.0.0" - domutils "^2.5.2" - entities "^2.0.0" - -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd 
"~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.5.1: - version "0.5.8" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" - integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== - -http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== - dependencies: - "@types/http-proxy" "^1.17.8" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -human-signals@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" - integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== - -human-signals@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -iconv-lite@0.4.24, iconv-lite@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - 
dependencies: - safer-buffer ">= 2.1.2 < 3" - -icss-utils@^5.0.0, icss-utils@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" - integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== - -ieee754@^1.1.13: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -ignore@^5.1.1, ignore@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" - integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg== - -image-size@~0.5.0: - version "0.5.5" - resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" - integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ== - -image-webpack-loader@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/image-webpack-loader/-/image-webpack-loader-8.1.0.tgz#cd97172e1e7304ef5eb898344fc25bbb650fc7d7" - integrity sha512-bxzMIBNu42KGo6Bc9YMB0QEUt+XuVTl2ZSX3oGAlbsqYOkxkT4TEWvVsnwUkCRCYISJrMCEc/s0y8OYrmEfUOg== - dependencies: - imagemin "^7.0.1" - loader-utils "^2.0.0" - object-assign "^4.1.1" - schema-utils "^2.7.1" - optionalDependencies: - imagemin-gifsicle "^7.0.0" - imagemin-mozjpeg "^9.0.0" - imagemin-optipng "^8.0.0" - imagemin-pngquant "^9.0.2" - imagemin-svgo "^9.0.0" - imagemin-webp "^7.0.0" - -imagemin-gifsicle@^7.0.0: - 
version "7.0.0" - resolved "https://registry.yarnpkg.com/imagemin-gifsicle/-/imagemin-gifsicle-7.0.0.tgz#1a7ab136a144c4678657ba3b6c412f80805d26b0" - integrity sha512-LaP38xhxAwS3W8PFh4y5iQ6feoTSF+dTAXFRUEYQWYst6Xd+9L/iPk34QGgK/VO/objmIlmq9TStGfVY2IcHIA== - dependencies: - execa "^1.0.0" - gifsicle "^5.0.0" - is-gif "^3.0.0" - -imagemin-mozjpeg@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/imagemin-mozjpeg/-/imagemin-mozjpeg-9.0.0.tgz#d1af26d0b43d75a41c211051c1910da59d9d2324" - integrity sha512-TwOjTzYqCFRgROTWpVSt5UTT0JeCuzF1jswPLKALDd89+PmrJ2PdMMYeDLYZ1fs9cTovI9GJd68mRSnuVt691w== - dependencies: - execa "^4.0.0" - is-jpg "^2.0.0" - mozjpeg "^7.0.0" - -imagemin-optipng@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/imagemin-optipng/-/imagemin-optipng-8.0.0.tgz#b88e5cf6da25cc8479e07cdf38c3ae0479df7ef2" - integrity sha512-CUGfhfwqlPjAC0rm8Fy+R2DJDBGjzy2SkfyT09L8rasnF9jSoHFqJ1xxSZWK6HVPZBMhGPMxCTL70OgTHlLF5A== - dependencies: - exec-buffer "^3.0.0" - is-png "^2.0.0" - optipng-bin "^7.0.0" - -imagemin-pngquant@^9.0.2: - version "9.0.2" - resolved "https://registry.yarnpkg.com/imagemin-pngquant/-/imagemin-pngquant-9.0.2.tgz#38155702b0cc4f60f671ba7c2b086ea3805d9567" - integrity sha512-cj//bKo8+Frd/DM8l6Pg9pws1pnDUjgb7ae++sUX1kUVdv2nrngPykhiUOgFeE0LGY/LmUbCf4egCHC4YUcZSg== - dependencies: - execa "^4.0.0" - is-png "^2.0.0" - is-stream "^2.0.0" - ow "^0.17.0" - pngquant-bin "^6.0.0" - -imagemin-svgo@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/imagemin-svgo/-/imagemin-svgo-9.0.0.tgz#749370804608917a67d4ff590f07a87756aec006" - integrity sha512-uNgXpKHd99C0WODkrJ8OO/3zW3qjgS4pW7hcuII0RcHN3tnKxDjJWcitdVC/TZyfIqSricU8WfrHn26bdSW62g== - dependencies: - is-svg "^4.2.1" - svgo "^2.1.0" - -imagemin-webp@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/imagemin-webp/-/imagemin-webp-7.0.0.tgz#df000ec927855d74d4cfafec8558ac418c88d2a9" - integrity 
sha512-JoYjvHNgBLgrQAkeCO7T5iNc8XVpiBmMPZmiXMhalC7K6gwY/3DCEUfNxVPOmNJ+NIJlJFvzcMR9RBxIE74Xxw== - dependencies: - cwebp-bin "^7.0.1" - exec-buffer "^3.2.0" - is-cwebp-readable "^3.0.0" - -imagemin@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/imagemin/-/imagemin-7.0.1.tgz#f6441ca647197632e23db7d971fffbd530c87dbf" - integrity sha512-33AmZ+xjZhg2JMCe+vDf6a9mzWukE7l+wAtesjE7KyteqqKjzxv7aVQeWnul1Ve26mWvEQqyPwl0OctNBfSR9w== - dependencies: - file-type "^12.0.0" - globby "^10.0.0" - graceful-fs "^4.2.2" - junk "^3.1.0" - make-dir "^3.0.0" - p-pipe "^3.0.0" - replace-ext "^1.0.0" - -import-fresh@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-lazy@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-3.1.0.tgz#891279202c8a2280fdbd6674dbd8da1a1dfc67cc" - integrity sha512-8/gvXvX2JMn0F+CDlSC4l6kOmVaLOO3XLkksI7CI3Ud95KDYJuYur2b9P/PUt/i/pDAMd/DulQsNbbbmRRsDIQ== - -import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -indent-string@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity 
sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -infer-owner@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" - integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== - -ini@^1.3.4: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - -inquirer@^6.2.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.2.tgz#ad50942375d036d327ff528c08bd5fab089928ca" - integrity sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ== - dependencies: - ansi-escapes "^3.2.0" - chalk "^2.4.2" - cli-cursor "^2.1.0" - cli-width "^2.0.0" - external-editor "^3.0.3" - figures "^2.0.0" - lodash "^4.17.12" - mute-stream "0.0.7" - run-async "^2.2.0" - rxjs "^6.4.0" - string-width "^2.1.0" 
- strip-ansi "^5.1.0" - through "^2.3.6" - -internal-slot@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.6.tgz#37e756098c4911c5e912b8edbf71ed3aa116f930" - integrity sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg== - dependencies: - get-intrinsic "^1.2.2" - hasown "^2.0.0" - side-channel "^1.0.4" - -interpret@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" - integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== - -into-stream@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-3.1.0.tgz#96fb0a936c12babd6ff1752a17d05616abd094c6" - integrity sha512-TcdjPibTksa1NQximqep2r17ISRiNE9fwlfbg3F8ANdvP5/yrFTew86VcO//jk4QTaMlbjypPBq76HN2zaKfZQ== - dependencies: - from2 "^2.1.1" - p-is-promise "^1.1.0" - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -ipaddr.js@^2.0.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" - integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== - -is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" - integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.0" - is-typed-array "^1.1.10" - -is-async-function@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.0.0.tgz#8e4418efd3e5d3a6ebb0164c05ef5afb69aa9646" - integrity sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA== - dependencies: - has-tostringtag "^1.0.0" - -is-bigint@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" - integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== - dependencies: - has-bigints "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-boolean-object@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" - integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" - integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== - -is-core-module@^2.13.0: - version "2.13.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" - integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== - dependencies: - hasown "^2.0.0" - -is-cwebp-readable@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/is-cwebp-readable/-/is-cwebp-readable-3.0.0.tgz#0554aaa400977a2fc4de366d8c0244f13cde58cb" - integrity sha512-bpELc7/Q1/U5MWHn4NdHI44R3jxk0h9ew9ljzabiRl70/UIjL/ZAqRMb52F5+eke/VC8yTiv4Ewryo1fPWidvA== - dependencies: - file-type "^10.5.0" - -is-date-object@^1.0.1, is-date-object@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" - integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== - dependencies: - has-tostringtag "^1.0.0" - -is-docker@^2.0.0, is-docker@^2.1.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-finalizationregistry@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz#c8749b65f17c133313e661b1289b95ad3dbd62e6" - integrity sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw== - dependencies: - call-bind "^1.0.2" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== - -is-generator-function@^1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" - 
integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== - dependencies: - has-tostringtag "^1.0.0" - -is-gif@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-gif/-/is-gif-3.0.0.tgz#c4be60b26a301d695bb833b20d9b5d66c6cf83b1" - integrity sha512-IqJ/jlbw5WJSNfwQ/lHEDXF8rxhRgF6ythk2oiEvhpG29F704eX9NO6TvPfMiq9DrbwgcEDnETYNcZDPewQoVw== - dependencies: - file-type "^10.4.0" - -is-glob@^4.0.1, is-glob@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-jpg@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-jpg/-/is-jpg-2.0.0.tgz#2e1997fa6e9166eaac0242daae443403e4ef1d97" - integrity sha512-ODlO0ruzhkzD3sdynIainVP5eoOFNN85rxA1+cwwnPe4dKyX0r5+hxNO5XpCrxlHcmb9vkOit9mhRD2JVuimHg== - -is-map@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" - integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== - -is-natural-number@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-natural-number/-/is-natural-number-4.0.1.tgz#ab9d76e1db4ced51e35de0c72ebecf09f734cde8" - integrity sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ== - -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== - -is-number-object@^1.0.4: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" 
- integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== - dependencies: - has-tostringtag "^1.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" - integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== - -is-path-cwd@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" - integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== - -is-path-in-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" - integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== - dependencies: - is-path-inside "^2.1.0" - -is-path-inside@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" - integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== - dependencies: - path-is-inside "^1.0.2" - -is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== - -is-plain-obj@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - -is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-png@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-png/-/is-png-2.0.0.tgz#ee8cbc9e9b050425cedeeb4a6fb74a649b0a4a8d" - integrity sha512-4KPGizaVGj2LK7xwJIz8o5B2ubu1D/vcQsgOGFEDlpcvgZHto4gBnyd0ig7Ws+67ixmwKoNmu0hYnpo6AaKb5g== - -is-regex@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" - integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-retry-allowed@^1.0.0, is-retry-allowed@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" - integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== - -is-set@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" - integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== - -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" - integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== - dependencies: - call-bind 
"^1.0.2" - -is-stream@^1.0.0, is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== - -is-stream@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -is-string@^1.0.5, is-string@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" - integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== - dependencies: - has-tostringtag "^1.0.0" - -is-svg@^4.2.1: - version "4.4.0" - resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-4.4.0.tgz#34db20a38146be5f2b3060154da33d11e6f74b7c" - integrity sha512-v+AgVwiK5DsGtT9ng+m4mClp6zDAmwrW8nZi6Gg15qzvBnRWWdfWA1TGaXyCDnWq5g5asofIgMVl3PjKxvk1ug== - dependencies: - fast-xml-parser "^4.1.3" - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== - dependencies: - has-symbols "^1.0.2" - -is-typed-array@^1.1.10, is-typed-array@^1.1.12, is-typed-array@^1.1.9: - version "1.1.12" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" - integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== - dependencies: - which-typed-array "^1.1.11" - -is-weakmap@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" - integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== - -is-weakref@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" - integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== - dependencies: - call-bind "^1.0.2" - -is-weakset@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" - integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -is-what@^3.14.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/is-what/-/is-what-3.14.1.tgz#e1222f46ddda85dead0fd1c9df131760e77755c1" - integrity sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA== - -is-wsl@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - -isarray@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" - integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== - -isarray@~1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== - -isurl@^1.0.0-alpha5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" - integrity sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== - dependencies: - has-to-string-tag-x "^1.2.0" - is-object "^1.0.1" - -iterator.prototype@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/iterator.prototype/-/iterator.prototype-1.1.2.tgz#5e29c8924f01916cb9335f1ff80619dcff22b0c0" - integrity sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w== - dependencies: - define-properties "^1.2.1" - get-intrinsic "^1.2.1" - has-symbols "^1.0.3" - reflect.getprototypeof "^1.0.4" - set-function-name "^2.0.1" - -jest-worker@^27.4.5: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" - integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.0: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== - -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== - -json-parse-even-better-errors@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" - integrity sha512-4xrs1aW+6N5DalkqSVA8fxh458CXvR99WU8WLKmq4v8eWAL86Xo3BVqyd3SkA9wEVjCMqyvvRRkshAdOnBp5rw== - -json5@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" - integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== - dependencies: - minimist "^1.2.0" - -json5@^2.1.2, json5@^2.2.3: - version "2.2.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" - integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== - -"jsx-ast-utils@^2.4.1 || ^3.0.0": - version "3.3.5" - resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz#4766bd05a8e2a11af222becd19e15575e52a853a" - integrity sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ== - dependencies: - array-includes "^3.1.6" - array.prototype.flat "^1.3.1" - object.assign "^4.1.4" - object.values "^1.1.6" - -junk@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/junk/-/junk-3.1.0.tgz#31499098d902b7e98c5d9b9c80f43457a88abfa1" - integrity 
sha512-pBxcB3LFc8QVgdggvZWyeys+hnrNWg4OcZIU/1X59k5jQdLBlCsYGRQaz234SqoRLTCgMH00fY0xRJH+F9METQ== - -keyv@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.0.0.tgz#44923ba39e68b12a7cec7df6c3268c031f2ef373" - integrity sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA== - dependencies: - json-buffer "3.0.0" - -kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -launch-editor@^2.6.0: - version "2.6.1" - resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.1.tgz#f259c9ef95cbc9425620bbbd14b468fcdb4ffe3c" - integrity sha512-eB/uXmFVpY4zezmGp5XtU21kwo7GBbKB+EQ+UZeWtGb9yAM5xt/Evk+lYH3eRNAtId+ej4u7TYPFZ07w4s7rRw== - dependencies: - picocolors "^1.0.0" - shell-quote "^1.8.1" - -less-loader@^6.1.2: - version "6.2.0" - resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-6.2.0.tgz#8b26f621c155b342eefc24f5bd6e9dc40c42a719" - integrity sha512-Cl5h95/Pz/PWub/tCBgT1oNMFeH1WTD33piG80jn5jr12T4XbxZcjThwNXDQ7AG649WEynuIzO4b0+2Tn9Qolg== - dependencies: - clone "^2.1.2" - less "^3.11.3" - loader-utils "^2.0.0" - schema-utils "^2.7.0" - -less-vars-to-js@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/less-vars-to-js/-/less-vars-to-js-1.3.0.tgz#c322cf43a3c8fc3fab655da3e51a14c1499ab571" - integrity sha512-xeiLLn/IMCGtdyCkYQnW8UuzoW2oYMCKg9boZRaGI58fLz5r90bNJDlqGzmVt/1Uqk75/DxIVtQSNCMkE5fRZQ== - dependencies: - strip-json-comments "^2.0.1" - -less@^3.10.3, less@^3.11.3: - version "3.13.1" - resolved "https://registry.yarnpkg.com/less/-/less-3.13.1.tgz#0ebc91d2a0e9c0c6735b83d496b0ab0583077909" - integrity sha512-SwA1aQXGUvp+P5XdZslUOhhLnClSLIjWvJhmd+Vgib5BFIr9lMNlQwmwUNOjXThF/A0x+MCYYPeWEfeWiLRnTw== - dependencies: - copy-anything "^2.0.1" - tslib "^1.10.0" - 
optionalDependencies: - errno "^0.1.1" - graceful-fs "^4.1.2" - image-size "~0.5.0" - make-dir "^2.1.0" - mime "^1.4.1" - native-request "^1.0.5" - source-map "~0.6.0" - -levn@^0.3.0, levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -loader-runner@^4.2.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" - integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== - -loader-utils@^1.1.0, loader-utils@^1.4.0: - version "1.4.2" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" - integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^1.0.1" - -loader-utils@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" - integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== - -lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac" - integrity sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA== - -lower-case@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" - integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== - dependencies: - tslib "^2.0.3" - -lowercase-keys@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" - integrity sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A== - 
-lowercase-keys@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lru-cache@^4.0.1: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -make-dir@^1.0.0, make-dir@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== - dependencies: - pify "^3.0.0" - -make-dir@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" - integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== - dependencies: - pify "^4.0.1" - semver "^5.6.0" - -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - 
integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -mdn-data@2.0.14: - version "2.0.14" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" - integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -memfs@^3.4.3: - version "3.6.0" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" - integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== - dependencies: - fs-monkey "^1.0.4" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.2.3, merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity 
sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - -micromatch@^4.0.0, micromatch@^4.0.2, micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2", mime-db@^1.28.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.24, mime-types@~2.1.34: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - -mime@1.6.0, mime@^1.4.1: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mimic-fn@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" - integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -mimic-response@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.0, minimist@^1.2.6: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -minipass-collect@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" - integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== - dependencies: - minipass "^3.0.0" - -minipass-flush@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" - integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== - dependencies: - minipass "^3.0.0" - -minipass-pipeline@^1.2.2: - version "1.2.4" - resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" - integrity 
sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== - dependencies: - minipass "^3.0.0" - -minipass@^3.0.0, minipass@^3.1.1: - version "3.3.6" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" - integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== - dependencies: - yallist "^4.0.0" - -minipass@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" - integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== - -minizlib@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" - integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== - dependencies: - minipass "^3.0.0" - yallist "^4.0.0" - -mkdirp@^0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -mkdirp@^1.0.3, mkdirp@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" - integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== - -mozjpeg@^7.0.0: - version "7.1.1" - resolved "https://registry.yarnpkg.com/mozjpeg/-/mozjpeg-7.1.1.tgz#dfb61953536e66fcabd4ae795e7a312d42a51f18" - integrity sha512-iIDxWvzhWvLC9mcRJ1uSkiKaj4drF58oCqK2bITm5c2Jt6cJ8qQjSSru2PCaysG+hLIinryj8mgz5ZJzOYTv1A== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - -ms@2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -multicast-dns@^7.2.5: - version "7.2.5" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" - integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== - dependencies: - dns-packet "^5.2.2" - thunky "^1.0.2" - -mute-stream@0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" - integrity sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ== - -nanoid@^3.3.7: - version "3.3.7" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" - integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== - -native-request@^1.0.5: - version "1.1.0" - resolved "https://registry.yarnpkg.com/native-request/-/native-request-1.1.0.tgz#acdb30fe2eefa3e1bc8c54b3a6852e9c5c0d3cb0" - integrity sha512-uZ5rQaeRn15XmpgE0xoPL8YWqcX90VtCFglYwAgkvKM5e8fog+vePLAhHxuuv/gRkrQxIeh5U3q9sMNUrENqWw== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity 
sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - -no-case@^2.2.0: - version "2.3.2" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac" - integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ== - dependencies: - lower-case "^1.1.1" - -no-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" - integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== - dependencies: - lower-case "^2.0.2" - tslib "^2.0.3" - -node-forge@^1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== - -node-releases@^2.0.14: - version "2.0.14" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" - integrity 
sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-url@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" - integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== - dependencies: - prepend-http "^2.0.0" - query-string "^5.0.1" - sort-keys "^2.0.0" - -npm-conf@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/npm-conf/-/npm-conf-1.1.3.tgz#256cc47bd0e218c259c4e9550bf413bc2192aff9" - integrity sha512-Yic4bZHJOt9RCFbRP3GgpqhScOY4HH3V2P8yBj6CeYq118Qr+BLXqT2JvpJ00mryLESpgOxf5XlFv4ZjXxLScw== - dependencies: - config-chain "^1.1.11" - pify "^3.0.0" - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw== - dependencies: - path-key "^2.0.0" - -npm-run-path@^4.0.0, npm-run-path@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nth-check@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" - integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== - dependencies: - boolbase 
"^1.0.0" - -object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-inspect@^1.13.1, object-inspect@^1.9.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" - integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== - -object-keys@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.4: - version "4.1.5" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0" - integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== - dependencies: - call-bind "^1.0.5" - define-properties "^1.2.1" - has-symbols "^1.0.3" - object-keys "^1.1.1" - -object.entries@^1.1.6: - version "1.1.7" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.7.tgz#2b47760e2a2e3a752f39dd874655c61a7f03c131" - integrity sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -object.fromentries@^2.0.6: - version "2.0.7" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.7.tgz#71e95f441e9a0ea6baf682ecaaf37fa2a8d7e616" - integrity sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA== - dependencies: - call-bind "^1.0.2" - define-properties 
"^1.2.0" - es-abstract "^1.22.1" - -object.hasown@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.3.tgz#6a5f2897bb4d3668b8e79364f98ccf971bda55ae" - integrity sha512-fFI4VcYpRHvSLXxP7yiZOMAd331cPfd2p7PFDVbgUsYOfCT3tICVqXWngbjr4m49OvsBwUBQ6O2uQoJvy3RexA== - dependencies: - define-properties "^1.2.0" - es-abstract "^1.22.1" - -object.values@^1.1.6: - version "1.1.7" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.7.tgz#617ed13272e7e1071b43973aa1655d9291b8442a" - integrity sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^2.0.0: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" - integrity sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ== - dependencies: - mimic-fn "^1.0.0" - -onetime@^5.1.0, onetime@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -open@^8.0.9: - version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" - integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== - dependencies: - define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - -optionator@^0.8.2: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -optipng-bin@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/optipng-bin/-/optipng-bin-7.0.1.tgz#beb8e55a52f8a26f885ee57ab44fcf62397d6972" - integrity sha512-W99mpdW7Nt2PpFiaO+74pkht7KEqkXkeRomdWXfEz3SALZ6hns81y/pm1dsGZ6ItUIfchiNIP6ORDr1zETU1jA== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - -os-filter-obj@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/os-filter-obj/-/os-filter-obj-2.0.0.tgz#1c0b62d5f3a2442749a2d139e6dddee6e81d8d16" - integrity sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg== - dependencies: - arch "^2.1.0" - -os-tmpdir@~1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== - -ow@^0.17.0: - version "0.17.0" - resolved "https://registry.yarnpkg.com/ow/-/ow-0.17.0.tgz#4f938999fed6264c9048cd6254356e0f1e7f688c" - integrity sha512-i3keDzDQP5lWIe4oODyDFey1qVrq2hXKTuTH2VpqwpYtzPiKZt2ziRI4NBQmgW40AnV5Euz17OyWweCb+bNEQA== - dependencies: - type-fest "^0.11.0" - -p-cancelable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" - integrity sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw== - -p-cancelable@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.1.tgz#35f363d67d52081c8d9585e37bcceb7e0bbcb2a0" - integrity sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ== - -p-event@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-event/-/p-event-1.3.0.tgz#8e6b4f4f65c72bc5b6fe28b75eda874f96a4a085" - integrity sha512-hV1zbA7gwqPVFcapfeATaNjQ3J0NuzorHPyG8GPL9g/Y/TplWVBVoCKCXL6Ej2zscrCEv195QNWJXuBH6XZuzA== - dependencies: - p-timeout "^1.1.1" - -p-event@^2.1.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/p-event/-/p-event-2.3.1.tgz#596279ef169ab2c3e0cae88c1cfbb08079993ef6" - integrity sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA== - dependencies: - p-timeout "^2.0.1" - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== - -p-is-promise@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-1.1.0.tgz#9c9456989e9f6588017b0434d56097675c3da05e" - integrity sha512-zL7VE4JVS2IFSkR2GQKDSPEVxkoH43/p7oEnwpdCndKYJO0HVeRB7fA8TJwuLOTBREtK0ea8eHaxdwcpob5dmg== - -p-limit@^2.0.0, p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-map-series@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-map-series/-/p-map-series-1.0.0.tgz#bf98fe575705658a9e1351befb85ae4c1f07bdca" - integrity sha512-4k9LlvY6Bo/1FcIdV33wqZQES0Py+iKISU9Uc8p8AjWoZPnFKMpVIVD3s0EYn4jzLh1I+WeUZkJ0Yoa4Qfw3Kg== - dependencies: - p-reduce "^1.0.0" - -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== - -p-map@^4.0.0: - version "4.0.0" - 
resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" - integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== - dependencies: - aggregate-error "^3.0.0" - -p-pipe@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-pipe/-/p-pipe-3.1.0.tgz#48b57c922aa2e1af6a6404cb7c6bf0eb9cc8e60e" - integrity sha512-08pj8ATpzMR0Y80x50yJHn37NF6vjrqHutASaX5LiH5npS9XPvrUmscd9MF5R4fuYRHOxQR1FfMIlF7AzwoPqw== - -p-reduce@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-reduce/-/p-reduce-1.0.0.tgz#18c2b0dd936a4690a529f8231f58a0fdb6a47dfa" - integrity sha512-3Tx1T3oM1xO/Y8Gj0sWyE78EIJZ+t+aEmXUdvQgvGmSMri7aPTHoovbXEreWKkL5j21Er60XAWLTzKbAKYOujQ== - -p-retry@^4.5.0: - version "4.6.2" - resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" - integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== - dependencies: - "@types/retry" "0.12.0" - retry "^0.13.1" - -p-timeout@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386" - integrity sha512-gb0ryzr+K2qFqFv6qi3khoeqMZF/+ajxQipEF6NteZVnvz9tzdsfAVj3lYtn1gAXvH5lfLwfxEII799gt/mRIA== - dependencies: - p-finally "^1.0.0" - -p-timeout@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-2.0.1.tgz#d8dd1979595d2dc0139e1fe46b8b646cb3cdf038" - integrity sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA== - dependencies: - p-finally "^1.0.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -param-case@2.1.x: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/param-case/-/param-case-2.1.1.tgz#df94fd8cf6531ecf75e6bef9a0858fbc72be2247" - integrity sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w== - dependencies: - no-case "^2.2.0" - -param-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" - integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== - dependencies: - dot-case "^3.0.4" - tslib "^2.0.3" - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascal-case@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" - integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity 
sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-is-inside@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" - integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w== - -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" - integrity 
sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" - integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== - -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pify@^2.0.0, pify@^2.2.0, pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== - -pify@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" - integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== - -pinkie-promise@^2.0.0: - 
version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw== - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== - -pkg-dir@^4.1.0, pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pkg-up@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -pngquant-bin@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/pngquant-bin/-/pngquant-bin-6.0.1.tgz#2b5789ca219eeb4d8509ab1ae082092801b7f07e" - integrity sha512-Q3PUyolfktf+hYio6wsg3SanQzEU/v8aICg/WpzxXcuCMRb7H2Q81okfpcEztbMvw25ILjd3a87doj2N9kvbpQ== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.1" - execa "^4.0.0" - -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== - -postcss-modules-local-by-default@^4.0.0: - version "4.0.4" - resolved 
"https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.4.tgz#7cbed92abd312b94aaea85b68226d3dec39a14e6" - integrity sha512-L4QzMnOdVwRm1Qb8m4x8jsZzKAaPAgrUF1r/hjDR2Xj7R+8Zsf97jAlSQzWtKx5YNiNGN8QxmPFIc/sh+RQl+Q== - dependencies: - icss-utils "^5.0.0" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.1.0" - -postcss-modules-scope@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.1.1.tgz#32cfab55e84887c079a19bbb215e721d683ef134" - integrity sha512-uZgqzdTleelWjzJY+Fhti6F3C9iF1JR/dODLs/JDefozYcKTBCdD8BIl6nNPbTbcLnGrk56hzwZC2DaGNvYjzA== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-modules-values@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" - integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== - dependencies: - icss-utils "^5.0.0" - -postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: - version "6.0.15" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz#11cc2b21eebc0b99ea374ffb9887174855a01535" - integrity sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-value-parser@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" - integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== - -postcss@^8.2.15: - version "8.4.33" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.33.tgz#1378e859c9f69bf6f638b990a0212f43e2aaa742" - integrity 
sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg== - dependencies: - nanoid "^3.3.7" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== - -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg== - -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== - -pretty-error@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" - integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== - dependencies: - lodash "^4.17.20" - renderkid "^3.0.0" - -private@~0.1.5: - version "0.1.8" - resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" - integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -progress@^2.0.0: - version "2.0.3" - resolved 
"https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -promise-inflight@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" - integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g== - -prop-types@^15.6.2, prop-types@^15.8.1: - version "15.8.1" - resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" - integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.13.1" - -proto-list@~1.2.1: - version "1.2.4" - resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" - integrity sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA== - -proxy-addr@~2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== - -pseudomap@^1.0.2: - 
version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -punycode@^2.1.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" - integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== - -qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== - dependencies: - side-channel "^1.0.4" - -query-string@^5.0.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" - integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== - dependencies: - decode-uri-component "^0.2.0" - object-assign "^4.1.0" - strict-uri-encode "^1.0.0" - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -randombytes@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity 
sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -raw-loader@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/raw-loader/-/raw-loader-4.0.2.tgz#1aac6b7d1ad1501e66efdac1522c73e59a584eb6" - integrity sha512-ZnScIV3ag9A4wPX/ZayxL/jZH+euYb6FcUinPcgiQW0+UBtEv0O6Q3lGd3cqJ+GHH+rksEv3Pj99oxJ3u3VIKA== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - -react-dom@^16.12.0: - version "16.14.0" - resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.14.0.tgz#7ad838ec29a777fb3c75c3a190f661cf92ab8b89" - integrity sha512-1gCeQXDLoIqMgqD3IO2Ah9bnf0w9kzhwN5q4FGnHZ67hBm9yePzB5JJAIQCc8x3pFnNlwFq4RidZggNAAkzWWw== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - prop-types "^15.6.2" - scheduler "^0.19.1" - -react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: - version "16.13.1" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -react-router-dom@^5.0.0: - version "5.3.4" - resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.4.tgz#2ed62ffd88cae6db134445f4a0c0ae8b91d2e5e6" - integrity 
sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== - dependencies: - "@babel/runtime" "^7.12.13" - history "^4.9.0" - loose-envify "^1.3.1" - prop-types "^15.6.2" - react-router "5.3.4" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - -react-router@5.3.4, react-router@^5.0.0: - version "5.3.4" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.4.tgz#8ca252d70fcc37841e31473c7a151cf777887bb5" - integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== - dependencies: - "@babel/runtime" "^7.12.13" - history "^4.9.0" - hoist-non-react-statics "^3.1.0" - loose-envify "^1.3.1" - path-to-regexp "^1.7.0" - prop-types "^15.6.2" - react-is "^16.6.0" - tiny-invariant "^1.0.2" - tiny-warning "^1.0.0" - -react@^16.12.0: - version "16.14.0" - resolved "https://registry.yarnpkg.com/react/-/react-16.14.0.tgz#94d776ddd0aaa37da3eda8fc5b6b18a4c9a3114d" - integrity sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - prop-types "^15.6.2" - -readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.3.0, readable-stream@^2.3.5: - version "2.3.8" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" - integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - 
inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readdirp@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -recast@~0.11.12: - version "0.11.23" - resolved "https://registry.yarnpkg.com/recast/-/recast-0.11.23.tgz#451fd3004ab1e4df9b4e4b66376b2a21912462d3" - integrity sha512-+nixG+3NugceyR8O1bLU45qs84JgI3+8EauyRZafLgC9XbdAOIVgwV1Pe2da0YzGo62KzWoZwUpVEQf6qNAXWA== - dependencies: - ast-types "0.9.6" - esprima "~3.1.0" - private "~0.1.5" - source-map "~0.5.0" - -rechoir@^0.7.0: - version "0.7.1" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.7.1.tgz#9478a96a1ca135b5e88fc027f03ee92d6c645686" - integrity sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg== - dependencies: - resolve "^1.9.0" - -reflect.getprototypeof@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz#aaccbf41aca3821b87bb71d9dcbc7ad0ba50a3f3" - integrity sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - globalthis "^1.0.3" - which-builtin-type "^1.1.3" - -regenerate-unicode-properties@^10.1.0: - version "10.1.1" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz#6b0e05489d9076b04c436f318d9b067bba459480" - integrity sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q== - dependencies: - regenerate "^1.4.2" - -regenerate@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" 
- integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== - -regenerator-runtime@^0.14.0: - version "0.14.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" - integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== - -regenerator-transform@^0.15.2: - version "0.15.2" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4" - integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg== - dependencies: - "@babel/runtime" "^7.8.4" - -regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e" - integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - set-function-name "^2.0.0" +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -regexpp@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" - integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -regexpu-core@^5.3.1: 
- version "5.3.2" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" - integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== - dependencies: - "@babel/regjsgen" "^0.8.0" - regenerate "^1.4.2" - regenerate-unicode-properties "^10.1.0" - regjsparser "^0.9.1" - unicode-match-property-ecmascript "^2.0.0" - unicode-match-property-value-ecmascript "^2.1.0" - -regjsparser@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" - integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== +fastq@^1.6.0: + version "1.17.1" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz" + integrity sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w== dependencies: - jsesc "~0.5.0" - -relateurl@0.2.x, relateurl@^0.2.7: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== + reusify "^1.0.4" -renderkid@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" - integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: - css-select "^4.1.3" - dom-converter "^0.2.0" - htmlparser2 "^6.1.0" - lodash "^4.17.21" - strip-ansi "^6.0.1" - -replace-ext@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" - integrity sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== - -require-from-string@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -requires-port@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + flat-cache "^3.0.4" -reselect@^4.0.0: - version "4.1.8" - resolved "https://registry.yarnpkg.com/reselect/-/reselect-4.1.8.tgz#3f5dc671ea168dccdeb3e141236f69f02eaec524" - integrity sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + to-regex-range "^5.0.1" -resolve-from@^5.0.0: +find-up@^5.0.0: version "5.0.0" - resolved 
"https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-pathname@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" - integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== - -resolve@^1.12.0, resolve@^1.13.1, resolve@^1.14.2, resolve@^1.9.0: - version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" - integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -resolve@^2.0.0-next.4: - version "2.0.0-next.5" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.5.tgz#6b0ec3107e671e52b68cd068ef327173b90dc03c" - integrity sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA== + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -responselike@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== - dependencies: - lowercase-keys "^1.0.0" + locate-path "^6.0.0" + path-exists "^4.0.0" -restore-cursor@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" - integrity sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q== +flat-cache@^3.0.4: + version "3.2.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== dependencies: - onetime "^2.0.0" - signal-exit "^3.0.2" - -retry@^0.13.1: - version "0.13.1" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + flatted "^3.2.9" + keyv "^4.5.3" + rimraf "^3.0.2" -rimraf@2.6.3: - version "2.6.3" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" - integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== - dependencies: - glob "^7.1.3" +flatted@^3.2.9: + version "3.3.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a" + integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw== -rimraf@^2.5.4, rimraf@^2.6.3: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" +follow-redirects@^1.15.6: + version "1.15.6" + resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" + integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== -rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== dependencies: - glob "^7.1.3" - -run-async@^2.2.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" - integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== -rxjs@^6.4.0: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== - dependencies: - tslib "^1.9.0" +fsevents@~2.3.2, fsevents@~2.3.3: + version "2.3.3" + resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== -safe-array-concat@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.0.tgz#8d0cae9cb806d6d1c06e08ab13d847293ebe0692" - integrity sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg== - dependencies: - call-bind "^1.0.5" - get-intrinsic "^1.2.2" - has-symbols "^1.0.3" - isarray "^2.0.5" +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +glob-parent@^5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-regex-test@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.2.tgz#3ba32bdb3ea35f940ee87e5087c60ee786c3f6c5" - integrity sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ== + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: 
- call-bind "^1.0.5" - get-intrinsic "^1.2.2" - is-regex "^1.1.4" - -"safer-buffer@>= 2.1.2 < 3": - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + is-glob "^4.0.1" -scheduler@^0.19.1: - version "0.19.1" - resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.19.1.tgz#4f3e2ed2c1a7d65681f4c854fa8c5a1ccb40f196" - integrity sha512-n/zwRWRYSUj0/3g/otKDRPMh6qv2SYMWNq85IEa8iZyAv8od9zDYpGSnpBEjNgcMNq6Scbu5KfIPxNF72R/2EA== +glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" + is-glob "^4.0.3" -schema-utils@^2.5.0, schema-utils@^2.6.5, schema-utils@^2.7.0, schema-utils@^2.7.1: - version "2.7.1" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== +glob@^7.1.3: + version "7.2.3" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" -schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" - integrity 
sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" - integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== - dependencies: - "@types/json-schema" "^7.0.9" - ajv "^8.9.0" - ajv-formats "^2.1.1" - ajv-keywords "^5.1.0" +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -seek-bzip@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/seek-bzip/-/seek-bzip-1.0.6.tgz#35c4171f55a680916b52a07859ecf3b5857f21c4" - integrity sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ== +globals@^13.19.0: + version "13.24.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== dependencies: - commander "^2.8.1" - -select-hose@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== + type-fest "^0.20.2" -selfsigned@^2.1.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.4.1.tgz#560d90565442a3ed35b674034cec4e95dceb4ae0" - integrity sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q== +globby@^11.1.0: + version "11.1.0" + resolved 
"https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: - "@types/node-forge" "^1.3.0" - node-forge "^1" + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" -semver-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/semver-regex/-/semver-regex-2.0.0.tgz#a93c2c5844539a770233379107b38c7b4ac9d338" - integrity sha512-mUdIBBvdn0PLOeP3TEkMH7HHeUP3GjsXCwKarjv/kGmUFOYg1VqEemKhoQpWMu6X2I8kHeuVdGibLGkVK+/5Qw== +graphemer@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" + integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -semver-truncate@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/semver-truncate/-/semver-truncate-1.1.2.tgz#57f41de69707a62709a7e0104ba2117109ea47e8" - integrity sha512-V1fGg9i4CL3qesB6U0L6XAm4xOJiHmt4QAacazumuasc03BvtFGIMCduv01JWQ69Nv+JST9TqhSCiJoxoY031w== - dependencies: - semver "^5.3.0" +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== -semver@^5.3.0, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== -semver@^6.0.0, semver@^6.3.1: - version 
"6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== +ignore@^5.2.0, ignore@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" + integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw== -semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: - version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== +import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: - lru-cache "^6.0.0" + parent-module "^1.0.0" + resolve-from "^4.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" - -serialize-javascript@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4" - integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== - dependencies: - randombytes "^2.1.0" +imurmurhash@^0.1.4: + version 
"0.1.4" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== -serialize-javascript@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" - integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: - randombytes "^2.1.0" + once "^1.3.0" + wrappy "1" -serve-index@^1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw== - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" +inherits@2: + version "2.0.4" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -set-function-length@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.0.tgz#2f81dc6c16c7059bda5ab7c82c11f03a515ed8e1" - integrity 
sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w== - dependencies: - define-data-property "^1.1.1" - function-bind "^1.1.2" - get-intrinsic "^1.2.2" - gopd "^1.0.1" - has-property-descriptors "^1.0.1" +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== -set-function-name@^2.0.0, set-function-name@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a" - integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: - define-data-property "^1.0.1" - functions-have-names "^1.2.3" - has-property-descriptors "^1.0.0" - -setprototypeof@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + is-extglob "^2.1.1" -shallow-clone@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - 
kind-of "^6.0.2" +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg== - dependencies: - shebang-regex "^1.0.0" +is-path-inside@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== -shebang-command@^2.0.0: +isexe@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== -shell-quote@^1.8.1: - version "1.8.1" - resolved 
"https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" - integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" + argparse "^2.0.1" -signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== +json-buffer@3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== -slice-ansi@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" - integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== - dependencies: - ansi-styles "^3.2.0" - astral-regex "^1.0.0" - is-fullwidth-code-point "^2.0.0" - -sockjs@^0.3.24: - version "0.3.24" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" - integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== - dependencies: - faye-websocket "^0.11.3" - uuid "^8.3.2" - websocket-driver "^0.7.4" +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== -sort-keys-length@^1.0.0: +json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/sort-keys-length/-/sort-keys-length-1.0.1.tgz#9cb6f4f4e9e48155a6aa0671edd336ff1479a188" - integrity sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw== - dependencies: - sort-keys "^1.0.0" - -sort-keys@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" - integrity sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg== - dependencies: - is-plain-obj "^1.0.0" - -sort-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" - 
integrity sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg== - dependencies: - is-plain-obj "^1.0.0" - -source-list-map@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - -source-map-js@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== - -source-map-support@~0.5.20: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.7.3, source-map@^0.7.4: - version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" - integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== - -source-map@~0.5.0: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== - -spdy-transport@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" - integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -ssri@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" - integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== - dependencies: - minipass "^3.1.1" - -stable@^0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity 
sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== - -strict-uri-encode@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" - integrity sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ== - -string-width@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" + resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== -string-width@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string.prototype.matchall@^4.0.8: - version "4.0.10" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz#a1553eb532221d4180c51581d6072cd65d1ee100" - integrity sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - has-symbols "^1.0.3" - internal-slot "^1.0.5" - regexp.prototype.flags "^1.5.0" - set-function-name "^2.0.0" - side-channel "^1.0.4" - -string.prototype.trim@^1.2.8: - version "1.2.8" - resolved 
"https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" - integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -string.prototype.trimend@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e" - integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -string.prototype.trimstart@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" - integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" +json5@^2.2.3: + version "2.2.3" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== +keyv@^4.5.3: + version "4.5.4" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== dependencies: - safe-buffer "~5.2.0" + json-buffer "3.0.1" -string_decoder@~1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: - safe-buffer "~5.1.0" + prelude-ls "^1.2.1" + type-check "~0.4.0" -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: - ansi-regex "^3.0.0" + p-locate "^5.0.0" -strip-ansi@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== +loose-envify@^1.1.0: + version 
"1.4.0" + resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: - ansi-regex "^5.0.1" + js-tokens "^3.0.0 || ^4.0.0" -strip-dirs@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/strip-dirs/-/strip-dirs-2.1.0.tgz#4987736264fc344cf20f6c34aca9d13d1d4ed6c5" - integrity sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g== +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: - is-natural-number "^4.0.1" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + yallist "^3.0.2" -strip-json-comments@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== -strip-outer@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/strip-outer/-/strip-outer-1.0.1.tgz#b2fd2abf6604b9d1e6013057195df836b8a9d631" - integrity sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg== +micromatch@^4.0.4: + version "4.0.7" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz" + integrity sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q== dependencies: - escape-string-regexp "^1.0.2" + braces "^3.0.3" + picomatch "^2.3.1" -strnum@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" - integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -style-loader@^1.1.3: - version "1.3.0" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e" - integrity sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q== +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: - loader-utils "^2.0.0" - schema-utils "^2.7.0" + mime-db "1.52.0" -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== +minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + 
resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: - has-flag "^3.0.0" + brace-expansion "^1.1.7" -supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== +minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== dependencies: - has-flag "^4.0.0" + brace-expansion "^2.0.1" -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" +ms@2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== -svgo@^2.1.0: - version "2.8.0" - resolved 
"https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== - dependencies: - "@trysound/sax" "0.2.0" - commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" - picocolors "^1.0.0" - stable "^0.1.8" +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== -table@^5.2.3: - version "5.4.6" - resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" - integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== - dependencies: - ajv "^6.10.2" - lodash "^4.17.14" - slice-ansi "^2.1.0" - string-width "^3.0.0" - -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -tar-stream@^1.5.2: - version "1.6.2" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" - integrity sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A== - dependencies: - bl "^1.0.0" - buffer-alloc "^1.2.0" - end-of-stream "^1.0.0" - fs-constants "^1.0.0" - readable-stream "^2.3.0" - to-buffer "^1.1.1" - xtend "^4.0.0" - -tar@^6.0.2: - version "6.2.0" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.0.tgz#b14ce49a79cb1cd23bc9b016302dea5474493f73" - integrity sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^5.0.0" - minizlib 
"^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - -temp-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" - integrity sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ== +node-releases@^2.0.14: + version "2.0.18" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== -tempfile@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-2.0.0.tgz#6b0446856a9b1114d1856ffcbe509cccb0977265" - integrity sha512-ZOn6nJUgvgC09+doCEF3oB+r3ag7kUvlsXEGX069QRD60p+P3uP7XG9N2/at+EyIRGSN//ZY3LyEotA1YpmjuA== +once@^1.3.0: + version "1.4.0" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: - temp-dir "^1.0.0" - uuid "^3.0.1" + wrappy "1" -terser-webpack-plugin@^5.3.7: - version "5.3.10" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" - integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== - dependencies: - "@jridgewell/trace-mapping" "^0.3.20" - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript "^6.0.1" - terser "^5.26.0" - -terser@^5.10.0, terser@^5.26.0: - version "5.27.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.27.0.tgz#70108689d9ab25fef61c4e93e808e9fd092bf20c" - integrity sha512-bi1HRwVRskAjheeYl291n3JC4GgO/Ty4z1nVs5AAsmonJulGxpSektecnNedrwK9C7vpvVtcX3cw00VSLt7U2A== +optionator@^0.9.3: + version "0.9.4" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" + integrity 
sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== dependencies: - "@jridgewell/source-map" "^0.3.3" - acorn "^8.8.2" - commander "^2.20.0" - source-map-support "~0.5.20" + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.5" -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== +p-limit@^3.0.2: + version "3.1.0" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" -through@^2.3.6, through@^2.3.8, through@~2.3.6: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" -thunky@^1.0.2: - version "1.1.0" - resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" - integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== - -timed-out@^4.0.0, timed-out@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" - integrity sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA== - -tiny-invariant@^1.0.2: - 
version "1.3.1" - resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.3.1.tgz#8560808c916ef02ecfd55e66090df23a4b7aa642" - integrity sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw== - -tiny-warning@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" - integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== - -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: - os-tmpdir "~1.0.2" + callsites "^3.0.0" -to-buffer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80" - integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg== +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" + integrity 
sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -toidentifier@1.0.1: +picocolors@^1.0.0, picocolors@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz" + integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== -trim-repeated@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-repeated/-/trim-repeated-1.0.0.tgz#e3646a2ea4e891312bf7eace6cfb05380bc01c21" - integrity sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg== - dependencies: - escape-string-regexp "^1.0.2" +picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -ts-loader@^9.0.0: - version "9.5.1" - resolved 
"https://registry.yarnpkg.com/ts-loader/-/ts-loader-9.5.1.tgz#63d5912a86312f1fbe32cef0859fb8b2193d9b89" - integrity sha512-rNH3sK9kGZcH9dYzC7CewQm4NtxJTjSEVRJ2DyBZR7f8/wcta+iV44UPCXc5+nzDzivKtlzV6c9P4e+oFhDLYg== +postcss@^8.4.39: + version "8.4.40" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.40.tgz#eb81f2a4dd7668ed869a6db25999e02e9ad909d8" + integrity sha512-YF2kKIUzAofPMpfH6hOi2cGnv/HrUlfucspc7pDyvv7kGdqXrfj8SCl/t8owkEgKEuu8ZcRjSOxFxVLqwChZ2Q== dependencies: - chalk "^4.1.0" - enhanced-resolve "^5.0.0" - micromatch "^4.0.0" - semver "^7.3.4" - source-map "^0.7.4" - -tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@^2.0.3: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== - -tsutils@^3.17.1: - version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" + nanoid "^3.3.7" + picocolors "^1.0.1" + source-map-js "^1.2.0" -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity 
sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== - dependencies: - prelude-ls "~1.1.2" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== -type-fest@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1" - integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== +punycode@^2.1.0: + version "2.3.1" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== -type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== -typed-array-buffer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" - integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== 
+react-dom@^18.3.1: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4" + integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw== dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.1" - is-typed-array "^1.1.10" + loose-envify "^1.1.0" + scheduler "^0.23.2" -typed-array-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" - integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== - dependencies: - call-bind "^1.0.2" - for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" +react-refresh@^0.14.2: + version "0.14.2" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.14.2.tgz#3833da01ce32da470f1f936b9d477da5c7028bf9" + integrity sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA== -typed-array-byte-offset@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" - integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== +react@^18.3.1: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891" + integrity sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" - for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" + loose-envify "^1.1.0" -typed-array-length@^1.0.4: +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + integrity 
sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +reusify@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" - integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: - call-bind "^1.0.2" - for-each "^0.3.3" - is-typed-array "^1.1.9" - -typescript@^4.8.3: - version "4.9.5" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" - integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== - -uglify-js@3.4.x: - version "3.4.10" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.4.10.tgz#9ad9563d8eb3acdfb8d38597d2af1d815f6a755f" - integrity sha512-Y2VsbPVs0FIshJztycsO2SfPk7/KAF/T72qzv9u5EpQ4kB2hQoHlhNQTsNyy6ul7lQtqJN/AoWeS23OzEiEFxw== + glob "^7.1.3" + +rollup@^4.13.0: + version "4.19.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.19.0.tgz#83b08cc0b2bc38c26c194cb7f2cdabd84a2a8c02" + integrity sha512-5r7EYSQIowHsK4eTZ0Y81qpZuJz+MUuYeqmmYmRMl1nwhdmbiYqt5jwzf6u7wyOzJgYqtCRMtVRKOtHANBz7rA== dependencies: - commander "~2.19.0" - source-map "~0.6.1" + "@types/estree" "1.0.5" + optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.19.0" + "@rollup/rollup-android-arm64" "4.19.0" + "@rollup/rollup-darwin-arm64" "4.19.0" + "@rollup/rollup-darwin-x64" "4.19.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.19.0" + 
"@rollup/rollup-linux-arm-musleabihf" "4.19.0" + "@rollup/rollup-linux-arm64-gnu" "4.19.0" + "@rollup/rollup-linux-arm64-musl" "4.19.0" + "@rollup/rollup-linux-powerpc64le-gnu" "4.19.0" + "@rollup/rollup-linux-riscv64-gnu" "4.19.0" + "@rollup/rollup-linux-s390x-gnu" "4.19.0" + "@rollup/rollup-linux-x64-gnu" "4.19.0" + "@rollup/rollup-linux-x64-musl" "4.19.0" + "@rollup/rollup-win32-arm64-msvc" "4.19.0" + "@rollup/rollup-win32-ia32-msvc" "4.19.0" + "@rollup/rollup-win32-x64-msvc" "4.19.0" + fsevents "~2.3.2" -unbox-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" - integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: - call-bind "^1.0.2" - has-bigints "^1.0.2" - has-symbols "^1.0.3" - which-boxed-primitive "^1.0.2" - -unbzip2-stream@^1.0.9: - version "1.4.3" - resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" - integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== + queue-microtask "^1.2.2" + +scheduler@^0.23.2: + version "0.23.2" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3" + integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ== dependencies: - buffer "^5.2.1" - through "^2.3.8" + loose-envify "^1.1.0" -undici-types@~5.26.4: - version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity 
sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -unicode-canonical-property-names-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" - integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== +semver@^7.6.0: + version "7.6.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" + integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== -unicode-match-property-ecmascript@^2.0.0: +shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" - integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: - unicode-canonical-property-names-ecmascript "^2.0.0" - unicode-property-aliases-ecmascript "^2.0.0" - -unicode-match-property-value-ecmascript@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" - integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== - -unicode-property-aliases-ecmascript@^2.0.0: - version "2.1.0" - 
resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" - integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + shebang-regex "^3.0.0" -unique-filename@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" - integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - dependencies: - unique-slug "^2.0.0" +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -unique-slug@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" - integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== - dependencies: - imurmurhash "^0.1.4" +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== +source-map-js@^1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz" + integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg== -update-browserslist-db@^1.0.13: - version "1.0.13" - resolved 
"https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" - integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== +strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" + ansi-regex "^5.0.1" -upper-case@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-1.1.3.tgz#f6b4501c2ec4cdd26ba78be7222961de77621598" - integrity sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA== +strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: - punycode "^2.1.0" + has-flag "^3.0.0" -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - integrity sha512-BVA4lR5PIviy2PMseNd2jbFQ+jwSwQGdJejf5ctd1rEXt0Ypd7yanUK9+lYechVlN5VaTJGsu2U/3MDDu6KgBA== +supports-color@^7.1.0: + version "7.2.0" + resolved 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: - prepend-http "^1.0.1" + has-flag "^4.0.0" -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== - dependencies: - prepend-http "^2.0.0" +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== -url-to-options@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" - integrity sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A== +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== -use-clipboard-copy@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/use-clipboard-copy/-/use-clipboard-copy-0.1.2.tgz#83b16292dfa8ea262be714252022a8b4ad1c28c5" - integrity sha512-EkauxqyX+us4+Mfif/f61ew89EAOWIArqFpHR0jSG4SwwuDZzDAOeqO7gkK0vi+DQVADeB1RB3xqU3U0oOO3NQ== +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: - clipboard-copy "^3.0.0" + is-number "^7.0.0" -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== +ts-api-utils@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" + integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== -utila@~0.4: +type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA== - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - -uuid@^3.0.1: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - -uuid@^8.3.2: - version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - -value-equal@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" - integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== - -vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity 
sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: - glob-to-regexp "^0.4.1" - graceful-fs "^4.1.2" + prelude-ls "^1.2.1" -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== - dependencies: - minimalistic-assert "^1.0.0" - -webpack-cli@^4.6.0: - version "4.10.0" - resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-4.10.0.tgz#37c1d69c8d85214c5a65e589378f53aec64dab31" - integrity sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w== - dependencies: - "@discoveryjs/json-ext" "^0.5.0" - "@webpack-cli/configtest" "^1.2.0" - "@webpack-cli/info" "^1.5.0" - "@webpack-cli/serve" "^1.7.0" - colorette "^2.0.14" - commander "^7.0.0" - cross-spawn "^7.0.3" - fastest-levenshtein "^1.0.12" - import-local "^3.0.2" - interpret "^2.2.0" - rechoir "^0.7.0" - webpack-merge "^5.7.3" - -webpack-dev-middleware@^5.3.1: - version "5.3.3" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" - integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== - dependencies: - colorette "^2.0.10" - memfs "^3.4.3" - mime-types "^2.1.31" - range-parser "^1.2.1" - schema-utils 
"^4.0.0" - -webpack-dev-server@^4.5.0: - version "4.15.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz#8944b29c12760b3a45bdaa70799b17cb91b03df7" - integrity sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA== - dependencies: - "@types/bonjour" "^3.5.9" - "@types/connect-history-api-fallback" "^1.3.5" - "@types/express" "^4.17.13" - "@types/serve-index" "^1.9.1" - "@types/serve-static" "^1.13.10" - "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.5" - ansi-html-community "^0.0.8" - bonjour-service "^1.0.11" - chokidar "^3.5.3" - colorette "^2.0.10" - compression "^1.7.4" - connect-history-api-fallback "^2.0.0" - default-gateway "^6.0.3" - express "^4.17.3" - graceful-fs "^4.2.6" - html-entities "^2.3.2" - http-proxy-middleware "^2.0.3" - ipaddr.js "^2.0.1" - launch-editor "^2.6.0" - open "^8.0.9" - p-retry "^4.5.0" - rimraf "^3.0.2" - schema-utils "^4.0.0" - selfsigned "^2.1.1" - serve-index "^1.9.1" - sockjs "^0.3.24" - spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.13.0" - -webpack-merge@^5.7.0, webpack-merge@^5.7.3: - version "5.10.0" - resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.10.0.tgz#a3ad5d773241e9c682803abf628d4cd62b8a4177" - integrity sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA== - dependencies: - clone-deep "^4.0.1" - flat "^5.0.2" - wildcard "^2.0.0" - -webpack-sources@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack-sources@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" - integrity 
sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== - -webpack@^5.61.0: - version "5.89.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.89.0.tgz#56b8bf9a34356e93a6625770006490bf3a7f32dc" - integrity sha512-qyfIC10pOr70V+jkmud8tMfajraGCZMBWJtrmuBymQKCrLTRejBI8STDp1MCyZu/QTdZSeacCQYpYNQVOzX5kw== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^1.0.0" - "@webassemblyjs/ast" "^1.11.5" - "@webassemblyjs/wasm-edit" "^1.11.5" - "@webassemblyjs/wasm-parser" "^1.11.5" - acorn "^8.7.1" - acorn-import-assertions "^1.9.0" - browserslist "^4.14.5" - chrome-trace-event "^1.0.2" - enhanced-resolve "^5.15.0" - es-module-lexer "^1.2.1" - eslint-scope "5.1.1" - events "^3.2.0" - glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" - json-parse-even-better-errors "^2.3.1" - loader-runner "^4.2.0" - mime-types "^2.1.27" - neo-async "^2.6.2" - schema-utils "^3.2.0" - tapable "^2.1.1" - terser-webpack-plugin "^5.3.7" - watchpack "^2.4.0" - webpack-sources "^3.2.3" - -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: - version "0.7.4" - resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" - integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== - dependencies: - http-parser-js ">=0.5.1" - safe-buffer ">=5.1.0" - websocket-extensions ">=0.1.1" +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== -websocket-extensions@>=0.1.1: - version "0.1.4" - resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" - integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== 
+typescript@^5.2.2: + version "5.5.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.4.tgz#d9852d6c82bad2d2eda4fd74a5762a8f5909e9ba" + integrity sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q== -which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== +update-browserslist-db@^1.1.0: + version "1.1.0" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz" + integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" + escalade "^3.1.2" + picocolors "^1.0.1" -which-builtin-type@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.1.3.tgz#b1b8443707cc58b6e9bf98d32110ff0c2cbd029b" - integrity sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw== - dependencies: - function.prototype.name "^1.1.5" - has-tostringtag "^1.0.0" - is-async-function "^2.0.0" - is-date-object "^1.0.5" - is-finalizationregistry "^1.0.2" - is-generator-function "^1.0.10" - is-regex "^1.1.4" - is-weakref "^1.0.2" - isarray "^2.0.5" - which-boxed-primitive "^1.0.2" - which-collection "^1.0.1" - which-typed-array "^1.1.9" - -which-collection@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" - integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== - dependencies: - is-map "^2.0.1" - is-set "^2.0.1" - is-weakmap "^2.0.1" - 
is-weakset "^2.0.1" - -which-typed-array@^1.1.11, which-typed-array@^1.1.13, which-typed-array@^1.1.9: - version "1.1.13" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" - integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.4" - for-each "^0.3.3" - gopd "^1.0.1" - has-tostringtag "^1.0.0" - -which@^1.2.9: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + punycode "^2.1.0" + +vite@^5.3.4: + version "5.3.5" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.5.tgz#b847f846fb2b6cb6f6f4ed50a830186138cb83d8" + integrity sha512-MdjglKR6AQXQb9JGiS7Rc2wC6uMjcm7Go/NHNO63EwiJXfuk9PgqiP/n5IDJCziMkfw9n4Ubp7lttNwz+8ZVKA== dependencies: - isexe "^2.0.0" + esbuild "^0.21.3" + postcss "^8.4.39" + rollup "^4.13.0" + optionalDependencies: + fsevents "~2.3.3" which@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -wildcard@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67" - integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ== - -word-wrap@~1.2.3: +word-wrap@^1.2.5: version 
"1.2.5" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== wrappy@1: version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== -write@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" - integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== - dependencies: - mkdirp "^0.5.1" - -ws@^8.13.0: - version "8.16.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.16.0.tgz#d1cd774f36fbc07165066a60e40323eab6446fd4" - integrity sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ== - -xtend@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A== - yallist@^3.0.2: version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== -yallist@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yauzl@^2.4.2: - version "2.10.0" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" - integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== - dependencies: - buffer-crc32 "~0.2.3" - fd-slicer "~1.1.0" - yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== From 7195c2ac571b3af1279ee21c7d18d04b7fec656a Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 29 Jul 2024 10:11:57 +0200 Subject: [PATCH 093/210] CH-17 update codefresh pipeline --- .../codefresh-template-dev.yaml | 20 ++++++++-------- .../codefresh-template-prod.yaml | 19 +++++++-------- .../codefresh-template-stage.yaml | 24 +++++++++---------- .../codefresh-template-test.yaml | 2 +- deployment/codefresh-test.yaml | 16 +++++++++++-- 5 files changed, 45 insertions(+), 36 deletions(-) diff --git a/deployment-configuration/codefresh-template-dev.yaml b/deployment-configuration/codefresh-template-dev.yaml index e11bcf6d..7673ff53 100644 --- a/deployment-configuration/codefresh-template-dev.yaml +++ b/deployment-configuration/codefresh-template-dev.yaml @@ -1,4 +1,4 @@ -version: '1.0' +version: "1.0" stages: - prepare - build @@ -11,8 +11,8 @@ steps: title: Clone main repository type: git-clone stage: prepare - repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' - revision: '${{CF_BRANCH}}' + repo: "${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}" + revision: "${{CF_BRANCH}}" git: github post_main_clone: title: Post main clone @@ 
-22,13 +22,13 @@ steps: - title: Cloning cloud-harness repository... type: git-clone stage: prepare - repo: 'https://github.com/MetaCell/cloud-harness.git' - revision: '${{CLOUDHARNESS_BRANCH}}' + repo: "https://github.com/MetaCell/cloud-harness.git" + revision: "${{CLOUDHARNESS_BRANCH}}" working_directory: . git: github prepare_deployment: title: "Prepare helm chart" - image: python:3.9.10 + image: python:3.12 stage: prepare working_directory: . commands: @@ -38,13 +38,13 @@ steps: - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: commands: - - 'helm template ./deployment/helm --debug -n ${{NAMESPACE}}' + - "helm template ./deployment/helm --debug -n ${{NAMESPACE}}" environment: - ACTION=auth - KUBE_CONTEXT=${{NAMESPACE}} image: codefresh/cfstep-helm:3.6.2 stage: prepare - title: 'View helm chart' + title: "View helm chart" build_base_images: title: Build base images type: parallel @@ -77,7 +77,7 @@ steps: chart_version: ${{CF_BUILD_ID}} cmd_ps: --wait --timeout 600s --create-namespace custom_value_files: - - ./deployment/helm/values.yaml + - ./deployment/helm/values.yaml build_test_images: title: Build test images type: parallel @@ -141,4 +141,4 @@ steps: when: condition: all: - whenVarExists: 'includes("${{DEPLOYMENT_PUBLISH_TAG}}", "{{DEPLOYMENT_PUBLISH_TAG}}") == false' \ No newline at end of file + whenVarExists: 'includes("${{DEPLOYMENT_PUBLISH_TAG}}", "{{DEPLOYMENT_PUBLISH_TAG}}") == false' diff --git a/deployment-configuration/codefresh-template-prod.yaml b/deployment-configuration/codefresh-template-prod.yaml index ce5ed02c..ab54a186 100644 --- a/deployment-configuration/codefresh-template-prod.yaml +++ b/deployment-configuration/codefresh-template-prod.yaml @@ -1,4 +1,4 @@ -version: '1.0' +version: "1.0" stages: - prepare - deploy @@ -7,8 +7,8 @@ steps: title: Clone main repository type: git-clone stage: prepare - repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' - revision: '${{CF_BRANCH}}' + repo: "${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}" + 
revision: "${{CF_BRANCH}}" git: github post_main_clone: title: Post main clone @@ -18,13 +18,13 @@ steps: - title: Cloning cloud-harness repository... type: git-clone stage: prepare - repo: 'https://github.com/MetaCell/cloud-harness.git' - revision: '${{CLOUDHARNESS_BRANCH}}' + repo: "https://github.com/MetaCell/cloud-harness.git" + revision: "${{CLOUDHARNESS_BRANCH}}" working_directory: . git: github prepare_deployment: title: "Prepare helm chart" - image: python:3.9.10 + image: python:3.12 stage: prepare working_directory: . commands: @@ -32,13 +32,13 @@ steps: - harness-deployment $PATHS -t ${{DEPLOYMENT_TAG}} -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} -e $ENV $PARAMS prepare_deployment_view: commands: - - 'helm template ./deployment/helm --debug -n ${{NAMESPACE}}' + - "helm template ./deployment/helm --debug -n ${{NAMESPACE}}" environment: - ACTION=auth - KUBE_CONTEXT=${{NAMESPACE}} image: codefresh/cfstep-helm:3.6.2 stage: prepare - title: 'View helm chart' + title: "View helm chart" deployment: stage: deploy type: helm @@ -53,5 +53,4 @@ steps: chart_version: ${{DEPLOYMENT_TAG}} cmd_ps: --wait --timeout 600s --create-namespace custom_value_files: - - ./deployment/helm/values.yaml - + - ./deployment/helm/values.yaml diff --git a/deployment-configuration/codefresh-template-stage.yaml b/deployment-configuration/codefresh-template-stage.yaml index 7a1ff574..894d130c 100644 --- a/deployment-configuration/codefresh-template-stage.yaml +++ b/deployment-configuration/codefresh-template-stage.yaml @@ -1,4 +1,4 @@ -version: '1.0' +version: "1.0" stages: - prepare - build @@ -10,8 +10,8 @@ steps: title: Clone main repository type: git-clone stage: prepare - repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' - revision: '${{CF_BRANCH}}' + repo: "${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}" + revision: "${{CF_BRANCH}}" git: github post_main_clone: title: Post main clone @@ -21,27 +21,27 @@ steps: - title: Cloning cloud-harness repository... 
type: git-clone stage: prepare - repo: 'https://github.com/MetaCell/cloud-harness.git' - revision: '${{CLOUDHARNESS_BRANCH}}' + repo: "https://github.com/MetaCell/cloud-harness.git" + revision: "${{CLOUDHARNESS_BRANCH}}" working_directory: . git: github prepare_deployment: title: "Prepare helm chart" - image: python:3.9.10 + image: python:3.12 stage: prepare working_directory: . commands: - bash cloud-harness/install.sh - - harness-deployment $PATHS -t ${{DEPLOYMENT_TAG}} -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} -e $ENV $PARAMS + - harness-deployment $PATHS -t ${{DEPLOYMENT_TAG}} -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} -e $ENV $PARAMS prepare_deployment_view: commands: - - 'helm template ./deployment/helm --debug -n ${{NAMESPACE}}' + - "helm template ./deployment/helm --debug -n ${{NAMESPACE}}" environment: - ACTION=auth - KUBE_CONTEXT=${{NAMESPACE}} image: codefresh/cfstep-helm:3.6.2 stage: prepare - title: 'View helm chart' + title: "View helm chart" deployment: stage: deploy type: helm @@ -56,7 +56,7 @@ steps: chart_version: ${{DEPLOYMENT_TAG}} cmd_ps: --wait --timeout 600s --create-namespace custom_value_files: - - ./deployment/helm/values.yaml + - ./deployment/helm/values.yaml build_test_images: title: Build test images type: parallel @@ -118,8 +118,7 @@ steps: publish: type: parallel stage: publish - steps: - REPLACE_ME + steps: REPLACE_ME when: condition: all: @@ -140,4 +139,3 @@ steps: all: whenVarExists: 'includes("${{DEPLOYMENT_PUBLISH_TAG}}", "{{DEPLOYMENT_PUBLISH_TAG}}") == false' whenVarExists2: 'includes("${{REPO_TOKEN}}", "{{REPO_TOKEN}}") == false' - diff --git a/deployment-configuration/codefresh-template-test.yaml b/deployment-configuration/codefresh-template-test.yaml index bfaee7ad..9139211e 100644 --- a/deployment-configuration/codefresh-template-test.yaml +++ b/deployment-configuration/codefresh-template-test.yaml @@ -27,7 +27,7 @@ steps: git: github prepare_deployment: 
title: "Prepare helm chart" - image: python:3.9.10 + image: python:3.12 stage: prepare working_directory: . commands: diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index 672a2b26..410925a5 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -13,13 +13,25 @@ steps: repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' revision: '${{CF_BRANCH}}' git: github + post_main_clone: + title: Post main clone + type: parallel + stage: prepare + steps: + - title: Cloning cloud-harness repository... + type: git-clone + stage: prepare + repo: https://github.com/MetaCell/cloud-harness.git + revision: '${{CLOUDHARNESS_BRANCH}}' + working_directory: . + git: github prepare_deployment: title: Prepare helm chart - image: python:3.9.10 + image: python:3.12 stage: prepare working_directory: . commands: - - bash install.sh + - bash cloud-harness/install.sh - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -e test --write-env -N -i samples - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export From 8b5faa42dbff67be9d2a8a38056ef8a619d1510a Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 29 Jul 2024 10:17:40 +0200 Subject: [PATCH 094/210] Fix test pipeline --- deployment/codefresh-test.yaml | 414 +++++++++++++++++---------------- 1 file changed, 217 insertions(+), 197 deletions(-) diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index 410925a5..935d508e 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -1,47 +1,35 @@ -version: '1.0' +version: "1.0" stages: -- prepare -- build -- unittest -- deploy -- qa + - prepare + - build + - unittest + - deploy + - qa steps: main_clone: title: Clone main repository type: git-clone stage: prepare - repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' - revision: '${{CF_BRANCH}}' + repo: "${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}" + revision: "${{CF_BRANCH}}" git: github - 
post_main_clone: - title: Post main clone - type: parallel - stage: prepare - steps: - - title: Cloning cloud-harness repository... - type: git-clone - stage: prepare - repo: https://github.com/MetaCell/cloud-harness.git - revision: '${{CLOUDHARNESS_BRANCH}}' - working_directory: . - git: github prepare_deployment: title: Prepare helm chart image: python:3.12 stage: prepare working_directory: . commands: - - bash cloud-harness/install.sh - - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} - -rs ${{REGISTRY_SECRET}} -e test --write-env -N -i samples - - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export - - cat ${{CF_VOLUME_PATH}}/env_vars_to_export + - bash install.sh + - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}} + -rs ${{REGISTRY_SECRET}} -e test --write-env -N -i samples + - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export + - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: commands: - - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} + - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} environment: - - ACTION=auth - - KUBE_CONTEXT=test-${{CF_BUILD_ID}} + - ACTION=auth + - KUBE_CONTEXT=test-${{CF_BUILD_ID}} image: codefresh/cfstep-helm:3.6.2 stage: prepare title: View helm chart @@ -54,43 +42,47 @@ steps: type: build stage: build dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/cloudharness-base title: Cloudharness base working_directory: ./. 
- tag: '${{CLOUDHARNESS_BASE_TAG}}' + tag: "${{CLOUDHARNESS_BASE_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') == false cloudharness-frontend-build: type: build stage: build dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/cloudharness-frontend-build title: Cloudharness frontend build working_directory: ./. 
- tag: '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + tag: "${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}', + buildDoesNotExist: + includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}', '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', + forceNoCache: + includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false build_static_images: title: Build static images @@ -101,23 +93,25 @@ steps: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/cloudharness-flask title: Cloudharness flask working_directory: ./infrastructure/common-images/cloudharness-flask - tag: '${{CLOUDHARNESS_FLASK_TAG}}' + tag: "${{CLOUDHARNESS_FLASK_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}') == true - forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}') == false build_application_images: type: parallel @@ -127,241 +121,263 @@ steps: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + 
registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/nfsserver title: Nfsserver working_directory: ./applications/nfsserver - tag: '${{NFSSERVER_TAG}}' + tag: "${{NFSSERVER_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{NFSSERVER_TAG_EXISTS}}', '{{NFSSERVER_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{NFSSERVER_TAG_EXISTS}}', '{{NFSSERVER_TAG_EXISTS}}') == true - forceNoCache: includes('${{NFSSERVER_TAG_FORCE_BUILD}}', '{{NFSSERVER_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{NFSSERVER_TAG_FORCE_BUILD}}', '{{NFSSERVER_TAG_FORCE_BUILD}}') == false accounts: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/accounts title: Accounts working_directory: ./applications/accounts - tag: '${{ACCOUNTS_TAG}}' + tag: "${{ACCOUNTS_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') == true - forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') == false samples: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - 
CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/samples title: Samples working_directory: ./applications/samples - tag: '${{SAMPLES_TAG}}' + tag: "${{SAMPLES_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}') == false samples-print-file: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-print-file title: Samples print file working_directory: ./applications/samples/tasks/print-file - tag: '${{SAMPLES_PRINT_FILE_TAG}}' + tag: "${{SAMPLES_PRINT_FILE_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}') + buildDoesNotExist: + 
includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}') == false samples-secret: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-secret title: Samples secret working_directory: ./applications/samples/tasks/secret - tag: '${{SAMPLES_SECRET_TAG}}' + tag: "${{SAMPLES_SECRET_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}') == false samples-sum: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - 
CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-sum title: Samples sum working_directory: ./applications/samples/tasks/sum - tag: '${{SAMPLES_SUM_TAG}}' + tag: "${{SAMPLES_SUM_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}') == true - forceNoCache: includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}') == false common: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/common title: Common working_directory: ./applications/common/server - tag: '${{COMMON_TAG}}' + tag: "${{COMMON_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}') == true - forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}') == false workflows-send-result-event: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - 
REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/workflows-send-result-event title: Workflows send result event working_directory: ./applications/workflows/tasks/send-result-event - tag: '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}' + tag: "${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}', + buildDoesNotExist: + includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}', '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}', + forceNoCache: + includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}', '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false workflows-extract-download: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/workflows-extract-download title: Workflows extract download working_directory: ./applications/workflows/tasks/extract-download - tag: '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}' + tag: "${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}', + buildDoesNotExist: + includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}', '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}', + forceNoCache: + 
includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}', '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false workflows-notify-queue: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/workflows-notify-queue title: Workflows notify queue working_directory: ./applications/workflows/tasks/notify-queue - tag: '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}' + tag: "${{WORKFLOWS_NOTIFY_QUEUE_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}', + buildDoesNotExist: + includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}', '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}', + forceNoCache: + includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}', '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false workflows: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/workflows title: Workflows working_directory: ./applications/workflows/server - tag: '${{WORKFLOWS_TAG}}' + tag: 
"${{WORKFLOWS_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}') == true - forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}') == false tests_unit: stage: unittest @@ -370,8 +386,8 @@ steps: samples_ut: title: Unit tests for samples commands: - - pytest /usr/src/app/samples/test - image: '${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}' + - pytest /usr/src/app/samples/test + image: "${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}" deployment: stage: deploy type: helm @@ -381,14 +397,14 @@ steps: helm_version: 3.6.2 chart_name: deployment/helm release_name: test-${{NAMESPACE_BASENAME}} - kube_context: '${{CLUSTER_NAME}}' + kube_context: "${{CLUSTER_NAME}}" namespace: test-${{NAMESPACE_BASENAME}} - chart_version: '${{CF_SHORT_REVISION}}' + chart_version: "${{CF_SHORT_REVISION}}" cmd_ps: --timeout 600s --create-namespace custom_value_files: - - ./deployment/helm/values.yaml + - ./deployment/helm/values.yaml custom_values: - - apps_samples_harness_secrets_asecret=${{ASECRET}} + - apps_samples_harness_secrets_asecret=${{ASECRET}} build_test_images: title: Build test images type: parallel @@ -398,130 +414,134 @@ steps: type: build stage: build dockerfile: Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/test-e2e title: Test e2e working_directory: ./test/test-e2e - tag: '${{TEST_E2E_TAG}}' + tag: "${{TEST_E2E_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', 
'{{TEST_E2E_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}') == true - forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') == false test-api: type: build stage: build dockerfile: test/test-api/Dockerfile - registry: '${{CODEFRESH_REGISTRY}}' + registry: "${{CODEFRESH_REGISTRY}}" buildkit: true build_arguments: - - DOMAIN=${{DOMAIN}} - - NOCACHE=${{CF_BUILD_ID}} - - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} + - DOMAIN=${{DOMAIN}} + - NOCACHE=${{CF_BUILD_ID}} + - REGISTRY=${{REGISTRY}}/cloudharness/ + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/test-api title: Test api working_directory: ./. - tag: '${{TEST_API_TAG}}' + tag: "${{TEST_API_TAG}}" when: condition: any: - buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') + buildDoesNotExist: + includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') == true - forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') + forceNoCache: + includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') == false wait_deployment: stage: qa title: Wait deployment to be ready image: codefresh/kubectl commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}} - - kubectl rollout status deployment/accounts - - kubectl rollout status deployment/samples - - kubectl rollout status deployment/common - - kubectl rollout status deployment/workflows - - sleep 60 + - kubectl config use-context ${{CLUSTER_NAME}} + - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}} + - kubectl rollout status deployment/accounts + - 
kubectl rollout status deployment/samples + - kubectl rollout status deployment/common + - kubectl rollout status deployment/workflows + - sleep 60 tests_api: stage: qa title: Api tests working_directory: /home/test - image: '${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}' + image: "${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}" fail_fast: false commands: - - echo $APP_NAME + - echo $APP_NAME scale: samples_api_test: title: samples api test volumes: - - '${{CF_REPO_NAME}}/applications/samples:/home/test' - - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' + - "${{CF_REPO_NAME}}/applications/samples:/home/test" + - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml" environment: - - APP_URL=https://samples.${{DOMAIN}}/api - - USERNAME=sample@testuser.com - - PASSWORD=test + - APP_URL=https://samples.${{DOMAIN}}/api + - USERNAME=sample@testuser.com + - PASSWORD=test commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --hypothesis-suppress-health-check=too_slow - --hypothesis-deadline=180000 --request-timeout=180000 --hypothesis-max-examples=2 - --show-errors-tracebacks - - pytest -v test/api + - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --hypothesis-suppress-health-check=too_slow + --hypothesis-deadline=180000 --request-timeout=180000 --hypothesis-max-examples=2 + --show-errors-tracebacks + - pytest -v test/api common_api_test: title: common api test volumes: - - '${{CF_REPO_NAME}}/applications/common:/home/test' - - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' + - "${{CF_REPO_NAME}}/applications/common:/home/test" + - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml" 
environment: - - APP_URL=https://common.${{DOMAIN}}/api + - APP_URL=https://common.${{DOMAIN}}/api commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://common.${{DOMAIN}}/api -c all + - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + https://common.${{DOMAIN}}/api -c all workflows_api_test: title: workflows api test volumes: - - '${{CF_REPO_NAME}}/applications/workflows:/home/test' - - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' + - "${{CF_REPO_NAME}}/applications/workflows:/home/test" + - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml" environment: - - APP_URL=https://workflows.${{DOMAIN}}/api + - APP_URL=https://workflows.${{DOMAIN}}/api commands: - - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url - https://workflows.${{DOMAIN}}/api -c all + - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url + https://workflows.${{DOMAIN}}/api -c all hooks: on_fail: exec: image: alpine commands: - - cf_export FAILED=failed + - cf_export FAILED=failed tests_e2e: stage: qa title: End to end tests working_directory: /home/test - image: '${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}' + image: "${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}" fail_fast: false commands: - - yarn test + - yarn test scale: samples_e2e_test: title: samples e2e test volumes: - - '${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples' + - "${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples" environment: - - APP_URL=https://samples.${{DOMAIN}} - - USERNAME=sample@testuser.com - - PASSWORD=test + - APP_URL=https://samples.${{DOMAIN}} + - USERNAME=sample@testuser.com + - PASSWORD=test hooks: on_fail: exec: image: alpine commands: - - cf_export FAILED=failed + - cf_export FAILED=failed approval: type: pending-approval stage: qa @@ 
-529,7 +549,7 @@ steps: description: The pipeline will fail after ${{WAIT_ON_FAIL}} minutes timeout: timeUnit: minutes - duration: '${{WAIT_ON_FAIL}}' + duration: "${{WAIT_ON_FAIL}}" finalState: denied when: condition: @@ -541,5 +561,5 @@ steps: image: codefresh/kubectl stage: qa commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl delete ns test-${{NAMESPACE_BASENAME}} + - kubectl config use-context ${{CLUSTER_NAME}} + - kubectl delete ns test-${{NAMESPACE_BASENAME}} From 8f76ae2187840f126a195d1495f12c1d9f41d3b7 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 29 Jul 2024 11:29:50 +0200 Subject: [PATCH 095/210] Fix build issue --- applications/samples/frontend/package.json | 1 + applications/samples/frontend/yarn.lock | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/applications/samples/frontend/package.json b/applications/samples/frontend/package.json index 9ccdc4a6..aa383ae3 100644 --- a/applications/samples/frontend/package.json +++ b/applications/samples/frontend/package.json @@ -18,6 +18,7 @@ "react-dom": "^18.3.1" }, "devDependencies": { + "@types/node": "^22.0.0", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", "@typescript-eslint/eslint-plugin": "^7.15.0", diff --git a/applications/samples/frontend/yarn.lock b/applications/samples/frontend/yarn.lock index eae974cc..3b6f86c4 100644 --- a/applications/samples/frontend/yarn.lock +++ b/applications/samples/frontend/yarn.lock @@ -549,6 +549,13 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== +"@types/node@^22.0.0": + version "22.0.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.0.0.tgz#04862a2a71e62264426083abe1e27e87cac05a30" + integrity sha512-VT7KSYudcPOzP5Q0wfbowyNLaVR8QWUdw+088uFWwfvpY6uCWaXpqV6ieLAu9WBcnTa7H4Z5RLK8I5t2FuOcqw== + dependencies: + undici-types "~6.11.1" 
+ "@types/prop-types@*": version "15.7.12" resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.12.tgz#12bb1e2be27293c1406acb6af1c3f3a1481d98c6" @@ -1658,6 +1665,11 @@ typescript@^5.2.2: resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.4.tgz#d9852d6c82bad2d2eda4fd74a5762a8f5909e9ba" integrity sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q== +undici-types@~6.11.1: + version "6.11.1" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.11.1.tgz#432ea6e8efd54a48569705a699e62d8f4981b197" + integrity sha512-mIDEX2ek50x0OlRgxryxsenE5XaQD4on5U2inY7RApK3SOJpofyw7uW2AyfMKkhAxXIceo2DeWGVGwyvng1GNQ== + update-browserslist-db@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz" From f58f7eddfc370a246f984568a76b0c77d9df45b9 Mon Sep 17 00:00:00 2001 From: Jake Conkerton-Darby Date: Mon, 29 Jul 2024 15:49:47 +0100 Subject: [PATCH 096/210] CH-91 Updated the harness model to add an envmap deprecating the env property --- .gitignore | 3 +- docs/model/ApiTestsConfig.md | 46 ----- docs/model/ApplicationAccountsConfig.md | 44 ----- docs/model/ApplicationConfig.md | 17 -- docs/model/ApplicationDependenciesConfig.md | 72 -------- docs/model/ApplicationHarnessConfig.md | 93 ---------- docs/model/ApplicationProbe.md | 20 --- docs/model/ApplicationTestConfig.md | 17 -- docs/model/ApplicationUser.md | 44 ----- docs/model/ApplicationsConfigsMap.md | 14 -- docs/model/AutoArtifactSpec.md | 16 -- docs/model/BackupConfig.md | 23 --- docs/model/CDCEvent.md | 21 --- docs/model/CDCEventMeta.md | 33 ---- docs/model/CpuMemoryConfig.md | 16 -- docs/model/DatabaseDeploymentConfig.md | 37 ---- docs/model/DeploymentAutoArtifactConfig.md | 33 ---- docs/model/DeploymentResourcesConf.md | 16 -- docs/model/DeploymentVolumeSpec.md | 33 ---- docs/model/E2ETestsConfig.md | 18 -- docs/model/FileResourcesConfig.md | 17 -- docs/model/Filename.md 
| 9 - docs/model/FreeObject.md | 14 -- docs/model/GitDependencyConfig.md | 19 -- docs/model/HarnessMainConfig.md | 42 ----- docs/model/IngressConfig.md | 43 ----- docs/model/JupyterHubConfig.md | 32 ---- docs/model/NameValue.md | 16 -- docs/model/PathSpecifier.md | 9 - docs/model/Quota.md | 14 -- docs/model/RegistryConfig.md | 16 -- docs/model/ServiceAutoArtifactConfig.md | 29 ---- docs/model/SimpleMap.md | 14 -- docs/model/URL.md | 9 - docs/model/UnitTestsConfig.md | 30 ---- docs/model/UriRoleMappingConfig.md | 32 ---- docs/model/User.md | 129 -------------- docs/model/UserCredential.md | 23 --- docs/model/UserGroup.md | 82 --------- docs/model/UserRole.md | 33 ---- libraries/models/api/openapi.yaml | 92 +++++----- .../cloudharness_model/models/__init__.py | 8 - .../models/api_tests_config.py | 21 +-- .../models/application_accounts_config.py | 13 +- .../models/application_config.py | 9 +- .../models/application_dependencies_config.py | 21 +-- .../models/application_harness_config.py | 89 +++++----- .../models/application_probe.py | 21 +-- .../models/application_test_config.py | 17 +- .../models/application_user.py | 21 +-- .../models/auto_artifact_spec.py | 13 +- .../models/backup_config.py | 41 ++--- .../cloudharness_model/models/base_model.py | 68 ++++++++ .../cloudharness_model/models/cdc_event.py | 25 ++- .../models/cdc_event_meta.py | 33 ++-- .../models/cpu_memory_config.py | 13 +- .../models/database_deployment_config.py | 163 +++++++++--------- .../models/deployment_auto_artifact_config.py | 147 ++++++++-------- .../models/deployment_resources_conf.py | 13 +- .../models/deployment_volume_spec.py | 139 ++++++++------- .../models/e2_e_tests_config.py | 21 +-- .../models/file_resources_config.py | 17 +- .../models/git_dependency_config.py | 17 +- .../models/harness_main_config.py | 99 +++++++---- .../models/ingress_config.py | 127 +++++++------- .../ingress_config_all_of_letsencrypt.py | 9 +- .../models/jupyter_hub_config.py | 21 +-- 
.../cloudharness_model/models/name_value.py | 13 +- .../models/registry_config.py | 13 +- .../models/service_auto_artifact_config.py | 75 ++++---- .../models/unit_tests_config.py | 13 +- .../models/uri_role_mapping_config.py | 13 +- .../models/cloudharness_model/models/user.py | 81 +++++---- .../models/user_credential.py | 41 ++--- .../cloudharness_model/models/user_group.py | 37 ++-- .../cloudharness_model/models/user_role.py | 33 ++-- .../models/cloudharness_model/typing_utils.py | 2 - libraries/models/tox.ini | 2 +- 78 files changed, 798 insertions(+), 2031 deletions(-) delete mode 100644 docs/model/ApiTestsConfig.md delete mode 100644 docs/model/ApplicationAccountsConfig.md delete mode 100644 docs/model/ApplicationConfig.md delete mode 100644 docs/model/ApplicationDependenciesConfig.md delete mode 100644 docs/model/ApplicationHarnessConfig.md delete mode 100644 docs/model/ApplicationProbe.md delete mode 100644 docs/model/ApplicationTestConfig.md delete mode 100644 docs/model/ApplicationUser.md delete mode 100644 docs/model/ApplicationsConfigsMap.md delete mode 100644 docs/model/AutoArtifactSpec.md delete mode 100644 docs/model/BackupConfig.md delete mode 100644 docs/model/CDCEvent.md delete mode 100644 docs/model/CDCEventMeta.md delete mode 100644 docs/model/CpuMemoryConfig.md delete mode 100644 docs/model/DatabaseDeploymentConfig.md delete mode 100644 docs/model/DeploymentAutoArtifactConfig.md delete mode 100644 docs/model/DeploymentResourcesConf.md delete mode 100644 docs/model/DeploymentVolumeSpec.md delete mode 100644 docs/model/E2ETestsConfig.md delete mode 100644 docs/model/FileResourcesConfig.md delete mode 100644 docs/model/Filename.md delete mode 100644 docs/model/FreeObject.md delete mode 100644 docs/model/GitDependencyConfig.md delete mode 100644 docs/model/HarnessMainConfig.md delete mode 100644 docs/model/IngressConfig.md delete mode 100644 docs/model/JupyterHubConfig.md delete mode 100644 docs/model/NameValue.md delete mode 100644 
docs/model/PathSpecifier.md delete mode 100644 docs/model/Quota.md delete mode 100644 docs/model/RegistryConfig.md delete mode 100644 docs/model/ServiceAutoArtifactConfig.md delete mode 100644 docs/model/SimpleMap.md delete mode 100644 docs/model/URL.md delete mode 100644 docs/model/UnitTestsConfig.md delete mode 100644 docs/model/UriRoleMappingConfig.md delete mode 100644 docs/model/User.md delete mode 100644 docs/model/UserCredential.md delete mode 100644 docs/model/UserGroup.md delete mode 100644 docs/model/UserRole.md create mode 100644 libraries/models/cloudharness_model/models/base_model.py diff --git a/.gitignore b/.gitignore index 90f643c0..774c7315 100644 --- a/.gitignore +++ b/.gitignore @@ -17,4 +17,5 @@ skaffold.yaml /deployment.yaml .hypothesis __pycache__ -.env \ No newline at end of file +.env +/.venv \ No newline at end of file diff --git a/docs/model/ApiTestsConfig.md b/docs/model/ApiTestsConfig.md deleted file mode 100644 index fab49ebc..00000000 --- a/docs/model/ApiTestsConfig.md +++ /dev/null @@ -1,46 +0,0 @@ -# cloudharness_model.model.api_tests_config.ApiTestsConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[checks](#checks)** | list, tuple, | tuple, | One of the Schemathesis checks: - not_a_server_error. The response has 5xx HTTP status; - status_code_conformance. The response status is not defined in the API schema; - content_type_conformance. The response content type is not defined in the API schema; - response_schema_conformance. The response content does not conform to the schema defined for this specific response; - response_headers_conformance. The response headers does not contain all defined headers. 
| -**autotest** | bool, | BoolClass, | Specify whether to run the common smoke tests | -**enabled** | bool, | BoolClass, | Enables api tests for this application (default: false) | -**[runParams](#runParams)** | list, tuple, | tuple, | Additional schemathesis parameters | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# checks - -One of the Schemathesis checks: - not_a_server_error. The response has 5xx HTTP status; - status_code_conformance. The response status is not defined in the API schema; - content_type_conformance. The response content type is not defined in the API schema; - response_schema_conformance. The response content does not conform to the schema defined for this specific response; - response_headers_conformance. The response headers does not contain all defined headers. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | One of the Schemathesis checks: - not_a_server_error. The response has 5xx HTTP status; - status_code_conformance. The response status is not defined in the API schema; - content_type_conformance. The response content type is not defined in the API schema; - response_schema_conformance. The response content does not conform to the schema defined for this specific response; - response_headers_conformance. The response headers does not contain all defined headers. 
| - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# runParams - -Additional schemathesis parameters - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Additional schemathesis parameters | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationAccountsConfig.md b/docs/model/ApplicationAccountsConfig.md deleted file mode 100644 index 5cbbdf7d..00000000 --- a/docs/model/ApplicationAccountsConfig.md +++ /dev/null @@ -1,44 +0,0 @@ -# cloudharness_model.model.application_accounts_config.ApplicationAccountsConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[roles](#roles)** | list, tuple, | tuple, | Specify roles to be created in this deployment specific for this application | [optional] -**[users](#users)** | list, tuple, | tuple, | Defines test users to be added to the deployment, specific for this application | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string 
name can be used but the value must be the correct type | [optional] - -# roles - -Specify roles to be created in this deployment specific for this application - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Specify roles to be created in this deployment specific for this application | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# users - -Defines test users to be added to the deployment, specific for this application - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Defines test users to be added to the deployment, specific for this application | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**ApplicationUser**](ApplicationUser.md) | [**ApplicationUser**](ApplicationUser.md) | [**ApplicationUser**](ApplicationUser.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationConfig.md b/docs/model/ApplicationConfig.md deleted file mode 100644 index 667d4b9a..00000000 --- a/docs/model/ApplicationConfig.md +++ /dev/null @@ -1,17 +0,0 @@ -# cloudharness_model.model.application_config.ApplicationConfig - -Place here the values to configure your application helm templates. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Place here the values to configure your application helm templates. 
| - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**harness** | [**ApplicationHarnessConfig**](ApplicationHarnessConfig.md) | [**ApplicationHarnessConfig**](ApplicationHarnessConfig.md) | | -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationDependenciesConfig.md b/docs/model/ApplicationDependenciesConfig.md deleted file mode 100644 index a59b2cec..00000000 --- a/docs/model/ApplicationDependenciesConfig.md +++ /dev/null @@ -1,72 +0,0 @@ -# cloudharness_model.model.application_dependencies_config.ApplicationDependenciesConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[hard](#hard)** | list, tuple, | tuple, | Hard dependencies indicate that the application may not start without these other applications. | [optional] -**[soft](#soft)** | list, tuple, | tuple, | Soft dependencies indicate that the application will work partially without these other applications. 
| [optional] -**[build](#build)** | list, tuple, | tuple, | Hard dependencies indicate that the application Docker image build requires these base/common images | [optional] -**[git](#git)** | list, tuple, | tuple, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# hard - -Hard dependencies indicate that the application may not start without these other applications. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Hard dependencies indicate that the application may not start without these other applications. | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# soft - -Soft dependencies indicate that the application will work partially without these other applications. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Soft dependencies indicate that the application will work partially without these other applications. 
| - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# build - -Hard dependencies indicate that the application Docker image build requires these base/common images - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Hard dependencies indicate that the application Docker image build requires these base/common images | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# git - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**GitDependencyConfig**](GitDependencyConfig.md) | [**GitDependencyConfig**](GitDependencyConfig.md) | [**GitDependencyConfig**](GitDependencyConfig.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationHarnessConfig.md b/docs/model/ApplicationHarnessConfig.md deleted file mode 100644 index 8e8a251d..00000000 --- a/docs/model/ApplicationHarnessConfig.md +++ /dev/null @@ -1,93 +0,0 @@ -# cloudharness_model.model.application_harness_config.ApplicationHarnessConfig - -Define helm variables that allow CloudHarness to enable and configure your application's deployment - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | 
frozendict.frozendict, | Define helm variables that allow CloudHarness to enable and configure your application's deployment | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**deployment** | [**DeploymentAutoArtifactConfig**](DeploymentAutoArtifactConfig.md) | [**DeploymentAutoArtifactConfig**](DeploymentAutoArtifactConfig.md) | | [optional] -**service** | [**ServiceAutoArtifactConfig**](ServiceAutoArtifactConfig.md) | [**ServiceAutoArtifactConfig**](ServiceAutoArtifactConfig.md) | | [optional] -**subdomain** | str, | str, | If specified, an ingress will be created at [subdomain].[.Values.domain] | [optional] -**[aliases](#aliases)** | list, tuple, | tuple, | If specified, an ingress will be created at [alias].[.Values.domain] for each alias | [optional] -**domain** | str, | str, | If specified, an ingress will be created at [domain] | [optional] -**dependencies** | [**ApplicationDependenciesConfig**](ApplicationDependenciesConfig.md) | [**ApplicationDependenciesConfig**](ApplicationDependenciesConfig.md) | | [optional] -**secured** | bool, | BoolClass, | When true, the application is shielded with a getekeeper | [optional] -**[uri_role_mapping](#uri_role_mapping)** | list, tuple, | tuple, | Map uri/roles to secure with the Gatekeeper (if `secured: true`) | [optional] -**secrets** | [**SimpleMap**](SimpleMap.md) | [**SimpleMap**](SimpleMap.md) | | [optional] -**[use_services](#use_services)** | list, tuple, | tuple, | Specify which services this application uses in the frontend to create proxy ingresses. e.g. ``` - name: samples ``` | [optional] -**database** | [**DatabaseDeploymentConfig**](DatabaseDeploymentConfig.md) | [**DatabaseDeploymentConfig**](DatabaseDeploymentConfig.md) | | [optional] -**[resources](#resources)** | list, tuple, | tuple, | Application file resources. 
Maps from deploy/resources folder and mounts as configmaps | [optional] -**readinessProbe** | [**ApplicationProbe**](ApplicationProbe.md) | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] -**startupProbe** | [**ApplicationProbe**](ApplicationProbe.md) | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] -**livenessProbe** | [**ApplicationProbe**](ApplicationProbe.md) | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] -**sourceRoot** | [**Filename**](Filename.md) | [**Filename**](Filename.md) | | [optional] -**name** | str, | str, | Application's name. Do not edit, the value is automatically set from the application directory's name | [optional] -**jupyterhub** | [**JupyterHubConfig**](JupyterHubConfig.md) | [**JupyterHubConfig**](JupyterHubConfig.md) | | [optional] -**accounts** | [**ApplicationAccountsConfig**](ApplicationAccountsConfig.md) | [**ApplicationAccountsConfig**](ApplicationAccountsConfig.md) | | [optional] -**test** | [**ApplicationTestConfig**](ApplicationTestConfig.md) | [**ApplicationTestConfig**](ApplicationTestConfig.md) | | [optional] -**quotas** | [**Quota**](Quota.md) | [**Quota**](Quota.md) | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# aliases - -If specified, an ingress will be created at [alias].[.Values.domain] for each alias - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | If specified, an ingress will be created at [alias].[.Values.domain] for each alias | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | 
------------- | ------------- -items | str, | str, | | - -# uri_role_mapping - -Map uri/roles to secure with the Gatekeeper (if `secured: true`) - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Map uri/roles to secure with the Gatekeeper (if `secured: true`) | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**UriRoleMappingConfig**](UriRoleMappingConfig.md) | [**UriRoleMappingConfig**](UriRoleMappingConfig.md) | [**UriRoleMappingConfig**](UriRoleMappingConfig.md) | | - -# use_services - -Specify which services this application uses in the frontend to create proxy ingresses. e.g. ``` - name: samples ``` - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Specify which services this application uses in the frontend to create proxy ingresses. e.g. ``` - name: samples ``` | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# resources - -Application file resources. Maps from deploy/resources folder and mounts as configmaps - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Application file resources. 
Maps from deploy/resources folder and mounts as configmaps | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**FileResourcesConfig**](FileResourcesConfig.md) | [**FileResourcesConfig**](FileResourcesConfig.md) | [**FileResourcesConfig**](FileResourcesConfig.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationProbe.md b/docs/model/ApplicationProbe.md deleted file mode 100644 index a3375c46..00000000 --- a/docs/model/ApplicationProbe.md +++ /dev/null @@ -1,20 +0,0 @@ -# cloudharness_model.model.application_probe.ApplicationProbe - -Define a Kubernetes probe See also the [official documentation](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/) - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Define a Kubernetes probe See also the [official documentation](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/) | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**path** | str, | str, | | -**periodSeconds** | decimal.Decimal, int, float, | decimal.Decimal, | | [optional] -**failureThreshold** | decimal.Decimal, int, float, | decimal.Decimal, | | [optional] -**initialDelaySeconds** | decimal.Decimal, int, float, | decimal.Decimal, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, 
decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationTestConfig.md b/docs/model/ApplicationTestConfig.md deleted file mode 100644 index 96aced5a..00000000 --- a/docs/model/ApplicationTestConfig.md +++ /dev/null @@ -1,17 +0,0 @@ -# cloudharness_model.model.application_test_config.ApplicationTestConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**unit** | [**UnitTestsConfig**](UnitTestsConfig.md) | [**UnitTestsConfig**](UnitTestsConfig.md) | | -**e2e** | [**E2ETestsConfig**](E2ETestsConfig.md) | [**E2ETestsConfig**](E2ETestsConfig.md) | | -**api** | [**ApiTestsConfig**](ApiTestsConfig.md) | [**ApiTestsConfig**](ApiTestsConfig.md) | | -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationUser.md b/docs/model/ApplicationUser.md deleted file mode 100644 index c55a18e8..00000000 --- a/docs/model/ApplicationUser.md +++ /dev/null @@ -1,44 +0,0 @@ -# 
cloudharness_model.model.application_user.ApplicationUser - -Defines a user - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Defines a user | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**username** | str, | str, | | -**password** | str, | str, | | [optional] -**[clientRoles](#clientRoles)** | list, tuple, | tuple, | | [optional] -**[realmRoles](#realmRoles)** | list, tuple, | tuple, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# clientRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# realmRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ApplicationsConfigsMap.md b/docs/model/ApplicationsConfigsMap.md deleted file 
mode 100644 index 7bfc31e5..00000000 --- a/docs/model/ApplicationsConfigsMap.md +++ /dev/null @@ -1,14 +0,0 @@ -# cloudharness_model.model.applications_configs_map.ApplicationsConfigsMap - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | [**ApplicationConfig**](ApplicationConfig.md) | [**ApplicationConfig**](ApplicationConfig.md) | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/AutoArtifactSpec.md b/docs/model/AutoArtifactSpec.md deleted file mode 100644 index 7d149b70..00000000 --- a/docs/model/AutoArtifactSpec.md +++ /dev/null @@ -1,16 +0,0 @@ -# cloudharness_model.model.auto_artifact_spec.AutoArtifactSpec - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**auto** | bool, | BoolClass, | When true, enables automatic template | -**name** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model 
list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/BackupConfig.md b/docs/model/BackupConfig.md deleted file mode 100644 index 29cbce3b..00000000 --- a/docs/model/BackupConfig.md +++ /dev/null @@ -1,23 +0,0 @@ -# cloudharness_model.model.backup_config.BackupConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | -**dir** | [**Filename**](Filename.md) | [**Filename**](Filename.md) | | -**active** | bool, | BoolClass, | | [optional] -**keep_days** | decimal.Decimal, int, | decimal.Decimal, | | [optional] -**keep_weeks** | decimal.Decimal, int, | decimal.Decimal, | | [optional] -**keep_months** | decimal.Decimal, int, | decimal.Decimal, | | [optional] -**schedule** | str, | str, | Cron expression | [optional] -**suffix** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | The file suffix added to backup files | [optional] -**volumesize** | str, | str, | The volume size for backups (all backups share the same volume) | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the 
value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/CDCEvent.md b/docs/model/CDCEvent.md deleted file mode 100644 index c33e1081..00000000 --- a/docs/model/CDCEvent.md +++ /dev/null @@ -1,21 +0,0 @@ -# cloudharness_model.model.cdc_event.CDCEvent - -A message sent to the orchestration queue. Applications can listen to these events to react to data change events happening on other applications. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | A message sent to the orchestration queue. Applications can listen to these events to react to data change events happening on other applications. | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**uid** | str, | str, | the unique identifier attribute of the object | -**meta** | [**CDCEventMeta**](CDCEventMeta.md) | [**CDCEventMeta**](CDCEventMeta.md) | | -**message_type** | str, | str, | the type of the message (relates to the object type) e.g. jobs | -**operation** | str, | str, | the operation on the object e.g. 
create / update / delete | must be one of ["create", "update", "delete", "other", ] -**resource** | [**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/CDCEventMeta.md b/docs/model/CDCEventMeta.md deleted file mode 100644 index 747246d3..00000000 --- a/docs/model/CDCEventMeta.md +++ /dev/null @@ -1,33 +0,0 @@ -# cloudharness_model.model.cdc_event_meta.CDCEventMeta - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**app_name** | str, | str, | The name of the application/microservice sending the message | -**user** | [**User**](User.md) | [**User**](User.md) | | [optional] -**[args](#args)** | list, tuple, | tuple, | the caller function arguments | [optional] -**kwargs** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | the caller function keyword arguments | [optional] -**description** | str, | str, | General description -- for human consumption | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, 
datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# args - -the caller function arguments - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | the caller function arguments | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/CpuMemoryConfig.md b/docs/model/CpuMemoryConfig.md deleted file mode 100644 index f64cd7d3..00000000 --- a/docs/model/CpuMemoryConfig.md +++ /dev/null @@ -1,16 +0,0 @@ -# cloudharness_model.model.cpu_memory_config.CpuMemoryConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**cpu** | str, | str, | | [optional] -**memory** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model 
list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/DatabaseDeploymentConfig.md b/docs/model/DatabaseDeploymentConfig.md deleted file mode 100644 index 864d5ef1..00000000 --- a/docs/model/DatabaseDeploymentConfig.md +++ /dev/null @@ -1,37 +0,0 @@ -# cloudharness_model.model.database_deployment_config.DatabaseDeploymentConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Composed Schemas (allOf/anyOf/oneOf/not) -#### allOf -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[all_of_0](#all_of_0) | dict, frozendict.frozendict, | frozendict.frozendict, | | -[AutoArtifactSpec](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | | - -# all_of_0 - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**type** | str, | str, | Define the database type. 
One of (mongo, postgres, neo4j, sqlite3) | [optional] -**size** | str, | str, | Specify database disk size | [optional] -**user** | str, | str, | database username | [optional] -**pass** | str, | str, | Database password | [optional] -**image_ref** | str, | str, | Used for referencing images from the build | [optional] -**mongo** | [**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | | [optional] -**postgres** | [**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | | [optional] -**neo4j** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | Neo4j database specific configuration | [optional] -**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/DeploymentAutoArtifactConfig.md b/docs/model/DeploymentAutoArtifactConfig.md deleted file mode 100644 index 4367e949..00000000 --- a/docs/model/DeploymentAutoArtifactConfig.md +++ /dev/null @@ -1,33 +0,0 @@ -# cloudharness_model.model.deployment_auto_artifact_config.DeploymentAutoArtifactConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - 
-### Composed Schemas (allOf/anyOf/oneOf/not) -#### allOf -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[all_of_0](#all_of_0) | dict, frozendict.frozendict, | frozendict.frozendict, | | -[AutoArtifactSpec](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | | - -# all_of_0 - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**port** | str, | str, | Deployment port | [optional] -**replicas** | decimal.Decimal, int, | decimal.Decimal, | Number of replicas | [optional] -**image** | str, | str, | Image name to use in the deployment. Leave it blank to set from the application's Docker file | [optional] -**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | [optional] -**volume** | [**DeploymentVolumeSpec**](DeploymentVolumeSpec.md) | [**DeploymentVolumeSpec**](DeploymentVolumeSpec.md) | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/DeploymentResourcesConf.md b/docs/model/DeploymentResourcesConf.md deleted file mode 100644 index 
6ca517a3..00000000 --- a/docs/model/DeploymentResourcesConf.md +++ /dev/null @@ -1,16 +0,0 @@ -# cloudharness_model.model.deployment_resources_conf.DeploymentResourcesConf - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**requests** | [**CpuMemoryConfig**](CpuMemoryConfig.md) | [**CpuMemoryConfig**](CpuMemoryConfig.md) | | [optional] -**limits** | [**CpuMemoryConfig**](CpuMemoryConfig.md) | [**CpuMemoryConfig**](CpuMemoryConfig.md) | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/DeploymentVolumeSpec.md b/docs/model/DeploymentVolumeSpec.md deleted file mode 100644 index 14900102..00000000 --- a/docs/model/DeploymentVolumeSpec.md +++ /dev/null @@ -1,33 +0,0 @@ -# cloudharness_model.model.deployment_volume_spec.DeploymentVolumeSpec - -Defines a volume attached to the deployment. Automatically created the volume claim and mounts. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Defines a volume attached to the deployment. Automatically created the volume claim and mounts. 
| - -### Composed Schemas (allOf/anyOf/oneOf/not) -#### allOf -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[all_of_0](#all_of_0) | dict, frozendict.frozendict, | frozendict.frozendict, | | -[AutoArtifactSpec](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | | - -# all_of_0 - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**mountpath** | str, | str, | The mount path for the volume | -**size** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | The volume size. E.g. 5Gi | [optional] -**usenfs** | bool, | BoolClass, | Set to `true` to use the nfs on the created volume and mount as ReadWriteMany. 
| [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/E2ETestsConfig.md b/docs/model/E2ETestsConfig.md deleted file mode 100644 index 8b7383d1..00000000 --- a/docs/model/E2ETestsConfig.md +++ /dev/null @@ -1,18 +0,0 @@ -# cloudharness_model.model.e2_e_tests_config.E2ETestsConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**smoketest** | bool, | BoolClass, | Specify whether to run the common smoke tests | -**enabled** | bool, | BoolClass, | Enables end to end testing for this application (default: false) | -**ignoreConsoleErrors** | bool, | BoolClass, | | [optional] -**ignoreRequestErrors** | bool, | BoolClass, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/FileResourcesConfig.md 
b/docs/model/FileResourcesConfig.md deleted file mode 100644 index 6936d87c..00000000 --- a/docs/model/FileResourcesConfig.md +++ /dev/null @@ -1,17 +0,0 @@ -# cloudharness_model.model.file_resources_config.FileResourcesConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**dst** | str, | str, | | -**src** | [**Filename**](Filename.md) | [**Filename**](Filename.md) | | -**name** | [**Filename**](Filename.md) | [**Filename**](Filename.md) | | -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/Filename.md b/docs/model/Filename.md deleted file mode 100644 index ccbf7b0e..00000000 --- a/docs/model/Filename.md +++ /dev/null @@ -1,9 +0,0 @@ -# cloudharness_model.model.filename.Filename - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/FreeObject.md b/docs/model/FreeObject.md deleted file mode 100644 index e131e846..00000000 --- a/docs/model/FreeObject.md +++ /dev/null @@ -1,14 +0,0 @@ -# 
cloudharness_model.model.free_object.FreeObject - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/GitDependencyConfig.md b/docs/model/GitDependencyConfig.md deleted file mode 100644 index 5faef206..00000000 --- a/docs/model/GitDependencyConfig.md +++ /dev/null @@ -1,19 +0,0 @@ -# cloudharness_model.model.git_dependency_config.GitDependencyConfig - -Defines a git repo to be cloned inside the application path - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Defines a git repo to be cloned inside the application path | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**branch_tag** | str, | str, | | -**url** | str, | str, | | -**path** | str, | str, | Defines the path where the repo is cloned. 
default: /git | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/HarnessMainConfig.md b/docs/model/HarnessMainConfig.md deleted file mode 100644 index fc0f29da..00000000 --- a/docs/model/HarnessMainConfig.md +++ /dev/null @@ -1,42 +0,0 @@ -# cloudharness_model.model.harness_main_config.HarnessMainConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**mainapp** | str, | str, | Defines the app to map to the root domain | -**domain** | str, | str, | The root domain | -**namespace** | str, | str, | The K8s namespace. | -**secured_gatekeepers** | bool, | BoolClass, | Enables/disables Gatekeepers on secured applications. Set to false for testing/development | -**local** | bool, | BoolClass, | If set to true, local DNS mapping is added to pods. | -**apps** | [**ApplicationsConfigsMap**](ApplicationsConfigsMap.md) | [**ApplicationsConfigsMap**](ApplicationsConfigsMap.md) | | -**registry** | [**RegistryConfig**](RegistryConfig.md) | [**RegistryConfig**](RegistryConfig.md) | | [optional] -**tag** | str, | str, | Docker tag used to push/pull the built images. 
| [optional] -**[env](#env)** | list, tuple, | tuple, | Environmental variables added to all pods | [optional] -**privenv** | [**NameValue**](NameValue.md) | [**NameValue**](NameValue.md) | | [optional] -**backup** | [**BackupConfig**](BackupConfig.md) | [**BackupConfig**](BackupConfig.md) | | [optional] -**name** | str, | str, | Base name | [optional] -**task-images** | [**SimpleMap**](SimpleMap.md) | [**SimpleMap**](SimpleMap.md) | | [optional] -**build_hash** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# env - -Environmental variables added to all pods - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Environmental variables added to all pods | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**NameValue**](NameValue.md) | [**NameValue**](NameValue.md) | [**NameValue**](NameValue.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/IngressConfig.md b/docs/model/IngressConfig.md deleted file mode 100644 index fcdc67ea..00000000 --- a/docs/model/IngressConfig.md +++ /dev/null @@ -1,43 +0,0 @@ -# cloudharness_model.model.ingress_config.IngressConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Composed Schemas 
(allOf/anyOf/oneOf/not) -#### allOf -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[all_of_0](#all_of_0) | dict, frozendict.frozendict, | frozendict.frozendict, | | -[AutoArtifactSpec](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | | - -# all_of_0 - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**ssl_redirect** | bool, | BoolClass, | | [optional] -**[letsencrypt](#letsencrypt)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# letsencrypt - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**email** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - 
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/JupyterHubConfig.md b/docs/model/JupyterHubConfig.md deleted file mode 100644 index 59ac9c84..00000000 --- a/docs/model/JupyterHubConfig.md +++ /dev/null @@ -1,32 +0,0 @@ -# cloudharness_model.model.jupyter_hub_config.JupyterHubConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[args](#args)** | list, tuple, | tuple, | arguments passed to the container | [optional] -**extraConfig** | [**SimpleMap**](SimpleMap.md) | [**SimpleMap**](SimpleMap.md) | | [optional] -**spawnerExtraConfig** | [**FreeObject**](FreeObject.md) | [**FreeObject**](FreeObject.md) | | [optional] -**applicationHook** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image) | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# args - -arguments passed to the container - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- 
| ------------- | ------------- -list, tuple, | tuple, | arguments passed to the container | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/NameValue.md b/docs/model/NameValue.md deleted file mode 100644 index deb57553..00000000 --- a/docs/model/NameValue.md +++ /dev/null @@ -1,16 +0,0 @@ -# cloudharness_model.model.name_value.NameValue - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**name** | str, | str, | | -**value** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/PathSpecifier.md b/docs/model/PathSpecifier.md deleted file mode 100644 index e8062e36..00000000 --- a/docs/model/PathSpecifier.md +++ /dev/null @@ -1,9 +0,0 @@ -# cloudharness_model.model.path_specifier.PathSpecifier - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -str, | str, | | 
- -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/Quota.md b/docs/model/Quota.md deleted file mode 100644 index 8124fe92..00000000 --- a/docs/model/Quota.md +++ /dev/null @@ -1,14 +0,0 @@ -# cloudharness_model.model.quota.Quota - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/RegistryConfig.md b/docs/model/RegistryConfig.md deleted file mode 100644 index c63d22b6..00000000 --- a/docs/model/RegistryConfig.md +++ /dev/null @@ -1,16 +0,0 @@ -# cloudharness_model.model.registry_config.RegistryConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**name** | str, | str, | | -**secret** | str, | str, | Optional secret used for pulling from docker registry. 
| [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/ServiceAutoArtifactConfig.md b/docs/model/ServiceAutoArtifactConfig.md deleted file mode 100644 index 6ac6050c..00000000 --- a/docs/model/ServiceAutoArtifactConfig.md +++ /dev/null @@ -1,29 +0,0 @@ -# cloudharness_model.model.service_auto_artifact_config.ServiceAutoArtifactConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Composed Schemas (allOf/anyOf/oneOf/not) -#### allOf -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[all_of_0](#all_of_0) | dict, frozendict.frozendict, | frozendict.frozendict, | | -[AutoArtifactSpec](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | [**AutoArtifactSpec**](AutoArtifactSpec.md) | | - -# all_of_0 - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**port** | decimal.Decimal, int, | decimal.Decimal, | Service port | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, 
None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/SimpleMap.md b/docs/model/SimpleMap.md deleted file mode 100644 index 37074b56..00000000 --- a/docs/model/SimpleMap.md +++ /dev/null @@ -1,14 +0,0 @@ -# cloudharness_model.model.simple_map.SimpleMap - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/URL.md b/docs/model/URL.md deleted file mode 100644 index ac2bc6d8..00000000 --- a/docs/model/URL.md +++ /dev/null @@ -1,9 +0,0 @@ -# cloudharness_model.model.url.URL - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to 
README]](../../README.md) - diff --git a/docs/model/UnitTestsConfig.md b/docs/model/UnitTestsConfig.md deleted file mode 100644 index 1c2f16d6..00000000 --- a/docs/model/UnitTestsConfig.md +++ /dev/null @@ -1,30 +0,0 @@ -# cloudharness_model.model.unit_tests_config.UnitTestsConfig - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[commands](#commands)** | list, tuple, | tuple, | Commands to run unit tests | -**enabled** | bool, | BoolClass, | Enables unit tests for this application (default: true) | -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# commands - -Commands to run unit tests - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Commands to run unit tests | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/UriRoleMappingConfig.md b/docs/model/UriRoleMappingConfig.md deleted file mode 100644 index ff645e73..00000000 --- a/docs/model/UriRoleMappingConfig.md +++ /dev/null @@ -1,32 +0,0 @@ -# 
cloudharness_model.model.uri_role_mapping_config.UriRoleMappingConfig - -Defines the application Gatekeeper configuration, if enabled (i.e. `secured: true`. - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | Defines the application Gatekeeper configuration, if enabled (i.e. `secured: true`. | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[roles](#roles)** | list, tuple, | tuple, | Roles allowed to access the present uri | -**uri** | [**PathSpecifier**](PathSpecifier.md) | [**PathSpecifier**](PathSpecifier.md) | | -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# roles - -Roles allowed to access the present uri - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | Roles allowed to access the present uri | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/User.md b/docs/model/User.md deleted file mode 100644 index cd49ceaa..00000000 --- a/docs/model/User.md +++ /dev/null @@ -1,129 +0,0 @@ -# cloudharness_model.model.user.User - -## Model Type Info -Input Type | Accessed Type | Description | Notes 
------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[access](#access)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**[attributes](#attributes)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**[clientRoles](#clientRoles)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**createdTimestamp** | decimal.Decimal, int, | decimal.Decimal, | | [optional] value must be a 64 bit integer -**[credentials](#credentials)** | list, tuple, | tuple, | | [optional] -**[disableableCredentialTypes](#disableableCredentialTypes)** | list, tuple, | tuple, | | [optional] -**email** | str, | str, | | [optional] -**emailVerified** | bool, | BoolClass, | | [optional] -**enabled** | bool, | BoolClass, | | [optional] -**federationLink** | str, | str, | | [optional] -**firstName** | str, | str, | | [optional] -**[groups](#groups)** | list, tuple, | tuple, | | [optional] -**id** | str, | str, | | [optional] -**lastName** | str, | str, | | [optional] -**[realmRoles](#realmRoles)** | list, tuple, | tuple, | | [optional] -**[requiredActions](#requiredActions)** | list, tuple, | tuple, | | [optional] -**serviceAccountClientId** | str, | str, | | [optional] -**username** | str, | str, | | [optional] -**additionalProperties** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, 
NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# access - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# attributes - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# clientRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, 
decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# credentials - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**UserCredential**](UserCredential.md) | [**UserCredential**](UserCredential.md) | [**UserCredential**](UserCredential.md) | | - -# disableableCredentialTypes - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# groups - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# realmRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# requiredActions - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | 
| - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/UserCredential.md b/docs/model/UserCredential.md deleted file mode 100644 index d29edc20..00000000 --- a/docs/model/UserCredential.md +++ /dev/null @@ -1,23 +0,0 @@ -# cloudharness_model.model.user_credential.UserCredential - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**createdDate** | decimal.Decimal, int, | decimal.Decimal, | | [optional] value must be a 64 bit integer -**credentialData** | str, | str, | | [optional] -**id** | str, | str, | | [optional] -**priority** | decimal.Decimal, int, | decimal.Decimal, | | [optional] value must be a 32 bit integer -**secretData** | str, | str, | | [optional] -**temporary** | bool, | BoolClass, | | [optional] -**type** | str, | str, | | [optional] -**userLabel** | str, | str, | | [optional] -**value** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git 
a/docs/model/UserGroup.md b/docs/model/UserGroup.md deleted file mode 100644 index 87aee711..00000000 --- a/docs/model/UserGroup.md +++ /dev/null @@ -1,82 +0,0 @@ -# cloudharness_model.model.user_group.UserGroup - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[access](#access)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**[attributes](#attributes)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**[clientRoles](#clientRoles)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**id** | str, | str, | | [optional] -**name** | str, | str, | | [optional] -**path** | str, | str, | | [optional] -**[realmRoles](#realmRoles)** | list, tuple, | tuple, | | [optional] -**[subGroups](#subGroups)** | list, tuple, | tuple, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# access - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | 
frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# attributes - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# clientRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# realmRoles - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -items | str, | str, | | - -# subGroups - -## Model Type Info 
-Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -list, tuple, | tuple, | | - -### Tuple Items -Class Name | Input Type | Accessed Type | Description | Notes -------------- | ------------- | ------------- | ------------- | ------------- -[**UserGroup**](UserGroup.md) | [**UserGroup**](UserGroup.md) | [**UserGroup**](UserGroup.md) | | - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/docs/model/UserRole.md b/docs/model/UserRole.md deleted file mode 100644 index ba0d5fe0..00000000 --- a/docs/model/UserRole.md +++ /dev/null @@ -1,33 +0,0 @@ -# cloudharness_model.model.user_role.UserRole - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**[attributes](#attributes)** | dict, frozendict.frozendict, | frozendict.frozendict, | | [optional] -**clientRole** | bool, | BoolClass, | | [optional] -**composite** | bool, | BoolClass, | | [optional] -**containerId** | str, | str, | | [optional] -**description** | str, | str, | | [optional] -**id** | str, | str, | | [optional] -**name** | str, | str, | | [optional] -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -# attributes - -## Model Type Info -Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | 
------------- -dict, frozendict.frozendict, | frozendict.frozendict, | | - -### Dictionary Keys -Key | Input Type | Accessed Type | Description | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**any_string_name** | dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional] - -[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md) - diff --git a/libraries/models/api/openapi.yaml b/libraries/models/api/openapi.yaml index 8b961031..3b115091 100644 --- a/libraries/models/api/openapi.yaml +++ b/libraries/models/api/openapi.yaml @@ -163,49 +163,6 @@ components: description: '' type: object additionalProperties: true - DatabaseDeploymentConfig: - description: '' - type: object - allOf: - - - type: object - properties: - type: - description: |- - Define the database type. 
- - One of (mongo, postgres, neo4j, sqlite3) - pattern: ^(mongo|postgres|neo4j|sqlite3)$ - type: string - example: '"neo4j"' - size: - description: Specify database disk size - type: string - example: 1Gi - user: - description: database username - type: string - pass: - format: password - description: Database password - type: string - image_ref: - description: Used for referencing images from the build - type: string - example: 'image_ref: myownpgimage' - mongo: - $ref: '#/components/schemas/FreeObject' - description: Mongo db specific configuration - postgres: - $ref: '#/components/schemas/FreeObject' - description: Postgres database specific configuration - neo4j: - description: Neo4j database specific configuration - resources: - $ref: '#/components/schemas/DeploymentResourcesConf' - description: Database deployment resources - - - $ref: '#/components/schemas/AutoArtifactSpec' ApplicationsConfigsMap: description: '' type: object @@ -823,7 +780,7 @@ components: $ref: '#/components/schemas/ApplicationsConfigsMap' description: '' env: - description: Environmental variables added to all pods + description: 'Environmental variables added to all pods (deprecated, please use envmap)' type: array items: $ref: '#/components/schemas/NameValue' @@ -842,6 +799,9 @@ components: build_hash: description: '' type: string + envmap: + $ref: '#/components/schemas/SimpleMap' + description: Environmental variables added to all pods additionalProperties: true SimpleMap: description: '' @@ -873,3 +833,47 @@ components: url: 'https://github.com/MetaCell/nwb-explorer.git' branch_tag: master path: /git + DatabaseDeploymentConfig: + description: '' + type: object + allOf: + - + type: object + properties: + type: + description: |- + Define the database type. 
+ + One of (mongo, postgres, neo4j, sqlite3) + pattern: ^(mongo|postgres|neo4j|sqlite3)$ + type: string + example: '"neo4j"' + size: + description: Specify database disk size + type: string + example: 1Gi + user: + description: database username + type: string + pass: + format: password + description: Database password + type: string + image_ref: + description: Used for referencing images from the build + type: string + example: 'image_ref: myownpgimage' + mongo: + $ref: '#/components/schemas/FreeObject' + description: Mongo db specific configuration + postgres: + $ref: '#/components/schemas/FreeObject' + description: Postgres database specific configuration + neo4j: + description: Neo4j database specific configuration + resources: + $ref: '#/components/schemas/DeploymentResourcesConf' + description: Database deployment resources + - + $ref: '#/components/schemas/AutoArtifactSpec' + additionalAttributes: true diff --git a/libraries/models/cloudharness_model/models/__init__.py b/libraries/models/cloudharness_model/models/__init__.py index dbfb1175..488c3e6f 100644 --- a/libraries/models/cloudharness_model/models/__init__.py +++ b/libraries/models/cloudharness_model/models/__init__.py @@ -1,7 +1,4 @@ -# coding: utf-8 - # flake8: noqa -from __future__ import absolute_import # import models into model package from cloudharness_model.models.api_tests_config import ApiTestsConfig from cloudharness_model.models.application_accounts_config import ApplicationAccountsConfig @@ -17,24 +14,19 @@ from cloudharness_model.models.cdc_event_meta import CDCEventMeta from cloudharness_model.models.cpu_memory_config import CpuMemoryConfig from cloudharness_model.models.database_deployment_config import DatabaseDeploymentConfig -from cloudharness_model.models.database_deployment_config_all_of import DatabaseDeploymentConfigAllOf from cloudharness_model.models.deployment_auto_artifact_config import DeploymentAutoArtifactConfig -from 
cloudharness_model.models.deployment_auto_artifact_config_all_of import DeploymentAutoArtifactConfigAllOf from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf from cloudharness_model.models.deployment_volume_spec import DeploymentVolumeSpec -from cloudharness_model.models.deployment_volume_spec_all_of import DeploymentVolumeSpecAllOf from cloudharness_model.models.e2_e_tests_config import E2ETestsConfig from cloudharness_model.models.file_resources_config import FileResourcesConfig from cloudharness_model.models.git_dependency_config import GitDependencyConfig from cloudharness_model.models.harness_main_config import HarnessMainConfig from cloudharness_model.models.ingress_config import IngressConfig -from cloudharness_model.models.ingress_config_all_of import IngressConfigAllOf from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt from cloudharness_model.models.jupyter_hub_config import JupyterHubConfig from cloudharness_model.models.name_value import NameValue from cloudharness_model.models.registry_config import RegistryConfig from cloudharness_model.models.service_auto_artifact_config import ServiceAutoArtifactConfig -from cloudharness_model.models.service_auto_artifact_config_all_of import ServiceAutoArtifactConfigAllOf from cloudharness_model.models.unit_tests_config import UnitTestsConfig from cloudharness_model.models.uri_role_mapping_config import UriRoleMappingConfig from cloudharness_model.models.user import User diff --git a/libraries/models/cloudharness_model/models/api_tests_config.py b/libraries/models/cloudharness_model/models/api_tests_config.py index d5b2bdf6..a4476c08 100644 --- a/libraries/models/cloudharness_model/models/api_tests_config.py +++ b/libraries/models/cloudharness_model/models/api_tests_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # 
noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -58,7 +55,7 @@ def from_dict(cls, dikt) -> 'ApiTestsConfig': return util.deserialize_model(dikt, cls) @property - def enabled(self): + def enabled(self) -> bool: """Gets the enabled of this ApiTestsConfig. Enables api tests for this application (default: false) # noqa: E501 @@ -69,7 +66,7 @@ def enabled(self): return self._enabled @enabled.setter - def enabled(self, enabled): + def enabled(self, enabled: bool): """Sets the enabled of this ApiTestsConfig. Enables api tests for this application (default: false) # noqa: E501 @@ -83,7 +80,7 @@ def enabled(self, enabled): self._enabled = enabled @property - def autotest(self): + def autotest(self) -> bool: """Gets the autotest of this ApiTestsConfig. Specify whether to run the common smoke tests # noqa: E501 @@ -94,7 +91,7 @@ def autotest(self): return self._autotest @autotest.setter - def autotest(self, autotest): + def autotest(self, autotest: bool): """Sets the autotest of this ApiTestsConfig. Specify whether to run the common smoke tests # noqa: E501 @@ -108,7 +105,7 @@ def autotest(self, autotest): self._autotest = autotest @property - def run_params(self): + def run_params(self) -> List[str]: """Gets the run_params of this ApiTestsConfig. Additional schemathesis parameters # noqa: E501 @@ -119,7 +116,7 @@ def run_params(self): return self._run_params @run_params.setter - def run_params(self, run_params): + def run_params(self, run_params: List[str]): """Sets the run_params of this ApiTestsConfig. Additional schemathesis parameters # noqa: E501 @@ -131,7 +128,7 @@ def run_params(self, run_params): self._run_params = run_params @property - def checks(self): + def checks(self) -> List[str]: """Gets the checks of this ApiTestsConfig. One of the Schemathesis checks: - not_a_server_error. The response has 5xx HTTP status; - status_code_conformance. 
The response status is not defined in the API schema; - content_type_conformance. The response content type is not defined in the API schema; - response_schema_conformance. The response content does not conform to the schema defined for this specific response; - response_headers_conformance. The response headers does not contain all defined headers. # noqa: E501 @@ -142,7 +139,7 @@ def checks(self): return self._checks @checks.setter - def checks(self, checks): + def checks(self, checks: List[str]): """Sets the checks of this ApiTestsConfig. One of the Schemathesis checks: - not_a_server_error. The response has 5xx HTTP status; - status_code_conformance. The response status is not defined in the API schema; - content_type_conformance. The response content type is not defined in the API schema; - response_schema_conformance. The response content does not conform to the schema defined for this specific response; - response_headers_conformance. The response headers does not contain all defined headers. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/application_accounts_config.py b/libraries/models/cloudharness_model/models/application_accounts_config.py index 082f6853..9a97db85 100644 --- a/libraries/models/cloudharness_model/models/application_accounts_config.py +++ b/libraries/models/cloudharness_model/models/application_accounts_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.application_user import ApplicationUser from cloudharness_model import util @@ -50,7 +47,7 @@ def from_dict(cls, dikt) -> 'ApplicationAccountsConfig': return util.deserialize_model(dikt, cls) @property - def roles(self): + def roles(self) -> List[str]: """Gets the roles of this ApplicationAccountsConfig. Specify roles to be created in this deployment specific for this application # noqa: E501 @@ -61,7 +58,7 @@ def roles(self): return self._roles @roles.setter - def roles(self, roles): + def roles(self, roles: List[str]): """Sets the roles of this ApplicationAccountsConfig. Specify roles to be created in this deployment specific for this application # noqa: E501 @@ -73,7 +70,7 @@ def roles(self, roles): self._roles = roles @property - def users(self): + def users(self) -> List[ApplicationUser]: """Gets the users of this ApplicationAccountsConfig. Defines test users to be added to the deployment, specific for this application # noqa: E501 @@ -84,7 +81,7 @@ def users(self): return self._users @users.setter - def users(self, users): + def users(self, users: List[ApplicationUser]): """Sets the users of this ApplicationAccountsConfig. 
Defines test users to be added to the deployment, specific for this application # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/application_config.py b/libraries/models/cloudharness_model/models/application_config.py index 2ea86d6e..f423ee3b 100644 --- a/libraries/models/cloudharness_model/models/application_config.py +++ b/libraries/models/cloudharness_model/models/application_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.application_harness_config import ApplicationHarnessConfig from cloudharness_model import util @@ -45,7 +42,7 @@ def from_dict(cls, dikt) -> 'ApplicationConfig': return util.deserialize_model(dikt, cls) @property - def harness(self): + def harness(self) -> ApplicationHarnessConfig: """Gets the harness of this ApplicationConfig. @@ -55,7 +52,7 @@ def harness(self): return self._harness @harness.setter - def harness(self, harness): + def harness(self, harness: ApplicationHarnessConfig): """Sets the harness of this ApplicationConfig. 
diff --git a/libraries/models/cloudharness_model/models/application_dependencies_config.py b/libraries/models/cloudharness_model/models/application_dependencies_config.py index 7f4df0ba..a09df86f 100644 --- a/libraries/models/cloudharness_model/models/application_dependencies_config.py +++ b/libraries/models/cloudharness_model/models/application_dependencies_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.git_dependency_config import GitDependencyConfig from cloudharness_model import util @@ -60,7 +57,7 @@ def from_dict(cls, dikt) -> 'ApplicationDependenciesConfig': return util.deserialize_model(dikt, cls) @property - def hard(self): + def hard(self) -> List[str]: """Gets the hard of this ApplicationDependenciesConfig. Hard dependencies indicate that the application may not start without these other applications. # noqa: E501 @@ -71,7 +68,7 @@ def hard(self): return self._hard @hard.setter - def hard(self, hard): + def hard(self, hard: List[str]): """Sets the hard of this ApplicationDependenciesConfig. Hard dependencies indicate that the application may not start without these other applications. # noqa: E501 @@ -83,7 +80,7 @@ def hard(self, hard): self._hard = hard @property - def soft(self): + def soft(self) -> List[str]: """Gets the soft of this ApplicationDependenciesConfig. Soft dependencies indicate that the application will work partially without these other applications. # noqa: E501 @@ -94,7 +91,7 @@ def soft(self): return self._soft @soft.setter - def soft(self, soft): + def soft(self, soft: List[str]): """Sets the soft of this ApplicationDependenciesConfig. Soft dependencies indicate that the application will work partially without these other applications. 
# noqa: E501 @@ -106,7 +103,7 @@ def soft(self, soft): self._soft = soft @property - def build(self): + def build(self) -> List[str]: """Gets the build of this ApplicationDependenciesConfig. Hard dependencies indicate that the application Docker image build requires these base/common images # noqa: E501 @@ -117,7 +114,7 @@ def build(self): return self._build @build.setter - def build(self, build): + def build(self, build: List[str]): """Sets the build of this ApplicationDependenciesConfig. Hard dependencies indicate that the application Docker image build requires these base/common images # noqa: E501 @@ -129,7 +126,7 @@ def build(self, build): self._build = build @property - def git(self): + def git(self) -> List[GitDependencyConfig]: """Gets the git of this ApplicationDependenciesConfig. # noqa: E501 @@ -140,7 +137,7 @@ def git(self): return self._git @git.setter - def git(self, git): + def git(self, git: List[GitDependencyConfig]): """Sets the git of this ApplicationDependenciesConfig. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/application_harness_config.py b/libraries/models/cloudharness_model/models/application_harness_config.py index ba287654..ad85a40b 100644 --- a/libraries/models/cloudharness_model/models/application_harness_config.py +++ b/libraries/models/cloudharness_model/models/application_harness_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.application_accounts_config import ApplicationAccountsConfig from cloudharness_model.models.application_dependencies_config import ApplicationDependenciesConfig from cloudharness_model.models.application_probe import ApplicationProbe @@ -165,7 +162,7 @@ def from_dict(cls, dikt) -> 'ApplicationHarnessConfig': return util.deserialize_model(dikt, cls) @property - def deployment(self): + def deployment(self) -> DeploymentAutoArtifactConfig: """Gets the deployment of this ApplicationHarnessConfig. @@ -175,7 +172,7 @@ def deployment(self): return self._deployment @deployment.setter - def deployment(self, deployment): + def deployment(self, deployment: DeploymentAutoArtifactConfig): """Sets the deployment of this ApplicationHarnessConfig. @@ -186,7 +183,7 @@ def deployment(self, deployment): self._deployment = deployment @property - def service(self): + def service(self) -> ServiceAutoArtifactConfig: """Gets the service of this ApplicationHarnessConfig. @@ -196,7 +193,7 @@ def service(self): return self._service @service.setter - def service(self, service): + def service(self, service: ServiceAutoArtifactConfig): """Sets the service of this ApplicationHarnessConfig. 
@@ -207,7 +204,7 @@ def service(self, service): self._service = service @property - def subdomain(self): + def subdomain(self) -> str: """Gets the subdomain of this ApplicationHarnessConfig. If specified, an ingress will be created at [subdomain].[.Values.domain] # noqa: E501 @@ -218,7 +215,7 @@ def subdomain(self): return self._subdomain @subdomain.setter - def subdomain(self, subdomain): + def subdomain(self, subdomain: str): """Sets the subdomain of this ApplicationHarnessConfig. If specified, an ingress will be created at [subdomain].[.Values.domain] # noqa: E501 @@ -230,7 +227,7 @@ def subdomain(self, subdomain): self._subdomain = subdomain @property - def aliases(self): + def aliases(self) -> List[str]: """Gets the aliases of this ApplicationHarnessConfig. If specified, an ingress will be created at [alias].[.Values.domain] for each alias # noqa: E501 @@ -241,7 +238,7 @@ def aliases(self): return self._aliases @aliases.setter - def aliases(self, aliases): + def aliases(self, aliases: List[str]): """Sets the aliases of this ApplicationHarnessConfig. If specified, an ingress will be created at [alias].[.Values.domain] for each alias # noqa: E501 @@ -253,7 +250,7 @@ def aliases(self, aliases): self._aliases = aliases @property - def domain(self): + def domain(self) -> str: """Gets the domain of this ApplicationHarnessConfig. If specified, an ingress will be created at [domain] # noqa: E501 @@ -264,7 +261,7 @@ def domain(self): return self._domain @domain.setter - def domain(self, domain): + def domain(self, domain: str): """Sets the domain of this ApplicationHarnessConfig. If specified, an ingress will be created at [domain] # noqa: E501 @@ -276,7 +273,7 @@ def domain(self, domain): self._domain = domain @property - def dependencies(self): + def dependencies(self) -> ApplicationDependenciesConfig: """Gets the dependencies of this ApplicationHarnessConfig. 
@@ -286,7 +283,7 @@ def dependencies(self): return self._dependencies @dependencies.setter - def dependencies(self, dependencies): + def dependencies(self, dependencies: ApplicationDependenciesConfig): """Sets the dependencies of this ApplicationHarnessConfig. @@ -297,7 +294,7 @@ def dependencies(self, dependencies): self._dependencies = dependencies @property - def secured(self): + def secured(self) -> bool: """Gets the secured of this ApplicationHarnessConfig. When true, the application is shielded with a getekeeper # noqa: E501 @@ -308,7 +305,7 @@ def secured(self): return self._secured @secured.setter - def secured(self, secured): + def secured(self, secured: bool): """Sets the secured of this ApplicationHarnessConfig. When true, the application is shielded with a getekeeper # noqa: E501 @@ -320,7 +317,7 @@ def secured(self, secured): self._secured = secured @property - def uri_role_mapping(self): + def uri_role_mapping(self) -> List[UriRoleMappingConfig]: """Gets the uri_role_mapping of this ApplicationHarnessConfig. Map uri/roles to secure with the Gatekeeper (if `secured: true`) # noqa: E501 @@ -331,7 +328,7 @@ def uri_role_mapping(self): return self._uri_role_mapping @uri_role_mapping.setter - def uri_role_mapping(self, uri_role_mapping): + def uri_role_mapping(self, uri_role_mapping: List[UriRoleMappingConfig]): """Sets the uri_role_mapping of this ApplicationHarnessConfig. Map uri/roles to secure with the Gatekeeper (if `secured: true`) # noqa: E501 @@ -343,7 +340,7 @@ def uri_role_mapping(self, uri_role_mapping): self._uri_role_mapping = uri_role_mapping @property - def secrets(self): + def secrets(self) -> Dict[str, object]: """Gets the secrets of this ApplicationHarnessConfig. # noqa: E501 @@ -354,7 +351,7 @@ def secrets(self): return self._secrets @secrets.setter - def secrets(self, secrets): + def secrets(self, secrets: Dict[str, object]): """Sets the secrets of this ApplicationHarnessConfig. 
# noqa: E501 @@ -366,7 +363,7 @@ def secrets(self, secrets): self._secrets = secrets @property - def use_services(self): + def use_services(self) -> List[str]: """Gets the use_services of this ApplicationHarnessConfig. Specify which services this application uses in the frontend to create proxy ingresses. e.g. ``` - name: samples ``` # noqa: E501 @@ -377,7 +374,7 @@ def use_services(self): return self._use_services @use_services.setter - def use_services(self, use_services): + def use_services(self, use_services: List[str]): """Sets the use_services of this ApplicationHarnessConfig. Specify which services this application uses in the frontend to create proxy ingresses. e.g. ``` - name: samples ``` # noqa: E501 @@ -389,7 +386,7 @@ def use_services(self, use_services): self._use_services = use_services @property - def database(self): + def database(self) -> DatabaseDeploymentConfig: """Gets the database of this ApplicationHarnessConfig. @@ -399,7 +396,7 @@ def database(self): return self._database @database.setter - def database(self, database): + def database(self, database: DatabaseDeploymentConfig): """Sets the database of this ApplicationHarnessConfig. @@ -410,7 +407,7 @@ def database(self, database): self._database = database @property - def resources(self): + def resources(self) -> List[FileResourcesConfig]: """Gets the resources of this ApplicationHarnessConfig. Application file resources. Maps from deploy/resources folder and mounts as configmaps # noqa: E501 @@ -421,7 +418,7 @@ def resources(self): return self._resources @resources.setter - def resources(self, resources): + def resources(self, resources: List[FileResourcesConfig]): """Sets the resources of this ApplicationHarnessConfig. Application file resources. 
Maps from deploy/resources folder and mounts as configmaps # noqa: E501 @@ -433,7 +430,7 @@ def resources(self, resources): self._resources = resources @property - def readiness_probe(self): + def readiness_probe(self) -> ApplicationProbe: """Gets the readiness_probe of this ApplicationHarnessConfig. @@ -443,7 +440,7 @@ def readiness_probe(self): return self._readiness_probe @readiness_probe.setter - def readiness_probe(self, readiness_probe): + def readiness_probe(self, readiness_probe: ApplicationProbe): """Sets the readiness_probe of this ApplicationHarnessConfig. @@ -454,7 +451,7 @@ def readiness_probe(self, readiness_probe): self._readiness_probe = readiness_probe @property - def startup_probe(self): + def startup_probe(self) -> ApplicationProbe: """Gets the startup_probe of this ApplicationHarnessConfig. @@ -464,7 +461,7 @@ def startup_probe(self): return self._startup_probe @startup_probe.setter - def startup_probe(self, startup_probe): + def startup_probe(self, startup_probe: ApplicationProbe): """Sets the startup_probe of this ApplicationHarnessConfig. @@ -475,7 +472,7 @@ def startup_probe(self, startup_probe): self._startup_probe = startup_probe @property - def liveness_probe(self): + def liveness_probe(self) -> ApplicationProbe: """Gets the liveness_probe of this ApplicationHarnessConfig. @@ -485,7 +482,7 @@ def liveness_probe(self): return self._liveness_probe @liveness_probe.setter - def liveness_probe(self, liveness_probe): + def liveness_probe(self, liveness_probe: ApplicationProbe): """Sets the liveness_probe of this ApplicationHarnessConfig. @@ -496,7 +493,7 @@ def liveness_probe(self, liveness_probe): self._liveness_probe = liveness_probe @property - def source_root(self): + def source_root(self) -> str: """Gets the source_root of this ApplicationHarnessConfig. 
# noqa: E501 @@ -507,7 +504,7 @@ def source_root(self): return self._source_root @source_root.setter - def source_root(self, source_root): + def source_root(self, source_root: str): """Sets the source_root of this ApplicationHarnessConfig. # noqa: E501 @@ -521,7 +518,7 @@ def source_root(self, source_root): self._source_root = source_root @property - def name(self): + def name(self) -> str: """Gets the name of this ApplicationHarnessConfig. Application's name. Do not edit, the value is automatically set from the application directory's name # noqa: E501 @@ -532,7 +529,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this ApplicationHarnessConfig. Application's name. Do not edit, the value is automatically set from the application directory's name # noqa: E501 @@ -544,7 +541,7 @@ def name(self, name): self._name = name @property - def jupyterhub(self): + def jupyterhub(self) -> JupyterHubConfig: """Gets the jupyterhub of this ApplicationHarnessConfig. @@ -554,7 +551,7 @@ def jupyterhub(self): return self._jupyterhub @jupyterhub.setter - def jupyterhub(self, jupyterhub): + def jupyterhub(self, jupyterhub: JupyterHubConfig): """Sets the jupyterhub of this ApplicationHarnessConfig. @@ -565,7 +562,7 @@ def jupyterhub(self, jupyterhub): self._jupyterhub = jupyterhub @property - def accounts(self): + def accounts(self) -> ApplicationAccountsConfig: """Gets the accounts of this ApplicationHarnessConfig. @@ -575,7 +572,7 @@ def accounts(self): return self._accounts @accounts.setter - def accounts(self, accounts): + def accounts(self, accounts: ApplicationAccountsConfig): """Sets the accounts of this ApplicationHarnessConfig. @@ -586,7 +583,7 @@ def accounts(self, accounts): self._accounts = accounts @property - def test(self): + def test(self) -> ApplicationTestConfig: """Gets the test of this ApplicationHarnessConfig. 
@@ -596,7 +593,7 @@ def test(self): return self._test @test.setter - def test(self, test): + def test(self, test: ApplicationTestConfig): """Sets the test of this ApplicationHarnessConfig. @@ -607,7 +604,7 @@ def test(self, test): self._test = test @property - def quotas(self): + def quotas(self) -> Dict[str, object]: """Gets the quotas of this ApplicationHarnessConfig. # noqa: E501 @@ -618,7 +615,7 @@ def quotas(self): return self._quotas @quotas.setter - def quotas(self, quotas): + def quotas(self, quotas: Dict[str, object]): """Sets the quotas of this ApplicationHarnessConfig. # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/application_probe.py b/libraries/models/cloudharness_model/models/application_probe.py index b1819df4..75864acc 100644 --- a/libraries/models/cloudharness_model/models/application_probe.py +++ b/libraries/models/cloudharness_model/models/application_probe.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -58,7 +55,7 @@ def from_dict(cls, dikt) -> 'ApplicationProbe': return util.deserialize_model(dikt, cls) @property - def path(self): + def path(self) -> str: """Gets the path of this ApplicationProbe. # noqa: E501 @@ -69,7 +66,7 @@ def path(self): return self._path @path.setter - def path(self, path): + def path(self, path: str): """Sets the path of this ApplicationProbe. # noqa: E501 @@ -83,7 +80,7 @@ def path(self, path): self._path = path @property - def period_seconds(self): + def period_seconds(self) -> float: """Gets the period_seconds of this ApplicationProbe. 
# noqa: E501 @@ -94,7 +91,7 @@ def period_seconds(self): return self._period_seconds @period_seconds.setter - def period_seconds(self, period_seconds): + def period_seconds(self, period_seconds: float): """Sets the period_seconds of this ApplicationProbe. # noqa: E501 @@ -106,7 +103,7 @@ def period_seconds(self, period_seconds): self._period_seconds = period_seconds @property - def failure_threshold(self): + def failure_threshold(self) -> float: """Gets the failure_threshold of this ApplicationProbe. # noqa: E501 @@ -117,7 +114,7 @@ def failure_threshold(self): return self._failure_threshold @failure_threshold.setter - def failure_threshold(self, failure_threshold): + def failure_threshold(self, failure_threshold: float): """Sets the failure_threshold of this ApplicationProbe. # noqa: E501 @@ -129,7 +126,7 @@ def failure_threshold(self, failure_threshold): self._failure_threshold = failure_threshold @property - def initial_delay_seconds(self): + def initial_delay_seconds(self) -> float: """Gets the initial_delay_seconds of this ApplicationProbe. # noqa: E501 @@ -140,7 +137,7 @@ def initial_delay_seconds(self): return self._initial_delay_seconds @initial_delay_seconds.setter - def initial_delay_seconds(self, initial_delay_seconds): + def initial_delay_seconds(self, initial_delay_seconds: float): """Sets the initial_delay_seconds of this ApplicationProbe. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/application_test_config.py b/libraries/models/cloudharness_model/models/application_test_config.py index a9133dd0..3ad31523 100644 --- a/libraries/models/cloudharness_model/models/application_test_config.py +++ b/libraries/models/cloudharness_model/models/application_test_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.api_tests_config import ApiTestsConfig from cloudharness_model.models.e2_e_tests_config import E2ETestsConfig from cloudharness_model.models.unit_tests_config import UnitTestsConfig @@ -59,7 +56,7 @@ def from_dict(cls, dikt) -> 'ApplicationTestConfig': return util.deserialize_model(dikt, cls) @property - def unit(self): + def unit(self) -> UnitTestsConfig: """Gets the unit of this ApplicationTestConfig. @@ -69,7 +66,7 @@ def unit(self): return self._unit @unit.setter - def unit(self, unit): + def unit(self, unit: UnitTestsConfig): """Sets the unit of this ApplicationTestConfig. @@ -82,7 +79,7 @@ def unit(self, unit): self._unit = unit @property - def api(self): + def api(self) -> ApiTestsConfig: """Gets the api of this ApplicationTestConfig. @@ -92,7 +89,7 @@ def api(self): return self._api @api.setter - def api(self, api): + def api(self, api: ApiTestsConfig): """Sets the api of this ApplicationTestConfig. @@ -105,7 +102,7 @@ def api(self, api): self._api = api @property - def e2e(self): + def e2e(self) -> E2ETestsConfig: """Gets the e2e of this ApplicationTestConfig. @@ -115,7 +112,7 @@ def e2e(self): return self._e2e @e2e.setter - def e2e(self, e2e): + def e2e(self, e2e: E2ETestsConfig): """Sets the e2e of this ApplicationTestConfig. 
diff --git a/libraries/models/cloudharness_model/models/application_user.py b/libraries/models/cloudharness_model/models/application_user.py index 823be12b..991d8ea3 100644 --- a/libraries/models/cloudharness_model/models/application_user.py +++ b/libraries/models/cloudharness_model/models/application_user.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -58,7 +55,7 @@ def from_dict(cls, dikt) -> 'ApplicationUser': return util.deserialize_model(dikt, cls) @property - def username(self): + def username(self) -> str: """Gets the username of this ApplicationUser. # noqa: E501 @@ -69,7 +66,7 @@ def username(self): return self._username @username.setter - def username(self, username): + def username(self, username: str): """Sets the username of this ApplicationUser. # noqa: E501 @@ -83,7 +80,7 @@ def username(self, username): self._username = username @property - def password(self): + def password(self) -> str: """Gets the password of this ApplicationUser. # noqa: E501 @@ -94,7 +91,7 @@ def password(self): return self._password @password.setter - def password(self, password): + def password(self, password: str): """Sets the password of this ApplicationUser. # noqa: E501 @@ -106,7 +103,7 @@ def password(self, password): self._password = password @property - def client_roles(self): + def client_roles(self) -> List[str]: """Gets the client_roles of this ApplicationUser. # noqa: E501 @@ -117,7 +114,7 @@ def client_roles(self): return self._client_roles @client_roles.setter - def client_roles(self, client_roles): + def client_roles(self, client_roles: List[str]): """Sets the client_roles of this ApplicationUser. 
# noqa: E501 @@ -129,7 +126,7 @@ def client_roles(self, client_roles): self._client_roles = client_roles @property - def realm_roles(self): + def realm_roles(self) -> List[str]: """Gets the realm_roles of this ApplicationUser. # noqa: E501 @@ -140,7 +137,7 @@ def realm_roles(self): return self._realm_roles @realm_roles.setter - def realm_roles(self, realm_roles): + def realm_roles(self, realm_roles: List[str]): """Sets the realm_roles of this ApplicationUser. # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/auto_artifact_spec.py b/libraries/models/cloudharness_model/models/auto_artifact_spec.py index 9aebfd4f..c0c89ac3 100644 --- a/libraries/models/cloudharness_model/models/auto_artifact_spec.py +++ b/libraries/models/cloudharness_model/models/auto_artifact_spec.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'AutoArtifactSpec': return util.deserialize_model(dikt, cls) @property - def auto(self): + def auto(self) -> bool: """Gets the auto of this AutoArtifactSpec. When true, enables automatic template # noqa: E501 @@ -59,7 +56,7 @@ def auto(self): return self._auto @auto.setter - def auto(self, auto): + def auto(self, auto: bool): """Sets the auto of this AutoArtifactSpec. When true, enables automatic template # noqa: E501 @@ -73,7 +70,7 @@ def auto(self, auto): self._auto = auto @property - def name(self): + def name(self) -> str: """Gets the name of this AutoArtifactSpec. # noqa: E501 @@ -84,7 +81,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this AutoArtifactSpec. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/backup_config.py b/libraries/models/cloudharness_model/models/backup_config.py index 254c52f7..0613c70d 100644 --- a/libraries/models/cloudharness_model/models/backup_config.py +++ b/libraries/models/cloudharness_model/models/backup_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf import re from cloudharness_model import util @@ -87,7 +84,7 @@ def from_dict(cls, dikt) -> 'BackupConfig': return util.deserialize_model(dikt, cls) @property - def active(self): + def active(self) -> bool: """Gets the active of this BackupConfig. # noqa: E501 @@ -98,7 +95,7 @@ def active(self): return self._active @active.setter - def active(self, active): + def active(self, active: bool): """Sets the active of this BackupConfig. # noqa: E501 @@ -110,7 +107,7 @@ def active(self, active): self._active = active @property - def keep_days(self): + def keep_days(self) -> int: """Gets the keep_days of this BackupConfig. # noqa: E501 @@ -121,7 +118,7 @@ def keep_days(self): return self._keep_days @keep_days.setter - def keep_days(self, keep_days): + def keep_days(self, keep_days: int): """Sets the keep_days of this BackupConfig. # noqa: E501 @@ -133,7 +130,7 @@ def keep_days(self, keep_days): self._keep_days = keep_days @property - def keep_weeks(self): + def keep_weeks(self) -> int: """Gets the keep_weeks of this BackupConfig. # noqa: E501 @@ -144,7 +141,7 @@ def keep_weeks(self): return self._keep_weeks @keep_weeks.setter - def keep_weeks(self, keep_weeks): + def keep_weeks(self, keep_weeks: int): """Sets the keep_weeks of this BackupConfig. 
# noqa: E501 @@ -156,7 +153,7 @@ def keep_weeks(self, keep_weeks): self._keep_weeks = keep_weeks @property - def keep_months(self): + def keep_months(self) -> int: """Gets the keep_months of this BackupConfig. # noqa: E501 @@ -167,7 +164,7 @@ def keep_months(self): return self._keep_months @keep_months.setter - def keep_months(self, keep_months): + def keep_months(self, keep_months: int): """Sets the keep_months of this BackupConfig. # noqa: E501 @@ -179,7 +176,7 @@ def keep_months(self, keep_months): self._keep_months = keep_months @property - def schedule(self): + def schedule(self) -> str: """Gets the schedule of this BackupConfig. Cron expression # noqa: E501 @@ -190,7 +187,7 @@ def schedule(self): return self._schedule @schedule.setter - def schedule(self, schedule): + def schedule(self, schedule: str): """Sets the schedule of this BackupConfig. Cron expression # noqa: E501 @@ -204,7 +201,7 @@ def schedule(self, schedule): self._schedule = schedule @property - def suffix(self): + def suffix(self) -> object: """Gets the suffix of this BackupConfig. The file suffix added to backup files # noqa: E501 @@ -215,7 +212,7 @@ def suffix(self): return self._suffix @suffix.setter - def suffix(self, suffix): + def suffix(self, suffix: object): """Sets the suffix of this BackupConfig. The file suffix added to backup files # noqa: E501 @@ -227,7 +224,7 @@ def suffix(self, suffix): self._suffix = suffix @property - def volumesize(self): + def volumesize(self) -> str: """Gets the volumesize of this BackupConfig. The volume size for backups (all backups share the same volume) # noqa: E501 @@ -238,7 +235,7 @@ def volumesize(self): return self._volumesize @volumesize.setter - def volumesize(self, volumesize): + def volumesize(self, volumesize: str): """Sets the volumesize of this BackupConfig. 
The volume size for backups (all backups share the same volume) # noqa: E501 @@ -250,7 +247,7 @@ def volumesize(self, volumesize): self._volumesize = volumesize @property - def dir(self): + def dir(self) -> str: """Gets the dir of this BackupConfig. # noqa: E501 @@ -261,7 +258,7 @@ def dir(self): return self._dir @dir.setter - def dir(self, dir): + def dir(self, dir: str): """Sets the dir of this BackupConfig. # noqa: E501 @@ -277,7 +274,7 @@ def dir(self, dir): self._dir = dir @property - def resources(self): + def resources(self) -> DeploymentResourcesConf: """Gets the resources of this BackupConfig. @@ -287,7 +284,7 @@ def resources(self): return self._resources @resources.setter - def resources(self, resources): + def resources(self, resources: DeploymentResourcesConf): """Sets the resources of this BackupConfig. diff --git a/libraries/models/cloudharness_model/models/base_model.py b/libraries/models/cloudharness_model/models/base_model.py new file mode 100644 index 00000000..0938bbc9 --- /dev/null +++ b/libraries/models/cloudharness_model/models/base_model.py @@ -0,0 +1,68 @@ +import pprint + +import typing + +from cloudharness_model import util + +T = typing.TypeVar('T') + + +class Model: + # openapiTypes: The key is attribute name and the + # value is attribute type. + openapi_types: typing.Dict[str, type] = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. 
+ attribute_map: typing.Dict[str, str] = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr in self.openapi_types: + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/libraries/models/cloudharness_model/models/cdc_event.py b/libraries/models/cloudharness_model/models/cdc_event.py index 1ba4f017..b8861ad1 100644 --- a/libraries/models/cloudharness_model/models/cdc_event.py +++ b/libraries/models/cloudharness_model/models/cdc_event.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.cdc_event_meta import CDCEventMeta from cloudharness_model import util @@ -65,7 +62,7 @@ def from_dict(cls, dikt) -> 'CDCEvent': return util.deserialize_model(dikt, cls) @property - def 
operation(self): + def operation(self) -> str: """Gets the operation of this CDCEvent. the operation on the object e.g. create / update / delete # noqa: E501 @@ -76,7 +73,7 @@ def operation(self): return self._operation @operation.setter - def operation(self, operation): + def operation(self, operation: str): """Sets the operation of this CDCEvent. the operation on the object e.g. create / update / delete # noqa: E501 @@ -94,7 +91,7 @@ def operation(self, operation): self._operation = operation @property - def uid(self): + def uid(self) -> str: """Gets the uid of this CDCEvent. the unique identifier attribute of the object # noqa: E501 @@ -105,7 +102,7 @@ def uid(self): return self._uid @uid.setter - def uid(self, uid): + def uid(self, uid: str): """Sets the uid of this CDCEvent. the unique identifier attribute of the object # noqa: E501 @@ -119,7 +116,7 @@ def uid(self, uid): self._uid = uid @property - def message_type(self): + def message_type(self) -> str: """Gets the message_type of this CDCEvent. the type of the message (relates to the object type) e.g. jobs # noqa: E501 @@ -130,7 +127,7 @@ def message_type(self): return self._message_type @message_type.setter - def message_type(self, message_type): + def message_type(self, message_type: str): """Sets the message_type of this CDCEvent. the type of the message (relates to the object type) e.g. jobs # noqa: E501 @@ -144,7 +141,7 @@ def message_type(self, message_type): self._message_type = message_type @property - def resource(self): + def resource(self) -> Dict[str, object]: """Gets the resource of this CDCEvent. # noqa: E501 @@ -155,7 +152,7 @@ def resource(self): return self._resource @resource.setter - def resource(self, resource): + def resource(self, resource: Dict[str, object]): """Sets the resource of this CDCEvent. # noqa: E501 @@ -167,7 +164,7 @@ def resource(self, resource): self._resource = resource @property - def meta(self): + def meta(self) -> CDCEventMeta: """Gets the meta of this CDCEvent. 
@@ -177,7 +174,7 @@ def meta(self): return self._meta @meta.setter - def meta(self, meta): + def meta(self, meta: CDCEventMeta): """Sets the meta of this CDCEvent. diff --git a/libraries/models/cloudharness_model/models/cdc_event_meta.py b/libraries/models/cloudharness_model/models/cdc_event_meta.py index d88e57d1..78db4e05 100644 --- a/libraries/models/cloudharness_model/models/cdc_event_meta.py +++ b/libraries/models/cloudharness_model/models/cdc_event_meta.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.user import User from cloudharness_model import util @@ -25,7 +22,7 @@ def __init__(self, app_name=None, user=None, args=None, kwargs=None, description :param user: The user of this CDCEventMeta. # noqa: E501 :type user: User :param args: The args of this CDCEventMeta. # noqa: E501 - :type args: List[Dict] + :type args: List[Dict[str, object]] :param kwargs: The kwargs of this CDCEventMeta. # noqa: E501 :type kwargs: object :param description: The description of this CDCEventMeta. # noqa: E501 @@ -34,7 +31,7 @@ def __init__(self, app_name=None, user=None, args=None, kwargs=None, description self.openapi_types = { 'app_name': str, 'user': User, - 'args': List[Dict], + 'args': List[Dict[str, object]], 'kwargs': object, 'description': str } @@ -65,7 +62,7 @@ def from_dict(cls, dikt) -> 'CDCEventMeta': return util.deserialize_model(dikt, cls) @property - def app_name(self): + def app_name(self) -> str: """Gets the app_name of this CDCEventMeta. The name of the application/microservice sending the message # noqa: E501 @@ -76,7 +73,7 @@ def app_name(self): return self._app_name @app_name.setter - def app_name(self, app_name): + def app_name(self, app_name: str): """Sets the app_name of this CDCEventMeta. 
The name of the application/microservice sending the message # noqa: E501 @@ -90,7 +87,7 @@ def app_name(self, app_name): self._app_name = app_name @property - def user(self): + def user(self) -> User: """Gets the user of this CDCEventMeta. @@ -100,7 +97,7 @@ def user(self): return self._user @user.setter - def user(self, user): + def user(self, user: User): """Sets the user of this CDCEventMeta. @@ -111,30 +108,30 @@ def user(self, user): self._user = user @property - def args(self): + def args(self) -> List[Dict[str, object]]: """Gets the args of this CDCEventMeta. the caller function arguments # noqa: E501 :return: The args of this CDCEventMeta. - :rtype: List[Dict] + :rtype: List[Dict[str, object]] """ return self._args @args.setter - def args(self, args): + def args(self, args: List[Dict[str, object]]): """Sets the args of this CDCEventMeta. the caller function arguments # noqa: E501 :param args: The args of this CDCEventMeta. - :type args: List[Dict] + :type args: List[Dict[str, object]] """ self._args = args @property - def kwargs(self): + def kwargs(self) -> object: """Gets the kwargs of this CDCEventMeta. the caller function keyword arguments # noqa: E501 @@ -145,7 +142,7 @@ def kwargs(self): return self._kwargs @kwargs.setter - def kwargs(self, kwargs): + def kwargs(self, kwargs: object): """Sets the kwargs of this CDCEventMeta. the caller function keyword arguments # noqa: E501 @@ -157,7 +154,7 @@ def kwargs(self, kwargs): self._kwargs = kwargs @property - def description(self): + def description(self) -> str: """Gets the description of this CDCEventMeta. General description -- for human consumption # noqa: E501 @@ -168,7 +165,7 @@ def description(self): return self._description @description.setter - def description(self, description): + def description(self, description: str): """Sets the description of this CDCEventMeta. 
General description -- for human consumption # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/cpu_memory_config.py b/libraries/models/cloudharness_model/models/cpu_memory_config.py index cab9bf7c..8f4f1e70 100644 --- a/libraries/models/cloudharness_model/models/cpu_memory_config.py +++ b/libraries/models/cloudharness_model/models/cpu_memory_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'CpuMemoryConfig': return util.deserialize_model(dikt, cls) @property - def cpu(self): + def cpu(self) -> str: """Gets the cpu of this CpuMemoryConfig. # noqa: E501 @@ -59,7 +56,7 @@ def cpu(self): return self._cpu @cpu.setter - def cpu(self, cpu): + def cpu(self, cpu: str): """Sets the cpu of this CpuMemoryConfig. # noqa: E501 @@ -71,7 +68,7 @@ def cpu(self, cpu): self._cpu = cpu @property - def memory(self): + def memory(self) -> str: """Gets the memory of this CpuMemoryConfig. # noqa: E501 @@ -82,7 +79,7 @@ def memory(self): return self._memory @memory.setter - def memory(self, memory): + def memory(self, memory: str): """Sets the memory of this CpuMemoryConfig. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/database_deployment_config.py b/libraries/models/cloudharness_model/models/database_deployment_config.py index 3786fb89..1be424a2 100644 --- a/libraries/models/cloudharness_model/models/database_deployment_config.py +++ b/libraries/models/cloudharness_model/models/database_deployment_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf import re from cloudharness_model import util @@ -19,9 +16,13 @@ class DatabaseDeploymentConfig(Model): Do not edit the class manually. """ - def __init__(self, type=None, size=None, user=None, _pass=None, image_ref=None, mongo=None, postgres=None, neo4j=None, resources=None, auto=None, name=None): # noqa: E501 + def __init__(self, auto=None, name=None, type=None, size=None, user=None, _pass=None, image_ref=None, mongo=None, postgres=None, neo4j=None, resources=None): # noqa: E501 """DatabaseDeploymentConfig - a model defined in OpenAPI + :param auto: The auto of this DatabaseDeploymentConfig. # noqa: E501 + :type auto: bool + :param name: The name of this DatabaseDeploymentConfig. # noqa: E501 + :type name: str :param type: The type of this DatabaseDeploymentConfig. # noqa: E501 :type type: str :param size: The size of this DatabaseDeploymentConfig. # noqa: E501 @@ -40,12 +41,10 @@ def __init__(self, type=None, size=None, user=None, _pass=None, image_ref=None, :type neo4j: object :param resources: The resources of this DatabaseDeploymentConfig. # noqa: E501 :type resources: DeploymentResourcesConf - :param auto: The auto of this DatabaseDeploymentConfig. # noqa: E501 - :type auto: bool - :param name: The name of this DatabaseDeploymentConfig. 
# noqa: E501 - :type name: str """ self.openapi_types = { + 'auto': bool, + 'name': str, 'type': str, 'size': str, 'user': str, @@ -54,12 +53,12 @@ def __init__(self, type=None, size=None, user=None, _pass=None, image_ref=None, 'mongo': Dict[str, object], 'postgres': Dict[str, object], 'neo4j': object, - 'resources': DeploymentResourcesConf, - 'auto': bool, - 'name': str + 'resources': DeploymentResourcesConf } self.attribute_map = { + 'auto': 'auto', + 'name': 'name', 'type': 'type', 'size': 'size', 'user': 'user', @@ -68,11 +67,11 @@ def __init__(self, type=None, size=None, user=None, _pass=None, image_ref=None, 'mongo': 'mongo', 'postgres': 'postgres', 'neo4j': 'neo4j', - 'resources': 'resources', - 'auto': 'auto', - 'name': 'name' + 'resources': 'resources' } + self._auto = auto + self._name = name self._type = type self._size = size self._user = user @@ -82,8 +81,6 @@ def __init__(self, type=None, size=None, user=None, _pass=None, image_ref=None, self._postgres = postgres self._neo4j = neo4j self._resources = resources - self._auto = auto - self._name = name @classmethod def from_dict(cls, dikt) -> 'DatabaseDeploymentConfig': @@ -97,7 +94,55 @@ def from_dict(cls, dikt) -> 'DatabaseDeploymentConfig': return util.deserialize_model(dikt, cls) @property - def type(self): + def auto(self) -> bool: + """Gets the auto of this DatabaseDeploymentConfig. + + When true, enables automatic template # noqa: E501 + + :return: The auto of this DatabaseDeploymentConfig. + :rtype: bool + """ + return self._auto + + @auto.setter + def auto(self, auto: bool): + """Sets the auto of this DatabaseDeploymentConfig. + + When true, enables automatic template # noqa: E501 + + :param auto: The auto of this DatabaseDeploymentConfig. + :type auto: bool + """ + if auto is None: + raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 + + self._auto = auto + + @property + def name(self) -> str: + """Gets the name of this DatabaseDeploymentConfig. 
+ + # noqa: E501 + + :return: The name of this DatabaseDeploymentConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name: str): + """Sets the name of this DatabaseDeploymentConfig. + + # noqa: E501 + + :param name: The name of this DatabaseDeploymentConfig. + :type name: str + """ + + self._name = name + + @property + def type(self) -> str: """Gets the type of this DatabaseDeploymentConfig. Define the database type. One of (mongo, postgres, neo4j, sqlite3) # noqa: E501 @@ -108,7 +153,7 @@ def type(self): return self._type @type.setter - def type(self, type): + def type(self, type: str): """Sets the type of this DatabaseDeploymentConfig. Define the database type. One of (mongo, postgres, neo4j, sqlite3) # noqa: E501 @@ -122,7 +167,7 @@ def type(self, type): self._type = type @property - def size(self): + def size(self) -> str: """Gets the size of this DatabaseDeploymentConfig. Specify database disk size # noqa: E501 @@ -133,7 +178,7 @@ def size(self): return self._size @size.setter - def size(self, size): + def size(self, size: str): """Sets the size of this DatabaseDeploymentConfig. Specify database disk size # noqa: E501 @@ -145,7 +190,7 @@ def size(self, size): self._size = size @property - def user(self): + def user(self) -> str: """Gets the user of this DatabaseDeploymentConfig. database username # noqa: E501 @@ -156,7 +201,7 @@ def user(self): return self._user @user.setter - def user(self, user): + def user(self, user: str): """Sets the user of this DatabaseDeploymentConfig. database username # noqa: E501 @@ -168,7 +213,7 @@ def user(self, user): self._user = user @property - def _pass(self): + def _pass(self) -> str: """Gets the _pass of this DatabaseDeploymentConfig. Database password # noqa: E501 @@ -179,7 +224,7 @@ def _pass(self): return self.__pass @_pass.setter - def _pass(self, _pass): + def _pass(self, _pass: str): """Sets the _pass of this DatabaseDeploymentConfig. 
Database password # noqa: E501 @@ -191,7 +236,7 @@ def _pass(self, _pass): self.__pass = _pass @property - def image_ref(self): + def image_ref(self) -> str: """Gets the image_ref of this DatabaseDeploymentConfig. Used for referencing images from the build # noqa: E501 @@ -202,7 +247,7 @@ def image_ref(self): return self._image_ref @image_ref.setter - def image_ref(self, image_ref): + def image_ref(self, image_ref: str): """Sets the image_ref of this DatabaseDeploymentConfig. Used for referencing images from the build # noqa: E501 @@ -214,7 +259,7 @@ def image_ref(self, image_ref): self._image_ref = image_ref @property - def mongo(self): + def mongo(self) -> Dict[str, object]: """Gets the mongo of this DatabaseDeploymentConfig. # noqa: E501 @@ -225,7 +270,7 @@ def mongo(self): return self._mongo @mongo.setter - def mongo(self, mongo): + def mongo(self, mongo: Dict[str, object]): """Sets the mongo of this DatabaseDeploymentConfig. # noqa: E501 @@ -237,7 +282,7 @@ def mongo(self, mongo): self._mongo = mongo @property - def postgres(self): + def postgres(self) -> Dict[str, object]: """Gets the postgres of this DatabaseDeploymentConfig. # noqa: E501 @@ -248,7 +293,7 @@ def postgres(self): return self._postgres @postgres.setter - def postgres(self, postgres): + def postgres(self, postgres: Dict[str, object]): """Sets the postgres of this DatabaseDeploymentConfig. # noqa: E501 @@ -260,7 +305,7 @@ def postgres(self, postgres): self._postgres = postgres @property - def neo4j(self): + def neo4j(self) -> object: """Gets the neo4j of this DatabaseDeploymentConfig. Neo4j database specific configuration # noqa: E501 @@ -271,7 +316,7 @@ def neo4j(self): return self._neo4j @neo4j.setter - def neo4j(self, neo4j): + def neo4j(self, neo4j: object): """Sets the neo4j of this DatabaseDeploymentConfig. 
Neo4j database specific configuration # noqa: E501 @@ -283,7 +328,7 @@ def neo4j(self, neo4j): self._neo4j = neo4j @property - def resources(self): + def resources(self) -> DeploymentResourcesConf: """Gets the resources of this DatabaseDeploymentConfig. @@ -293,7 +338,7 @@ def resources(self): return self._resources @resources.setter - def resources(self, resources): + def resources(self, resources: DeploymentResourcesConf): """Sets the resources of this DatabaseDeploymentConfig. @@ -302,51 +347,3 @@ def resources(self, resources): """ self._resources = resources - - @property - def auto(self): - """Gets the auto of this DatabaseDeploymentConfig. - - When true, enables automatic template # noqa: E501 - - :return: The auto of this DatabaseDeploymentConfig. - :rtype: bool - """ - return self._auto - - @auto.setter - def auto(self, auto): - """Sets the auto of this DatabaseDeploymentConfig. - - When true, enables automatic template # noqa: E501 - - :param auto: The auto of this DatabaseDeploymentConfig. - :type auto: bool - """ - if auto is None: - raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 - - self._auto = auto - - @property - def name(self): - """Gets the name of this DatabaseDeploymentConfig. - - # noqa: E501 - - :return: The name of this DatabaseDeploymentConfig. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this DatabaseDeploymentConfig. - - # noqa: E501 - - :param name: The name of this DatabaseDeploymentConfig. 
- :type name: str - """ - - self._name = name diff --git a/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py b/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py index c72ac477..ab31de80 100644 --- a/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py +++ b/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf from cloudharness_model.models.deployment_volume_spec import DeploymentVolumeSpec import re @@ -21,9 +18,13 @@ class DeploymentAutoArtifactConfig(Model): Do not edit the class manually. """ - def __init__(self, port=None, replicas=None, image=None, resources=None, volume=None, auto=None, name=None): # noqa: E501 + def __init__(self, auto=None, name=None, port=None, replicas=None, image=None, resources=None, volume=None): # noqa: E501 """DeploymentAutoArtifactConfig - a model defined in OpenAPI + :param auto: The auto of this DeploymentAutoArtifactConfig. # noqa: E501 + :type auto: bool + :param name: The name of this DeploymentAutoArtifactConfig. # noqa: E501 + :type name: str :param port: The port of this DeploymentAutoArtifactConfig. # noqa: E501 :type port: str :param replicas: The replicas of this DeploymentAutoArtifactConfig. # noqa: E501 @@ -34,38 +35,34 @@ def __init__(self, port=None, replicas=None, image=None, resources=None, volume= :type resources: DeploymentResourcesConf :param volume: The volume of this DeploymentAutoArtifactConfig. # noqa: E501 :type volume: DeploymentVolumeSpec - :param auto: The auto of this DeploymentAutoArtifactConfig. 
# noqa: E501 - :type auto: bool - :param name: The name of this DeploymentAutoArtifactConfig. # noqa: E501 - :type name: str """ self.openapi_types = { + 'auto': bool, + 'name': str, 'port': str, 'replicas': int, 'image': str, 'resources': DeploymentResourcesConf, - 'volume': DeploymentVolumeSpec, - 'auto': bool, - 'name': str + 'volume': DeploymentVolumeSpec } self.attribute_map = { + 'auto': 'auto', + 'name': 'name', 'port': 'port', 'replicas': 'replicas', 'image': 'image', 'resources': 'resources', - 'volume': 'volume', - 'auto': 'auto', - 'name': 'name' + 'volume': 'volume' } + self._auto = auto + self._name = name self._port = port self._replicas = replicas self._image = image self._resources = resources self._volume = volume - self._auto = auto - self._name = name @classmethod def from_dict(cls, dikt) -> 'DeploymentAutoArtifactConfig': @@ -79,7 +76,55 @@ def from_dict(cls, dikt) -> 'DeploymentAutoArtifactConfig': return util.deserialize_model(dikt, cls) @property - def port(self): + def auto(self) -> bool: + """Gets the auto of this DeploymentAutoArtifactConfig. + + When true, enables automatic template # noqa: E501 + + :return: The auto of this DeploymentAutoArtifactConfig. + :rtype: bool + """ + return self._auto + + @auto.setter + def auto(self, auto: bool): + """Sets the auto of this DeploymentAutoArtifactConfig. + + When true, enables automatic template # noqa: E501 + + :param auto: The auto of this DeploymentAutoArtifactConfig. + :type auto: bool + """ + if auto is None: + raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 + + self._auto = auto + + @property + def name(self) -> str: + """Gets the name of this DeploymentAutoArtifactConfig. + + # noqa: E501 + + :return: The name of this DeploymentAutoArtifactConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name: str): + """Sets the name of this DeploymentAutoArtifactConfig. 
+ + # noqa: E501 + + :param name: The name of this DeploymentAutoArtifactConfig. + :type name: str + """ + + self._name = name + + @property + def port(self) -> str: """Gets the port of this DeploymentAutoArtifactConfig. Deployment port # noqa: E501 @@ -90,7 +135,7 @@ def port(self): return self._port @port.setter - def port(self, port): + def port(self, port: str): """Sets the port of this DeploymentAutoArtifactConfig. Deployment port # noqa: E501 @@ -102,7 +147,7 @@ def port(self, port): self._port = port @property - def replicas(self): + def replicas(self) -> int: """Gets the replicas of this DeploymentAutoArtifactConfig. Number of replicas # noqa: E501 @@ -113,7 +158,7 @@ def replicas(self): return self._replicas @replicas.setter - def replicas(self, replicas): + def replicas(self, replicas: int): """Sets the replicas of this DeploymentAutoArtifactConfig. Number of replicas # noqa: E501 @@ -125,7 +170,7 @@ def replicas(self, replicas): self._replicas = replicas @property - def image(self): + def image(self) -> str: """Gets the image of this DeploymentAutoArtifactConfig. Image name to use in the deployment. Leave it blank to set from the application's Docker file # noqa: E501 @@ -136,7 +181,7 @@ def image(self): return self._image @image.setter - def image(self, image): + def image(self, image: str): """Sets the image of this DeploymentAutoArtifactConfig. Image name to use in the deployment. Leave it blank to set from the application's Docker file # noqa: E501 @@ -150,7 +195,7 @@ def image(self, image): self._image = image @property - def resources(self): + def resources(self) -> DeploymentResourcesConf: """Gets the resources of this DeploymentAutoArtifactConfig. @@ -160,7 +205,7 @@ def resources(self): return self._resources @resources.setter - def resources(self, resources): + def resources(self, resources: DeploymentResourcesConf): """Sets the resources of this DeploymentAutoArtifactConfig. 
@@ -171,7 +216,7 @@ def resources(self, resources): self._resources = resources @property - def volume(self): + def volume(self) -> DeploymentVolumeSpec: """Gets the volume of this DeploymentAutoArtifactConfig. @@ -181,7 +226,7 @@ def volume(self): return self._volume @volume.setter - def volume(self, volume): + def volume(self, volume: DeploymentVolumeSpec): """Sets the volume of this DeploymentAutoArtifactConfig. @@ -190,51 +235,3 @@ def volume(self, volume): """ self._volume = volume - - @property - def auto(self): - """Gets the auto of this DeploymentAutoArtifactConfig. - - When true, enables automatic template # noqa: E501 - - :return: The auto of this DeploymentAutoArtifactConfig. - :rtype: bool - """ - return self._auto - - @auto.setter - def auto(self, auto): - """Sets the auto of this DeploymentAutoArtifactConfig. - - When true, enables automatic template # noqa: E501 - - :param auto: The auto of this DeploymentAutoArtifactConfig. - :type auto: bool - """ - if auto is None: - raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 - - self._auto = auto - - @property - def name(self): - """Gets the name of this DeploymentAutoArtifactConfig. - - # noqa: E501 - - :return: The name of this DeploymentAutoArtifactConfig. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this DeploymentAutoArtifactConfig. - - # noqa: E501 - - :param name: The name of this DeploymentAutoArtifactConfig. 
- :type name: str - """ - - self._name = name diff --git a/libraries/models/cloudharness_model/models/deployment_resources_conf.py b/libraries/models/cloudharness_model/models/deployment_resources_conf.py index 520e2008..27c09148 100644 --- a/libraries/models/cloudharness_model/models/deployment_resources_conf.py +++ b/libraries/models/cloudharness_model/models/deployment_resources_conf.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.cpu_memory_config import CpuMemoryConfig from cloudharness_model import util @@ -50,7 +47,7 @@ def from_dict(cls, dikt) -> 'DeploymentResourcesConf': return util.deserialize_model(dikt, cls) @property - def requests(self): + def requests(self) -> CpuMemoryConfig: """Gets the requests of this DeploymentResourcesConf. @@ -60,7 +57,7 @@ def requests(self): return self._requests @requests.setter - def requests(self, requests): + def requests(self, requests: CpuMemoryConfig): """Sets the requests of this DeploymentResourcesConf. @@ -71,7 +68,7 @@ def requests(self, requests): self._requests = requests @property - def limits(self): + def limits(self) -> CpuMemoryConfig: """Gets the limits of this DeploymentResourcesConf. @@ -81,7 +78,7 @@ def limits(self): return self._limits @limits.setter - def limits(self, limits): + def limits(self, limits: CpuMemoryConfig): """Sets the limits of this DeploymentResourcesConf. 
diff --git a/libraries/models/cloudharness_model/models/deployment_volume_spec.py b/libraries/models/cloudharness_model/models/deployment_volume_spec.py index 631bc55d..7fc64914 100644 --- a/libraries/models/cloudharness_model/models/deployment_volume_spec.py +++ b/libraries/models/cloudharness_model/models/deployment_volume_spec.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -15,41 +12,41 @@ class DeploymentVolumeSpec(Model): Do not edit the class manually. """ - def __init__(self, mountpath=None, size=None, usenfs=None, auto=None, name=None): # noqa: E501 + def __init__(self, auto=None, name=None, mountpath=None, size=None, usenfs=None): # noqa: E501 """DeploymentVolumeSpec - a model defined in OpenAPI + :param auto: The auto of this DeploymentVolumeSpec. # noqa: E501 + :type auto: bool + :param name: The name of this DeploymentVolumeSpec. # noqa: E501 + :type name: str :param mountpath: The mountpath of this DeploymentVolumeSpec. # noqa: E501 :type mountpath: str :param size: The size of this DeploymentVolumeSpec. # noqa: E501 :type size: object :param usenfs: The usenfs of this DeploymentVolumeSpec. # noqa: E501 :type usenfs: bool - :param auto: The auto of this DeploymentVolumeSpec. # noqa: E501 - :type auto: bool - :param name: The name of this DeploymentVolumeSpec. 
# noqa: E501 - :type name: str """ self.openapi_types = { + 'auto': bool, + 'name': str, 'mountpath': str, 'size': object, - 'usenfs': bool, - 'auto': bool, - 'name': str + 'usenfs': bool } self.attribute_map = { + 'auto': 'auto', + 'name': 'name', 'mountpath': 'mountpath', 'size': 'size', - 'usenfs': 'usenfs', - 'auto': 'auto', - 'name': 'name' + 'usenfs': 'usenfs' } + self._auto = auto + self._name = name self._mountpath = mountpath self._size = size self._usenfs = usenfs - self._auto = auto - self._name = name @classmethod def from_dict(cls, dikt) -> 'DeploymentVolumeSpec': @@ -63,7 +60,55 @@ def from_dict(cls, dikt) -> 'DeploymentVolumeSpec': return util.deserialize_model(dikt, cls) @property - def mountpath(self): + def auto(self) -> bool: + """Gets the auto of this DeploymentVolumeSpec. + + When true, enables automatic template # noqa: E501 + + :return: The auto of this DeploymentVolumeSpec. + :rtype: bool + """ + return self._auto + + @auto.setter + def auto(self, auto: bool): + """Sets the auto of this DeploymentVolumeSpec. + + When true, enables automatic template # noqa: E501 + + :param auto: The auto of this DeploymentVolumeSpec. + :type auto: bool + """ + if auto is None: + raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 + + self._auto = auto + + @property + def name(self) -> str: + """Gets the name of this DeploymentVolumeSpec. + + # noqa: E501 + + :return: The name of this DeploymentVolumeSpec. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name: str): + """Sets the name of this DeploymentVolumeSpec. + + # noqa: E501 + + :param name: The name of this DeploymentVolumeSpec. + :type name: str + """ + + self._name = name + + @property + def mountpath(self) -> str: """Gets the mountpath of this DeploymentVolumeSpec. 
The mount path for the volume # noqa: E501 @@ -74,7 +119,7 @@ def mountpath(self): return self._mountpath @mountpath.setter - def mountpath(self, mountpath): + def mountpath(self, mountpath: str): """Sets the mountpath of this DeploymentVolumeSpec. The mount path for the volume # noqa: E501 @@ -88,7 +133,7 @@ def mountpath(self, mountpath): self._mountpath = mountpath @property - def size(self): + def size(self) -> object: """Gets the size of this DeploymentVolumeSpec. The volume size. E.g. 5Gi # noqa: E501 @@ -99,7 +144,7 @@ def size(self): return self._size @size.setter - def size(self, size): + def size(self, size: object): """Sets the size of this DeploymentVolumeSpec. The volume size. E.g. 5Gi # noqa: E501 @@ -111,7 +156,7 @@ def size(self, size): self._size = size @property - def usenfs(self): + def usenfs(self) -> bool: """Gets the usenfs of this DeploymentVolumeSpec. Set to `true` to use the nfs on the created volume and mount as ReadWriteMany. # noqa: E501 @@ -122,7 +167,7 @@ def usenfs(self): return self._usenfs @usenfs.setter - def usenfs(self, usenfs): + def usenfs(self, usenfs: bool): """Sets the usenfs of this DeploymentVolumeSpec. Set to `true` to use the nfs on the created volume and mount as ReadWriteMany. # noqa: E501 @@ -132,51 +177,3 @@ def usenfs(self, usenfs): """ self._usenfs = usenfs - - @property - def auto(self): - """Gets the auto of this DeploymentVolumeSpec. - - When true, enables automatic template # noqa: E501 - - :return: The auto of this DeploymentVolumeSpec. - :rtype: bool - """ - return self._auto - - @auto.setter - def auto(self, auto): - """Sets the auto of this DeploymentVolumeSpec. - - When true, enables automatic template # noqa: E501 - - :param auto: The auto of this DeploymentVolumeSpec. - :type auto: bool - """ - if auto is None: - raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 - - self._auto = auto - - @property - def name(self): - """Gets the name of this DeploymentVolumeSpec. 
- - # noqa: E501 - - :return: The name of this DeploymentVolumeSpec. - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this DeploymentVolumeSpec. - - # noqa: E501 - - :param name: The name of this DeploymentVolumeSpec. - :type name: str - """ - - self._name = name diff --git a/libraries/models/cloudharness_model/models/e2_e_tests_config.py b/libraries/models/cloudharness_model/models/e2_e_tests_config.py index e856fe9d..6ae48d93 100644 --- a/libraries/models/cloudharness_model/models/e2_e_tests_config.py +++ b/libraries/models/cloudharness_model/models/e2_e_tests_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -58,7 +55,7 @@ def from_dict(cls, dikt) -> 'E2ETestsConfig': return util.deserialize_model(dikt, cls) @property - def enabled(self): + def enabled(self) -> bool: """Gets the enabled of this E2ETestsConfig. Enables end to end testing for this application (default: false) # noqa: E501 @@ -69,7 +66,7 @@ def enabled(self): return self._enabled @enabled.setter - def enabled(self, enabled): + def enabled(self, enabled: bool): """Sets the enabled of this E2ETestsConfig. Enables end to end testing for this application (default: false) # noqa: E501 @@ -83,7 +80,7 @@ def enabled(self, enabled): self._enabled = enabled @property - def smoketest(self): + def smoketest(self) -> bool: """Gets the smoketest of this E2ETestsConfig. Specify whether to run the common smoke tests # noqa: E501 @@ -94,7 +91,7 @@ def smoketest(self): return self._smoketest @smoketest.setter - def smoketest(self, smoketest): + def smoketest(self, smoketest: bool): """Sets the smoketest of this E2ETestsConfig. 
Specify whether to run the common smoke tests # noqa: E501 @@ -108,7 +105,7 @@ def smoketest(self, smoketest): self._smoketest = smoketest @property - def ignore_console_errors(self): + def ignore_console_errors(self) -> bool: """Gets the ignore_console_errors of this E2ETestsConfig. # noqa: E501 @@ -119,7 +116,7 @@ def ignore_console_errors(self): return self._ignore_console_errors @ignore_console_errors.setter - def ignore_console_errors(self, ignore_console_errors): + def ignore_console_errors(self, ignore_console_errors: bool): """Sets the ignore_console_errors of this E2ETestsConfig. # noqa: E501 @@ -131,7 +128,7 @@ def ignore_console_errors(self, ignore_console_errors): self._ignore_console_errors = ignore_console_errors @property - def ignore_request_errors(self): + def ignore_request_errors(self) -> bool: """Gets the ignore_request_errors of this E2ETestsConfig. # noqa: E501 @@ -142,7 +139,7 @@ def ignore_request_errors(self): return self._ignore_request_errors @ignore_request_errors.setter - def ignore_request_errors(self, ignore_request_errors): + def ignore_request_errors(self, ignore_request_errors: bool): """Sets the ignore_request_errors of this E2ETestsConfig. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/file_resources_config.py b/libraries/models/cloudharness_model/models/file_resources_config.py index da5993b4..fff73422 100644 --- a/libraries/models/cloudharness_model/models/file_resources_config.py +++ b/libraries/models/cloudharness_model/models/file_resources_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model import re from cloudharness_model import util @@ -55,7 +52,7 @@ def from_dict(cls, dikt) -> 'FileResourcesConfig': return util.deserialize_model(dikt, cls) @property - def name(self): + def name(self) -> str: """Gets the name of this FileResourcesConfig. # noqa: E501 @@ -66,7 +63,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this FileResourcesConfig. # noqa: E501 @@ -82,7 +79,7 @@ def name(self, name): self._name = name @property - def src(self): + def src(self) -> str: """Gets the src of this FileResourcesConfig. # noqa: E501 @@ -93,7 +90,7 @@ def src(self): return self._src @src.setter - def src(self, src): + def src(self, src: str): """Sets the src of this FileResourcesConfig. # noqa: E501 @@ -109,7 +106,7 @@ def src(self, src): self._src = src @property - def dst(self): + def dst(self) -> str: """Gets the dst of this FileResourcesConfig. # noqa: E501 @@ -120,7 +117,7 @@ def dst(self): return self._dst @dst.setter - def dst(self, dst): + def dst(self, dst: str): """Sets the dst of this FileResourcesConfig. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/git_dependency_config.py b/libraries/models/cloudharness_model/models/git_dependency_config.py index b2a17690..8e3acf61 100644 --- a/libraries/models/cloudharness_model/models/git_dependency_config.py +++ b/libraries/models/cloudharness_model/models/git_dependency_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -53,7 +50,7 @@ def from_dict(cls, dikt) -> 'GitDependencyConfig': return util.deserialize_model(dikt, cls) @property - def url(self): + def url(self) -> str: """Gets the url of this GitDependencyConfig. @@ -63,7 +60,7 @@ def url(self): return self._url @url.setter - def url(self, url): + def url(self, url: str): """Sets the url of this GitDependencyConfig. @@ -76,7 +73,7 @@ def url(self, url): self._url = url @property - def branch_tag(self): + def branch_tag(self) -> str: """Gets the branch_tag of this GitDependencyConfig. @@ -86,7 +83,7 @@ def branch_tag(self): return self._branch_tag @branch_tag.setter - def branch_tag(self, branch_tag): + def branch_tag(self, branch_tag: str): """Sets the branch_tag of this GitDependencyConfig. @@ -99,7 +96,7 @@ def branch_tag(self, branch_tag): self._branch_tag = branch_tag @property - def path(self): + def path(self) -> str: """Gets the path of this GitDependencyConfig. Defines the path where the repo is cloned. default: /git # noqa: E501 @@ -110,7 +107,7 @@ def path(self): return self._path @path.setter - def path(self, path): + def path(self, path: str): """Sets the path of this GitDependencyConfig. Defines the path where the repo is cloned. 
default: /git # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/harness_main_config.py b/libraries/models/cloudharness_model/models/harness_main_config.py index 7f18e82d..6691ed07 100644 --- a/libraries/models/cloudharness_model/models/harness_main_config.py +++ b/libraries/models/cloudharness_model/models/harness_main_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.application_config import ApplicationConfig from cloudharness_model.models.backup_config import BackupConfig from cloudharness_model.models.name_value import NameValue @@ -23,7 +20,7 @@ class HarnessMainConfig(Model): Do not edit the class manually. """ - def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace=None, mainapp=None, registry=None, tag=None, apps=None, env=None, privenv=None, backup=None, name=None, task_images=None, build_hash=None): # noqa: E501 + def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace=None, mainapp=None, registry=None, tag=None, apps=None, env=None, privenv=None, backup=None, name=None, task_images=None, build_hash=None, envmap=None): # noqa: E501 """HarnessMainConfig - a model defined in OpenAPI :param local: The local of this HarnessMainConfig. # noqa: E501 @@ -54,6 +51,8 @@ def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace= :type task_images: Dict[str, object] :param build_hash: The build_hash of this HarnessMainConfig. # noqa: E501 :type build_hash: str + :param envmap: The envmap of this HarnessMainConfig. 
# noqa: E501 + :type envmap: Dict[str, object] """ self.openapi_types = { 'local': bool, @@ -69,7 +68,8 @@ def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace= 'backup': BackupConfig, 'name': str, 'task_images': Dict[str, object], - 'build_hash': str + 'build_hash': str, + 'envmap': Dict[str, object] } self.attribute_map = { @@ -86,7 +86,8 @@ def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace= 'backup': 'backup', 'name': 'name', 'task_images': 'task-images', - 'build_hash': 'build_hash' + 'build_hash': 'build_hash', + 'envmap': 'envmap' } self._local = local @@ -103,6 +104,7 @@ def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace= self._name = name self._task_images = task_images self._build_hash = build_hash + self._envmap = envmap @classmethod def from_dict(cls, dikt) -> 'HarnessMainConfig': @@ -116,7 +118,7 @@ def from_dict(cls, dikt) -> 'HarnessMainConfig': return util.deserialize_model(dikt, cls) @property - def local(self): + def local(self) -> bool: """Gets the local of this HarnessMainConfig. If set to true, local DNS mapping is added to pods. # noqa: E501 @@ -127,7 +129,7 @@ def local(self): return self._local @local.setter - def local(self, local): + def local(self, local: bool): """Sets the local of this HarnessMainConfig. If set to true, local DNS mapping is added to pods. # noqa: E501 @@ -141,7 +143,7 @@ def local(self, local): self._local = local @property - def secured_gatekeepers(self): + def secured_gatekeepers(self) -> bool: """Gets the secured_gatekeepers of this HarnessMainConfig. Enables/disables Gatekeepers on secured applications. Set to false for testing/development # noqa: E501 @@ -152,7 +154,7 @@ def secured_gatekeepers(self): return self._secured_gatekeepers @secured_gatekeepers.setter - def secured_gatekeepers(self, secured_gatekeepers): + def secured_gatekeepers(self, secured_gatekeepers: bool): """Sets the secured_gatekeepers of this HarnessMainConfig. 
Enables/disables Gatekeepers on secured applications. Set to false for testing/development # noqa: E501 @@ -166,7 +168,7 @@ def secured_gatekeepers(self, secured_gatekeepers): self._secured_gatekeepers = secured_gatekeepers @property - def domain(self): + def domain(self) -> str: """Gets the domain of this HarnessMainConfig. The root domain # noqa: E501 @@ -177,7 +179,7 @@ def domain(self): return self._domain @domain.setter - def domain(self, domain): + def domain(self, domain: str): """Sets the domain of this HarnessMainConfig. The root domain # noqa: E501 @@ -191,7 +193,7 @@ def domain(self, domain): self._domain = domain @property - def namespace(self): + def namespace(self) -> str: """Gets the namespace of this HarnessMainConfig. The K8s namespace. # noqa: E501 @@ -202,7 +204,7 @@ def namespace(self): return self._namespace @namespace.setter - def namespace(self, namespace): + def namespace(self, namespace: str): """Sets the namespace of this HarnessMainConfig. The K8s namespace. # noqa: E501 @@ -216,7 +218,7 @@ def namespace(self, namespace): self._namespace = namespace @property - def mainapp(self): + def mainapp(self) -> str: """Gets the mainapp of this HarnessMainConfig. Defines the app to map to the root domain # noqa: E501 @@ -227,7 +229,7 @@ def mainapp(self): return self._mainapp @mainapp.setter - def mainapp(self, mainapp): + def mainapp(self, mainapp: str): """Sets the mainapp of this HarnessMainConfig. Defines the app to map to the root domain # noqa: E501 @@ -241,7 +243,7 @@ def mainapp(self, mainapp): self._mainapp = mainapp @property - def registry(self): + def registry(self) -> RegistryConfig: """Gets the registry of this HarnessMainConfig. @@ -251,7 +253,7 @@ def registry(self): return self._registry @registry.setter - def registry(self, registry): + def registry(self, registry: RegistryConfig): """Sets the registry of this HarnessMainConfig. 
@@ -262,7 +264,7 @@ def registry(self, registry): self._registry = registry @property - def tag(self): + def tag(self) -> str: """Gets the tag of this HarnessMainConfig. Docker tag used to push/pull the built images. # noqa: E501 @@ -273,7 +275,7 @@ def tag(self): return self._tag @tag.setter - def tag(self, tag): + def tag(self, tag: str): """Sets the tag of this HarnessMainConfig. Docker tag used to push/pull the built images. # noqa: E501 @@ -285,7 +287,7 @@ def tag(self, tag): self._tag = tag @property - def apps(self): + def apps(self) -> Dict[str, ApplicationConfig]: """Gets the apps of this HarnessMainConfig. # noqa: E501 @@ -296,7 +298,7 @@ def apps(self): return self._apps @apps.setter - def apps(self, apps): + def apps(self, apps: Dict[str, ApplicationConfig]): """Sets the apps of this HarnessMainConfig. # noqa: E501 @@ -310,10 +312,10 @@ def apps(self, apps): self._apps = apps @property - def env(self): + def env(self) -> List[NameValue]: """Gets the env of this HarnessMainConfig. - Environmental variables added to all pods # noqa: E501 + Environmental variables added to all pods (deprecated, please use envmap) # noqa: E501 :return: The env of this HarnessMainConfig. :rtype: List[NameValue] @@ -321,10 +323,10 @@ def env(self): return self._env @env.setter - def env(self, env): + def env(self, env: List[NameValue]): """Sets the env of this HarnessMainConfig. - Environmental variables added to all pods # noqa: E501 + Environmental variables added to all pods (deprecated, please use envmap) # noqa: E501 :param env: The env of this HarnessMainConfig. :type env: List[NameValue] @@ -333,7 +335,7 @@ def env(self, env): self._env = env @property - def privenv(self): + def privenv(self) -> NameValue: """Gets the privenv of this HarnessMainConfig. @@ -343,7 +345,7 @@ def privenv(self): return self._privenv @privenv.setter - def privenv(self, privenv): + def privenv(self, privenv: NameValue): """Sets the privenv of this HarnessMainConfig. 
@@ -354,7 +356,7 @@ def privenv(self, privenv): self._privenv = privenv @property - def backup(self): + def backup(self) -> BackupConfig: """Gets the backup of this HarnessMainConfig. @@ -364,7 +366,7 @@ def backup(self): return self._backup @backup.setter - def backup(self, backup): + def backup(self, backup: BackupConfig): """Sets the backup of this HarnessMainConfig. @@ -375,7 +377,7 @@ def backup(self, backup): self._backup = backup @property - def name(self): + def name(self) -> str: """Gets the name of this HarnessMainConfig. Base name # noqa: E501 @@ -386,7 +388,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this HarnessMainConfig. Base name # noqa: E501 @@ -398,7 +400,7 @@ def name(self, name): self._name = name @property - def task_images(self): + def task_images(self) -> Dict[str, object]: """Gets the task_images of this HarnessMainConfig. # noqa: E501 @@ -409,7 +411,7 @@ def task_images(self): return self._task_images @task_images.setter - def task_images(self, task_images): + def task_images(self, task_images: Dict[str, object]): """Sets the task_images of this HarnessMainConfig. # noqa: E501 @@ -421,7 +423,7 @@ def task_images(self, task_images): self._task_images = task_images @property - def build_hash(self): + def build_hash(self) -> str: """Gets the build_hash of this HarnessMainConfig. # noqa: E501 @@ -432,7 +434,7 @@ def build_hash(self): return self._build_hash @build_hash.setter - def build_hash(self, build_hash): + def build_hash(self, build_hash: str): """Sets the build_hash of this HarnessMainConfig. # noqa: E501 @@ -442,3 +444,26 @@ def build_hash(self, build_hash): """ self._build_hash = build_hash + + @property + def envmap(self) -> Dict[str, object]: + """Gets the envmap of this HarnessMainConfig. + + # noqa: E501 + + :return: The envmap of this HarnessMainConfig. 
+ :rtype: Dict[str, object] + """ + return self._envmap + + @envmap.setter + def envmap(self, envmap: Dict[str, object]): + """Sets the envmap of this HarnessMainConfig. + + # noqa: E501 + + :param envmap: The envmap of this HarnessMainConfig. + :type envmap: Dict[str, object] + """ + + self._envmap = envmap diff --git a/libraries/models/cloudharness_model/models/ingress_config.py b/libraries/models/cloudharness_model/models/ingress_config.py index 6253941a..e0da288f 100644 --- a/libraries/models/cloudharness_model/models/ingress_config.py +++ b/libraries/models/cloudharness_model/models/ingress_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt from cloudharness_model import util @@ -17,36 +14,36 @@ class IngressConfig(Model): Do not edit the class manually. """ - def __init__(self, ssl_redirect=None, letsencrypt=None, auto=None, name=None): # noqa: E501 + def __init__(self, auto=None, name=None, ssl_redirect=None, letsencrypt=None): # noqa: E501 """IngressConfig - a model defined in OpenAPI - :param ssl_redirect: The ssl_redirect of this IngressConfig. # noqa: E501 - :type ssl_redirect: bool - :param letsencrypt: The letsencrypt of this IngressConfig. # noqa: E501 - :type letsencrypt: IngressConfigAllOfLetsencrypt :param auto: The auto of this IngressConfig. # noqa: E501 :type auto: bool :param name: The name of this IngressConfig. # noqa: E501 :type name: str + :param ssl_redirect: The ssl_redirect of this IngressConfig. # noqa: E501 + :type ssl_redirect: bool + :param letsencrypt: The letsencrypt of this IngressConfig. 
# noqa: E501 + :type letsencrypt: IngressConfigAllOfLetsencrypt """ self.openapi_types = { - 'ssl_redirect': bool, - 'letsencrypt': IngressConfigAllOfLetsencrypt, 'auto': bool, - 'name': str + 'name': str, + 'ssl_redirect': bool, + 'letsencrypt': IngressConfigAllOfLetsencrypt } self.attribute_map = { - 'ssl_redirect': 'ssl_redirect', - 'letsencrypt': 'letsencrypt', 'auto': 'auto', - 'name': 'name' + 'name': 'name', + 'ssl_redirect': 'ssl_redirect', + 'letsencrypt': 'letsencrypt' } - self._ssl_redirect = ssl_redirect - self._letsencrypt = letsencrypt self._auto = auto self._name = name + self._ssl_redirect = ssl_redirect + self._letsencrypt = letsencrypt @classmethod def from_dict(cls, dikt) -> 'IngressConfig': @@ -60,51 +57,7 @@ def from_dict(cls, dikt) -> 'IngressConfig': return util.deserialize_model(dikt, cls) @property - def ssl_redirect(self): - """Gets the ssl_redirect of this IngressConfig. - - # noqa: E501 - - :return: The ssl_redirect of this IngressConfig. - :rtype: bool - """ - return self._ssl_redirect - - @ssl_redirect.setter - def ssl_redirect(self, ssl_redirect): - """Sets the ssl_redirect of this IngressConfig. - - # noqa: E501 - - :param ssl_redirect: The ssl_redirect of this IngressConfig. - :type ssl_redirect: bool - """ - - self._ssl_redirect = ssl_redirect - - @property - def letsencrypt(self): - """Gets the letsencrypt of this IngressConfig. - - - :return: The letsencrypt of this IngressConfig. - :rtype: IngressConfigAllOfLetsencrypt - """ - return self._letsencrypt - - @letsencrypt.setter - def letsencrypt(self, letsencrypt): - """Sets the letsencrypt of this IngressConfig. - - - :param letsencrypt: The letsencrypt of this IngressConfig. - :type letsencrypt: IngressConfigAllOfLetsencrypt - """ - - self._letsencrypt = letsencrypt - - @property - def auto(self): + def auto(self) -> bool: """Gets the auto of this IngressConfig. 
When true, enables automatic template # noqa: E501 @@ -115,7 +68,7 @@ def auto(self): return self._auto @auto.setter - def auto(self, auto): + def auto(self, auto: bool): """Sets the auto of this IngressConfig. When true, enables automatic template # noqa: E501 @@ -129,7 +82,7 @@ def auto(self, auto): self._auto = auto @property - def name(self): + def name(self) -> str: """Gets the name of this IngressConfig. # noqa: E501 @@ -140,7 +93,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this IngressConfig. # noqa: E501 @@ -150,3 +103,47 @@ def name(self, name): """ self._name = name + + @property + def ssl_redirect(self) -> bool: + """Gets the ssl_redirect of this IngressConfig. + + # noqa: E501 + + :return: The ssl_redirect of this IngressConfig. + :rtype: bool + """ + return self._ssl_redirect + + @ssl_redirect.setter + def ssl_redirect(self, ssl_redirect: bool): + """Sets the ssl_redirect of this IngressConfig. + + # noqa: E501 + + :param ssl_redirect: The ssl_redirect of this IngressConfig. + :type ssl_redirect: bool + """ + + self._ssl_redirect = ssl_redirect + + @property + def letsencrypt(self) -> IngressConfigAllOfLetsencrypt: + """Gets the letsencrypt of this IngressConfig. + + + :return: The letsencrypt of this IngressConfig. + :rtype: IngressConfigAllOfLetsencrypt + """ + return self._letsencrypt + + @letsencrypt.setter + def letsencrypt(self, letsencrypt: IngressConfigAllOfLetsencrypt): + """Sets the letsencrypt of this IngressConfig. + + + :param letsencrypt: The letsencrypt of this IngressConfig. 
+ :type letsencrypt: IngressConfigAllOfLetsencrypt + """ + + self._letsencrypt = letsencrypt diff --git a/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py b/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py index 0e8a5ab8..467e18a2 100644 --- a/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py +++ b/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -43,7 +40,7 @@ def from_dict(cls, dikt) -> 'IngressConfigAllOfLetsencrypt': return util.deserialize_model(dikt, cls) @property - def email(self): + def email(self) -> str: """Gets the email of this IngressConfigAllOfLetsencrypt. @@ -53,7 +50,7 @@ def email(self): return self._email @email.setter - def email(self, email): + def email(self, email: str): """Sets the email of this IngressConfigAllOfLetsencrypt. 
diff --git a/libraries/models/cloudharness_model/models/jupyter_hub_config.py b/libraries/models/cloudharness_model/models/jupyter_hub_config.py index 1f04fa73..5aaa9850 100644 --- a/libraries/models/cloudharness_model/models/jupyter_hub_config.py +++ b/libraries/models/cloudharness_model/models/jupyter_hub_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -58,7 +55,7 @@ def from_dict(cls, dikt) -> 'JupyterHubConfig': return util.deserialize_model(dikt, cls) @property - def args(self): + def args(self) -> List[str]: """Gets the args of this JupyterHubConfig. arguments passed to the container # noqa: E501 @@ -69,7 +66,7 @@ def args(self): return self._args @args.setter - def args(self, args): + def args(self, args: List[str]): """Sets the args of this JupyterHubConfig. arguments passed to the container # noqa: E501 @@ -81,7 +78,7 @@ def args(self, args): self._args = args @property - def extra_config(self): + def extra_config(self) -> Dict[str, object]: """Gets the extra_config of this JupyterHubConfig. # noqa: E501 @@ -92,7 +89,7 @@ def extra_config(self): return self._extra_config @extra_config.setter - def extra_config(self, extra_config): + def extra_config(self, extra_config: Dict[str, object]): """Sets the extra_config of this JupyterHubConfig. # noqa: E501 @@ -104,7 +101,7 @@ def extra_config(self, extra_config): self._extra_config = extra_config @property - def spawner_extra_config(self): + def spawner_extra_config(self) -> Dict[str, object]: """Gets the spawner_extra_config of this JupyterHubConfig. 
# noqa: E501 @@ -115,7 +112,7 @@ def spawner_extra_config(self): return self._spawner_extra_config @spawner_extra_config.setter - def spawner_extra_config(self, spawner_extra_config): + def spawner_extra_config(self, spawner_extra_config: Dict[str, object]): """Sets the spawner_extra_config of this JupyterHubConfig. # noqa: E501 @@ -127,7 +124,7 @@ def spawner_extra_config(self, spawner_extra_config): self._spawner_extra_config = spawner_extra_config @property - def application_hook(self): + def application_hook(self) -> object: """Gets the application_hook of this JupyterHubConfig. change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image) # noqa: E501 @@ -138,7 +135,7 @@ def application_hook(self): return self._application_hook @application_hook.setter - def application_hook(self, application_hook): + def application_hook(self, application_hook: object): """Sets the application_hook of this JupyterHubConfig. change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image) # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/name_value.py b/libraries/models/cloudharness_model/models/name_value.py index f4b75cdd..abf07fe6 100644 --- a/libraries/models/cloudharness_model/models/name_value.py +++ b/libraries/models/cloudharness_model/models/name_value.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'NameValue': return util.deserialize_model(dikt, cls) @property - def name(self): + def name(self) -> str: """Gets the name of this NameValue. 
# noqa: E501 @@ -59,7 +56,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this NameValue. # noqa: E501 @@ -73,7 +70,7 @@ def name(self, name): self._name = name @property - def value(self): + def value(self) -> str: """Gets the value of this NameValue. # noqa: E501 @@ -84,7 +81,7 @@ def value(self): return self._value @value.setter - def value(self, value): + def value(self, value: str): """Sets the value of this NameValue. # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/registry_config.py b/libraries/models/cloudharness_model/models/registry_config.py index 79f27027..7ce48e9c 100644 --- a/libraries/models/cloudharness_model/models/registry_config.py +++ b/libraries/models/cloudharness_model/models/registry_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'RegistryConfig': return util.deserialize_model(dikt, cls) @property - def name(self): + def name(self) -> str: """Gets the name of this RegistryConfig. # noqa: E501 @@ -59,7 +56,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this RegistryConfig. # noqa: E501 @@ -73,7 +70,7 @@ def name(self, name): self._name = name @property - def secret(self): + def secret(self) -> str: """Gets the secret of this RegistryConfig. Optional secret used for pulling from docker registry. # noqa: E501 @@ -84,7 +81,7 @@ def secret(self): return self._secret @secret.setter - def secret(self, secret): + def secret(self, secret: str): """Sets the secret of this RegistryConfig. Optional secret used for pulling from docker registry. 
# noqa: E501 diff --git a/libraries/models/cloudharness_model/models/service_auto_artifact_config.py b/libraries/models/cloudharness_model/models/service_auto_artifact_config.py index 8d14ac42..77bded86 100644 --- a/libraries/models/cloudharness_model/models/service_auto_artifact_config.py +++ b/libraries/models/cloudharness_model/models/service_auto_artifact_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -15,31 +12,31 @@ class ServiceAutoArtifactConfig(Model): Do not edit the class manually. """ - def __init__(self, port=None, auto=None, name=None): # noqa: E501 + def __init__(self, auto=None, name=None, port=None): # noqa: E501 """ServiceAutoArtifactConfig - a model defined in OpenAPI - :param port: The port of this ServiceAutoArtifactConfig. # noqa: E501 - :type port: int :param auto: The auto of this ServiceAutoArtifactConfig. # noqa: E501 :type auto: bool :param name: The name of this ServiceAutoArtifactConfig. # noqa: E501 :type name: str + :param port: The port of this ServiceAutoArtifactConfig. # noqa: E501 + :type port: int """ self.openapi_types = { - 'port': int, 'auto': bool, - 'name': str + 'name': str, + 'port': int } self.attribute_map = { - 'port': 'port', 'auto': 'auto', - 'name': 'name' + 'name': 'name', + 'port': 'port' } - self._port = port self._auto = auto self._name = name + self._port = port @classmethod def from_dict(cls, dikt) -> 'ServiceAutoArtifactConfig': @@ -53,30 +50,7 @@ def from_dict(cls, dikt) -> 'ServiceAutoArtifactConfig': return util.deserialize_model(dikt, cls) @property - def port(self): - """Gets the port of this ServiceAutoArtifactConfig. - - Service port # noqa: E501 - - :return: The port of this ServiceAutoArtifactConfig. 
- :rtype: int - """ - return self._port - - @port.setter - def port(self, port): - """Sets the port of this ServiceAutoArtifactConfig. - - Service port # noqa: E501 - - :param port: The port of this ServiceAutoArtifactConfig. - :type port: int - """ - - self._port = port - - @property - def auto(self): + def auto(self) -> bool: """Gets the auto of this ServiceAutoArtifactConfig. When true, enables automatic template # noqa: E501 @@ -87,7 +61,7 @@ def auto(self): return self._auto @auto.setter - def auto(self, auto): + def auto(self, auto: bool): """Sets the auto of this ServiceAutoArtifactConfig. When true, enables automatic template # noqa: E501 @@ -101,7 +75,7 @@ def auto(self, auto): self._auto = auto @property - def name(self): + def name(self) -> str: """Gets the name of this ServiceAutoArtifactConfig. # noqa: E501 @@ -112,7 +86,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this ServiceAutoArtifactConfig. # noqa: E501 @@ -122,3 +96,26 @@ def name(self, name): """ self._name = name + + @property + def port(self) -> int: + """Gets the port of this ServiceAutoArtifactConfig. + + Service port # noqa: E501 + + :return: The port of this ServiceAutoArtifactConfig. + :rtype: int + """ + return self._port + + @port.setter + def port(self, port: int): + """Sets the port of this ServiceAutoArtifactConfig. + + Service port # noqa: E501 + + :param port: The port of this ServiceAutoArtifactConfig. 
+ :type port: int + """ + + self._port = port diff --git a/libraries/models/cloudharness_model/models/unit_tests_config.py b/libraries/models/cloudharness_model/models/unit_tests_config.py index 9b7813c8..680d984a 100644 --- a/libraries/models/cloudharness_model/models/unit_tests_config.py +++ b/libraries/models/cloudharness_model/models/unit_tests_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'UnitTestsConfig': return util.deserialize_model(dikt, cls) @property - def enabled(self): + def enabled(self) -> bool: """Gets the enabled of this UnitTestsConfig. Enables unit tests for this application (default: true) # noqa: E501 @@ -59,7 +56,7 @@ def enabled(self): return self._enabled @enabled.setter - def enabled(self, enabled): + def enabled(self, enabled: bool): """Sets the enabled of this UnitTestsConfig. Enables unit tests for this application (default: true) # noqa: E501 @@ -73,7 +70,7 @@ def enabled(self, enabled): self._enabled = enabled @property - def commands(self): + def commands(self) -> List[str]: """Gets the commands of this UnitTestsConfig. Commands to run unit tests # noqa: E501 @@ -84,7 +81,7 @@ def commands(self): return self._commands @commands.setter - def commands(self, commands): + def commands(self, commands: List[str]): """Sets the commands of this UnitTestsConfig. 
Commands to run unit tests # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/uri_role_mapping_config.py b/libraries/models/cloudharness_model/models/uri_role_mapping_config.py index 046becca..649ae22e 100644 --- a/libraries/models/cloudharness_model/models/uri_role_mapping_config.py +++ b/libraries/models/cloudharness_model/models/uri_role_mapping_config.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model import re from cloudharness_model import util @@ -50,7 +47,7 @@ def from_dict(cls, dikt) -> 'UriRoleMappingConfig': return util.deserialize_model(dikt, cls) @property - def uri(self): + def uri(self) -> str: """Gets the uri of this UriRoleMappingConfig. # noqa: E501 @@ -61,7 +58,7 @@ def uri(self): return self._uri @uri.setter - def uri(self, uri): + def uri(self, uri: str): """Sets the uri of this UriRoleMappingConfig. # noqa: E501 @@ -77,7 +74,7 @@ def uri(self, uri): self._uri = uri @property - def roles(self): + def roles(self) -> List[str]: """Gets the roles of this UriRoleMappingConfig. Roles allowed to access the present uri # noqa: E501 @@ -88,7 +85,7 @@ def roles(self): return self._roles @roles.setter - def roles(self, roles): + def roles(self, roles: List[str]): """Sets the roles of this UriRoleMappingConfig. 
Roles allowed to access the present uri # noqa: E501 diff --git a/libraries/models/cloudharness_model/models/user.py b/libraries/models/cloudharness_model/models/user.py index 3d1c1895..0676fbae 100644 --- a/libraries/models/cloudharness_model/models/user.py +++ b/libraries/models/cloudharness_model/models/user.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model.models.user_credential import UserCredential from cloudharness_model import util @@ -135,7 +132,7 @@ def from_dict(cls, dikt) -> 'User': return util.deserialize_model(dikt, cls) @property - def access(self): + def access(self) -> Dict[str, object]: """Gets the access of this User. @@ -145,7 +142,7 @@ def access(self): return self._access @access.setter - def access(self, access): + def access(self, access: Dict[str, object]): """Sets the access of this User. @@ -156,7 +153,7 @@ def access(self, access): self._access = access @property - def attributes(self): + def attributes(self) -> Dict[str, object]: """Gets the attributes of this User. @@ -166,7 +163,7 @@ def attributes(self): return self._attributes @attributes.setter - def attributes(self, attributes): + def attributes(self, attributes: Dict[str, object]): """Sets the attributes of this User. @@ -177,7 +174,7 @@ def attributes(self, attributes): self._attributes = attributes @property - def client_roles(self): + def client_roles(self) -> Dict[str, object]: """Gets the client_roles of this User. @@ -187,7 +184,7 @@ def client_roles(self): return self._client_roles @client_roles.setter - def client_roles(self, client_roles): + def client_roles(self, client_roles: Dict[str, object]): """Sets the client_roles of this User. 
@@ -198,7 +195,7 @@ def client_roles(self, client_roles): self._client_roles = client_roles @property - def created_timestamp(self): + def created_timestamp(self) -> int: """Gets the created_timestamp of this User. @@ -208,7 +205,7 @@ def created_timestamp(self): return self._created_timestamp @created_timestamp.setter - def created_timestamp(self, created_timestamp): + def created_timestamp(self, created_timestamp: int): """Sets the created_timestamp of this User. @@ -219,7 +216,7 @@ def created_timestamp(self, created_timestamp): self._created_timestamp = created_timestamp @property - def credentials(self): + def credentials(self) -> List[UserCredential]: """Gets the credentials of this User. @@ -229,7 +226,7 @@ def credentials(self): return self._credentials @credentials.setter - def credentials(self, credentials): + def credentials(self, credentials: List[UserCredential]): """Sets the credentials of this User. @@ -240,7 +237,7 @@ def credentials(self, credentials): self._credentials = credentials @property - def disableable_credential_types(self): + def disableable_credential_types(self) -> List[str]: """Gets the disableable_credential_types of this User. @@ -250,7 +247,7 @@ def disableable_credential_types(self): return self._disableable_credential_types @disableable_credential_types.setter - def disableable_credential_types(self, disableable_credential_types): + def disableable_credential_types(self, disableable_credential_types: List[str]): """Sets the disableable_credential_types of this User. @@ -261,7 +258,7 @@ def disableable_credential_types(self, disableable_credential_types): self._disableable_credential_types = disableable_credential_types @property - def email(self): + def email(self) -> str: """Gets the email of this User. @@ -271,7 +268,7 @@ def email(self): return self._email @email.setter - def email(self, email): + def email(self, email: str): """Sets the email of this User. 
@@ -282,7 +279,7 @@ def email(self, email): self._email = email @property - def email_verified(self): + def email_verified(self) -> bool: """Gets the email_verified of this User. @@ -292,7 +289,7 @@ def email_verified(self): return self._email_verified @email_verified.setter - def email_verified(self, email_verified): + def email_verified(self, email_verified: bool): """Sets the email_verified of this User. @@ -303,7 +300,7 @@ def email_verified(self, email_verified): self._email_verified = email_verified @property - def enabled(self): + def enabled(self) -> bool: """Gets the enabled of this User. @@ -313,7 +310,7 @@ def enabled(self): return self._enabled @enabled.setter - def enabled(self, enabled): + def enabled(self, enabled: bool): """Sets the enabled of this User. @@ -324,7 +321,7 @@ def enabled(self, enabled): self._enabled = enabled @property - def federation_link(self): + def federation_link(self) -> str: """Gets the federation_link of this User. @@ -334,7 +331,7 @@ def federation_link(self): return self._federation_link @federation_link.setter - def federation_link(self, federation_link): + def federation_link(self, federation_link: str): """Sets the federation_link of this User. @@ -345,7 +342,7 @@ def federation_link(self, federation_link): self._federation_link = federation_link @property - def first_name(self): + def first_name(self) -> str: """Gets the first_name of this User. @@ -355,7 +352,7 @@ def first_name(self): return self._first_name @first_name.setter - def first_name(self, first_name): + def first_name(self, first_name: str): """Sets the first_name of this User. @@ -366,7 +363,7 @@ def first_name(self, first_name): self._first_name = first_name @property - def groups(self): + def groups(self) -> List[str]: """Gets the groups of this User. @@ -376,7 +373,7 @@ def groups(self): return self._groups @groups.setter - def groups(self, groups): + def groups(self, groups: List[str]): """Sets the groups of this User. 
@@ -387,7 +384,7 @@ def groups(self, groups): self._groups = groups @property - def id(self): + def id(self) -> str: """Gets the id of this User. @@ -397,7 +394,7 @@ def id(self): return self._id @id.setter - def id(self, id): + def id(self, id: str): """Sets the id of this User. @@ -408,7 +405,7 @@ def id(self, id): self._id = id @property - def last_name(self): + def last_name(self) -> str: """Gets the last_name of this User. @@ -418,7 +415,7 @@ def last_name(self): return self._last_name @last_name.setter - def last_name(self, last_name): + def last_name(self, last_name: str): """Sets the last_name of this User. @@ -429,7 +426,7 @@ def last_name(self, last_name): self._last_name = last_name @property - def realm_roles(self): + def realm_roles(self) -> List[str]: """Gets the realm_roles of this User. @@ -439,7 +436,7 @@ def realm_roles(self): return self._realm_roles @realm_roles.setter - def realm_roles(self, realm_roles): + def realm_roles(self, realm_roles: List[str]): """Sets the realm_roles of this User. @@ -450,7 +447,7 @@ def realm_roles(self, realm_roles): self._realm_roles = realm_roles @property - def required_actions(self): + def required_actions(self) -> List[str]: """Gets the required_actions of this User. @@ -460,7 +457,7 @@ def required_actions(self): return self._required_actions @required_actions.setter - def required_actions(self, required_actions): + def required_actions(self, required_actions: List[str]): """Sets the required_actions of this User. @@ -471,7 +468,7 @@ def required_actions(self, required_actions): self._required_actions = required_actions @property - def service_account_client_id(self): + def service_account_client_id(self) -> str: """Gets the service_account_client_id of this User. 
@@ -481,7 +478,7 @@ def service_account_client_id(self): return self._service_account_client_id @service_account_client_id.setter - def service_account_client_id(self, service_account_client_id): + def service_account_client_id(self, service_account_client_id: str): """Sets the service_account_client_id of this User. @@ -492,7 +489,7 @@ def service_account_client_id(self, service_account_client_id): self._service_account_client_id = service_account_client_id @property - def username(self): + def username(self) -> str: """Gets the username of this User. @@ -502,7 +499,7 @@ def username(self): return self._username @username.setter - def username(self, username): + def username(self, username: str): """Sets the username of this User. @@ -513,7 +510,7 @@ def username(self, username): self._username = username @property - def additional_properties(self): + def additional_properties(self) -> object: """Gets the additional_properties of this User. @@ -523,7 +520,7 @@ def additional_properties(self): return self._additional_properties @additional_properties.setter - def additional_properties(self, additional_properties): + def additional_properties(self, additional_properties: object): """Sets the additional_properties of this User. 
diff --git a/libraries/models/cloudharness_model/models/user_credential.py b/libraries/models/cloudharness_model/models/user_credential.py index 4f54ddf5..1b692f8f 100644 --- a/libraries/models/cloudharness_model/models/user_credential.py +++ b/libraries/models/cloudharness_model/models/user_credential.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -83,7 +80,7 @@ def from_dict(cls, dikt) -> 'UserCredential': return util.deserialize_model(dikt, cls) @property - def created_date(self): + def created_date(self) -> int: """Gets the created_date of this UserCredential. @@ -93,7 +90,7 @@ def created_date(self): return self._created_date @created_date.setter - def created_date(self, created_date): + def created_date(self, created_date: int): """Sets the created_date of this UserCredential. @@ -104,7 +101,7 @@ def created_date(self, created_date): self._created_date = created_date @property - def credential_data(self): + def credential_data(self) -> str: """Gets the credential_data of this UserCredential. @@ -114,7 +111,7 @@ def credential_data(self): return self._credential_data @credential_data.setter - def credential_data(self, credential_data): + def credential_data(self, credential_data: str): """Sets the credential_data of this UserCredential. @@ -125,7 +122,7 @@ def credential_data(self, credential_data): self._credential_data = credential_data @property - def id(self): + def id(self) -> str: """Gets the id of this UserCredential. @@ -135,7 +132,7 @@ def id(self): return self._id @id.setter - def id(self, id): + def id(self, id: str): """Sets the id of this UserCredential. 
@@ -146,7 +143,7 @@ def id(self, id): self._id = id @property - def priority(self): + def priority(self) -> int: """Gets the priority of this UserCredential. @@ -156,7 +153,7 @@ def priority(self): return self._priority @priority.setter - def priority(self, priority): + def priority(self, priority: int): """Sets the priority of this UserCredential. @@ -167,7 +164,7 @@ def priority(self, priority): self._priority = priority @property - def secret_data(self): + def secret_data(self) -> str: """Gets the secret_data of this UserCredential. @@ -177,7 +174,7 @@ def secret_data(self): return self._secret_data @secret_data.setter - def secret_data(self, secret_data): + def secret_data(self, secret_data: str): """Sets the secret_data of this UserCredential. @@ -188,7 +185,7 @@ def secret_data(self, secret_data): self._secret_data = secret_data @property - def temporary(self): + def temporary(self) -> bool: """Gets the temporary of this UserCredential. @@ -198,7 +195,7 @@ def temporary(self): return self._temporary @temporary.setter - def temporary(self, temporary): + def temporary(self, temporary: bool): """Sets the temporary of this UserCredential. @@ -209,7 +206,7 @@ def temporary(self, temporary): self._temporary = temporary @property - def type(self): + def type(self) -> str: """Gets the type of this UserCredential. @@ -219,7 +216,7 @@ def type(self): return self._type @type.setter - def type(self, type): + def type(self, type: str): """Sets the type of this UserCredential. @@ -230,7 +227,7 @@ def type(self, type): self._type = type @property - def user_label(self): + def user_label(self) -> str: """Gets the user_label of this UserCredential. @@ -240,7 +237,7 @@ def user_label(self): return self._user_label @user_label.setter - def user_label(self, user_label): + def user_label(self, user_label: str): """Sets the user_label of this UserCredential. 
@@ -251,7 +248,7 @@ def user_label(self, user_label): self._user_label = user_label @property - def value(self): + def value(self) -> str: """Gets the value of this UserCredential. @@ -261,7 +258,7 @@ def value(self): return self._value @value.setter - def value(self, value): + def value(self, value: str): """Sets the value of this UserCredential. diff --git a/libraries/models/cloudharness_model/models/user_group.py b/libraries/models/cloudharness_model/models/user_group.py index 72e135ef..0cf48e99 100644 --- a/libraries/models/cloudharness_model/models/user_group.py +++ b/libraries/models/cloudharness_model/models/user_group.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -78,7 +75,7 @@ def from_dict(cls, dikt) -> 'UserGroup': return util.deserialize_model(dikt, cls) @property - def access(self): + def access(self) -> Dict[str, object]: """Gets the access of this UserGroup. @@ -88,7 +85,7 @@ def access(self): return self._access @access.setter - def access(self, access): + def access(self, access: Dict[str, object]): """Sets the access of this UserGroup. @@ -99,7 +96,7 @@ def access(self, access): self._access = access @property - def attributes(self): + def attributes(self) -> Dict[str, object]: """Gets the attributes of this UserGroup. # noqa: E501 @@ -110,7 +107,7 @@ def attributes(self): return self._attributes @attributes.setter - def attributes(self, attributes): + def attributes(self, attributes: Dict[str, object]): """Sets the attributes of this UserGroup. # noqa: E501 @@ -122,7 +119,7 @@ def attributes(self, attributes): self._attributes = attributes @property - def client_roles(self): + def client_roles(self) -> Dict[str, object]: """Gets the client_roles of this UserGroup. 
@@ -132,7 +129,7 @@ def client_roles(self): return self._client_roles @client_roles.setter - def client_roles(self, client_roles): + def client_roles(self, client_roles: Dict[str, object]): """Sets the client_roles of this UserGroup. @@ -143,7 +140,7 @@ def client_roles(self, client_roles): self._client_roles = client_roles @property - def id(self): + def id(self) -> str: """Gets the id of this UserGroup. @@ -153,7 +150,7 @@ def id(self): return self._id @id.setter - def id(self, id): + def id(self, id: str): """Sets the id of this UserGroup. @@ -164,7 +161,7 @@ def id(self, id): self._id = id @property - def name(self): + def name(self) -> str: """Gets the name of this UserGroup. @@ -174,7 +171,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this UserGroup. @@ -185,7 +182,7 @@ def name(self, name): self._name = name @property - def path(self): + def path(self) -> str: """Gets the path of this UserGroup. @@ -195,7 +192,7 @@ def path(self): return self._path @path.setter - def path(self, path): + def path(self, path: str): """Sets the path of this UserGroup. @@ -206,7 +203,7 @@ def path(self, path): self._path = path @property - def realm_roles(self): + def realm_roles(self) -> List[str]: """Gets the realm_roles of this UserGroup. @@ -216,7 +213,7 @@ def realm_roles(self): return self._realm_roles @realm_roles.setter - def realm_roles(self, realm_roles): + def realm_roles(self, realm_roles: List[str]): """Sets the realm_roles of this UserGroup. @@ -227,7 +224,7 @@ def realm_roles(self, realm_roles): self._realm_roles = realm_roles @property - def sub_groups(self): + def sub_groups(self) -> List[UserGroup]: """Gets the sub_groups of this UserGroup. @@ -237,7 +234,7 @@ def sub_groups(self): return self._sub_groups @sub_groups.setter - def sub_groups(self, sub_groups): + def sub_groups(self, sub_groups: List[UserGroup]): """Sets the sub_groups of this UserGroup. 
diff --git a/libraries/models/cloudharness_model/models/user_role.py b/libraries/models/cloudharness_model/models/user_role.py index dfa7e529..8cbc7d00 100644 --- a/libraries/models/cloudharness_model/models/user_role.py +++ b/libraries/models/cloudharness_model/models/user_role.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.base_model import Model from cloudharness_model import util @@ -73,7 +70,7 @@ def from_dict(cls, dikt) -> 'UserRole': return util.deserialize_model(dikt, cls) @property - def attributes(self): + def attributes(self) -> Dict[str, object]: """Gets the attributes of this UserRole. @@ -83,7 +80,7 @@ def attributes(self): return self._attributes @attributes.setter - def attributes(self, attributes): + def attributes(self, attributes: Dict[str, object]): """Sets the attributes of this UserRole. @@ -94,7 +91,7 @@ def attributes(self, attributes): self._attributes = attributes @property - def client_role(self): + def client_role(self) -> bool: """Gets the client_role of this UserRole. @@ -104,7 +101,7 @@ def client_role(self): return self._client_role @client_role.setter - def client_role(self, client_role): + def client_role(self, client_role: bool): """Sets the client_role of this UserRole. @@ -115,7 +112,7 @@ def client_role(self, client_role): self._client_role = client_role @property - def composite(self): + def composite(self) -> bool: """Gets the composite of this UserRole. @@ -125,7 +122,7 @@ def composite(self): return self._composite @composite.setter - def composite(self, composite): + def composite(self, composite: bool): """Sets the composite of this UserRole. 
@@ -136,7 +133,7 @@ def composite(self, composite): self._composite = composite @property - def container_id(self): + def container_id(self) -> str: """Gets the container_id of this UserRole. @@ -146,7 +143,7 @@ def container_id(self): return self._container_id @container_id.setter - def container_id(self, container_id): + def container_id(self, container_id: str): """Sets the container_id of this UserRole. @@ -157,7 +154,7 @@ def container_id(self, container_id): self._container_id = container_id @property - def description(self): + def description(self) -> str: """Gets the description of this UserRole. @@ -167,7 +164,7 @@ def description(self): return self._description @description.setter - def description(self, description): + def description(self, description: str): """Sets the description of this UserRole. @@ -178,7 +175,7 @@ def description(self, description): self._description = description @property - def id(self): + def id(self) -> str: """Gets the id of this UserRole. @@ -188,7 +185,7 @@ def id(self): return self._id @id.setter - def id(self, id): + def id(self, id: str): """Sets the id of this UserRole. @@ -199,7 +196,7 @@ def id(self, id): self._id = id @property - def name(self): + def name(self) -> str: """Gets the name of this UserRole. @@ -209,7 +206,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this UserRole. 
diff --git a/libraries/models/cloudharness_model/typing_utils.py b/libraries/models/cloudharness_model/typing_utils.py index 0563f81f..74e3c913 100644 --- a/libraries/models/cloudharness_model/typing_utils.py +++ b/libraries/models/cloudharness_model/typing_utils.py @@ -1,5 +1,3 @@ -# coding: utf-8 - import sys if sys.version_info < (3, 7): diff --git a/libraries/models/tox.ini b/libraries/models/tox.ini index e76b6dbf..132efa3e 100644 --- a/libraries/models/tox.ini +++ b/libraries/models/tox.ini @@ -5,7 +5,7 @@ skipsdist=True [testenv] deps=-r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - {toxinidir} + {toxinidir} commands= pytest --cov=cloudharness_model From 831bd1c5644d1aa88af48c6a4195f810da952244 Mon Sep 17 00:00:00 2001 From: Jake Conkerton-Darby Date: Mon, 29 Jul 2024 16:06:29 +0100 Subject: [PATCH 097/210] CH-91 Updated the auto deployment template to include the envmap and added an example to samples --- .../samples/controllers/test_controller.py | 16 ++++++++++++++++ applications/samples/deploy/values.yaml | 3 +++ .../helm/templates/auto-deployments.yaml | 4 ++++ 3 files changed, 23 insertions(+) diff --git a/applications/samples/backend/samples/controllers/test_controller.py b/applications/samples/backend/samples/controllers/test_controller.py index e792c99e..3fb922f2 100644 --- a/applications/samples/backend/samples/controllers/test_controller.py +++ b/applications/samples/backend/samples/controllers/test_controller.py @@ -22,6 +22,22 @@ def ping(): # noqa: E501 :rtype: str """ + + import os + + expected_environment_variables = { + 'ENVIRONMENT_TEST_A': 'value', + 'ENVIRONMENT_TEST_B': '123', + } + + for key, expected_value in expected_environment_variables.items(): + try: + environment_value = os.environ[key] + if environment_value != expected_value: + raise Exception(f'Expected environment variable {key} to be {expected_value}, but got {environment_value}') + except KeyError: + raise Exception(f'Expected to have an environment 
variable {key} defined') + import time return time.time() diff --git a/applications/samples/deploy/values.yaml b/applications/samples/deploy/values.yaml index 6265c681..ab22f1e5 100644 --- a/applications/samples/deploy/values.yaml +++ b/applications/samples/deploy/values.yaml @@ -39,6 +39,9 @@ harness: env: - name: WORKERS value: "3" + envmap: + ENVIRONMENT_TEST_A: "value" + ENVIRONMENT_TEST_B: 123 dependencies: soft: - workflows diff --git a/deployment-configuration/helm/templates/auto-deployments.yaml b/deployment-configuration/helm/templates/auto-deployments.yaml index 23d4f61c..6981c2fd 100644 --- a/deployment-configuration/helm/templates/auto-deployments.yaml +++ b/deployment-configuration/helm/templates/auto-deployments.yaml @@ -66,6 +66,10 @@ spec: {{- if .app.harness.env }} {{- .app.harness.env | toYaml | nindent 8 }} {{- end }} + {{- range $name, $value := .app.harness.envmap }} + - name: {{ $name | quote }} + value: {{ $value | quote }} + {{- end }} {{ if .app.harness.livenessProbe }} livenessProbe: httpGet: From 9d7a648789908d6c2c5484b204e06f6df3c3c640 Mon Sep 17 00:00:00 2001 From: Jake Conkerton-Darby Date: Mon, 29 Jul 2024 16:56:01 +0100 Subject: [PATCH 098/210] CH-91 Updated documentation around environment variables --- docs/applications/README.md | 5 ++- docs/applications/environment-variables.md | 46 ++++++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 docs/applications/environment-variables.md diff --git a/docs/applications/README.md b/docs/applications/README.md index e76bd861..da0c44fa 100644 --- a/docs/applications/README.md +++ b/docs/applications/README.md @@ -109,7 +109,10 @@ The most important configuration entries are the following: - `postgres`: postgres specific configurations - `mongo`: mongo specific configurations - `neo4j`: neo4j specific configurations - - `env` (`{name, value}[]`): add custom environment variables + - `envmap`: add custom environment variables + - ``: `` + - ... 
+ - `env` (`{name, value}[]`): add custom environment variables (deprecated, please use `envmap`) - `resources`: mount files from - `use_services` (`{name, src, dst}[]`): create reverse proxy endpoints in the ingress for the listed applications on [subdomain].[Values.domain]/proxy/[name]. Useful to avoid CORS requests from frontend clients - `readinessProbe`: defines a a url to use as a readiness probe diff --git a/docs/applications/environment-variables.md b/docs/applications/environment-variables.md new file mode 100644 index 00000000..f986ec38 --- /dev/null +++ b/docs/applications/environment-variables.md @@ -0,0 +1,46 @@ +# CloudHarness Environment Variables + +CloudHarness has support for adding environment variables for an application which will be available to all containers within the application pod. + +## Automatically included environment variables + +The following environment variables are included in each container by default: +- `CH_CURRENT_APP_NAME`: the name of the application +- Any environment variables defined in the root `.Values.env` +- If `accounts` is an included application: + - `CH_ACCOUNTS_CLIENT_SECRET`: the client secret for the accounts + - `CH_ACCOUNTS_REALM`: the accounts realm + - `CH_ACCOUNTS_AUTH_DOMAIN`: the auth domain for the accounts + - `CH_ACCOUNTS_CLIENT_ID`: the client id for the accounts + - `DOMAIN`: the domain for the accounts + +## Environment variables definition in CloudHarness + +Environment variables are defined in the application values.yaml file in the `envmap` section under the `harness` section. +Example + +```yaml +harness: + envmap: + ENV_VARIABLE_A: + ... +``` + +Each key in the `envmap` will add an environment variable with a name matching the key and a value equal to the value provided. The value can be any primitive type, but will be quoted as a string within the deployment template. 
+ +### (Deprecated) Setting with `env` + +Environment variables can be defined by using the `env` section under the `harness` section. This functionality is deprecated but not yet obsoleted, and use of `envmap` should be preferred over this approach. +Example + +```yaml +harness: + env: + - name: ENV_VARIABLE_A + value: + ... +``` + +Each element of the `env` sequence will add an environment variable named `name` with value set from `value`. + +This functionality was deprecated as cloud harness cannot merge arrays, so if an environment variable needed changing in a specific environment the entire array must be reproduced to change the single variable. \ No newline at end of file From aec6e664b55150b5810b2897016d6292f4f5e02c Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 29 Jul 2024 18:53:28 +0200 Subject: [PATCH 099/210] CH-140 fixes --- applications/samples/api/openapi.yaml | 4 +- applications/samples/backend/requirements.txt | 1 - .../controllers/resource_controller.py | 4 +- .../samples/backend/samples/encoder.py | 8 +- .../backend/samples/models/__init__.py | 3 - .../backend/samples/models/base_model.py | 68 +++ .../samples/models/inline_response202.py | 9 +- .../samples/models/inline_response202_task.py | 13 +- .../backend/samples/models/sample_resource.py | 17 +- .../backend/samples/openapi/openapi.yaml | 2 +- .../samples/backend/samples/typing_utils.py | 2 - applications/samples/backend/samples/util.py | 9 +- applications/samples/backend/setup.py | 1 - applications/samples/backend/tox.ini | 2 +- .../backend/www/assets/index-Cyl2oP5E.css | 1 + .../backend/www/assets/index-DtmKuX2X.js | 209 +++++++++ applications/samples/backend/www/favicon.png | Bin 0 -> 5899 bytes applications/samples/backend/www/index.html | 17 + applications/samples/backend/www/logo.png | Bin 0 -> 180296 bytes applications/samples/deploy/values.yaml | 32 +- applications/samples/frontend/src/rest/api.ts | 132 +++--- .../samples/frontend/src/rest/base.ts | 25 +- 
.../samples/frontend/src/rest/common.ts | 10 +- .../frontend/src/rest/configuration.ts | 9 + applications/samples/test/e2e/samples.spec.ts | 2 +- applications/workflows/api/openapi.yaml | 183 ++++---- deployment/codefresh-test.yaml | 421 ++++++++---------- .../cloudharness/utils/server.py | 11 +- .../cloudharness/workflows/operations.py | 47 +- libraries/cloudharness-common/setup.py | 2 +- libraries/cloudharness-utils/setup.py | 2 +- .../cloudharness_test/apitest_init.py | 4 +- .../ch_cli_tools/openapi.py | 27 +- 33 files changed, 783 insertions(+), 494 deletions(-) create mode 100644 applications/samples/backend/samples/models/base_model.py create mode 100644 applications/samples/backend/www/assets/index-Cyl2oP5E.css create mode 100644 applications/samples/backend/www/assets/index-DtmKuX2X.js create mode 100644 applications/samples/backend/www/favicon.png create mode 100644 applications/samples/backend/www/index.html create mode 100644 applications/samples/backend/www/logo.png diff --git a/applications/samples/api/openapi.yaml b/applications/samples/api/openapi.yaml index ac78581f..3332cd2b 100644 --- a/applications/samples/api/openapi.yaml +++ b/applications/samples/api/openapi.yaml @@ -1,4 +1,4 @@ -openapi: 3.0.0 +openapi: "3.0.3" info: title: CloudHarness Sample API version: 0.1.0 @@ -27,7 +27,7 @@ paths: operationId: error summary: test sentry is working x-openapi-router-controller: samples.controllers.test_controller - + /ping: get: tags: diff --git a/applications/samples/backend/requirements.txt b/applications/samples/backend/requirements.txt index b3db72c8..a5ddff35 100644 --- a/applications/samples/backend/requirements.txt +++ b/applications/samples/backend/requirements.txt @@ -1,5 +1,4 @@ connexion[swagger-ui]==2.14.2 -swagger-ui-bundle >= 0.0.2 python_dateutil >= 2.6.0 setuptools >= 21.0.0 Flask<3.0.0 diff --git a/applications/samples/backend/samples/controllers/resource_controller.py 
b/applications/samples/backend/samples/controllers/resource_controller.py index 3204708b..4f17d3da 100644 --- a/applications/samples/backend/samples/controllers/resource_controller.py +++ b/applications/samples/backend/samples/controllers/resource_controller.py @@ -23,9 +23,9 @@ def create_sample_resource(sample_resource=None): # noqa: E501 return "Payload is not of type SampleResource", 400 # Create a file inside the nfs - with open("/mnt/myvolume/myfile", "w") as f: + with open("/tmp/myvolume/myfile", "w") as f: print("test", file=f) - + return resource_service.create_sample_resource(sample_resource), 201 diff --git a/applications/samples/backend/samples/encoder.py b/applications/samples/backend/samples/encoder.py index fe2c3b18..ce94c8c8 100644 --- a/applications/samples/backend/samples/encoder.py +++ b/applications/samples/backend/samples/encoder.py @@ -1,8 +1,6 @@ +from connexion.apps.flask_app import FlaskJSONEncoder -import six - -from samples.models.base_model_ import Model -from flask.json import FlaskJSONEncoder +from samples.models.base_model import Model class JSONEncoder(FlaskJSONEncoder): @@ -11,7 +9,7 @@ class JSONEncoder(FlaskJSONEncoder): def default(self, o): if isinstance(o, Model): dikt = {} - for attr, _ in six.iteritems(o.openapi_types): + for attr in o.openapi_types: value = getattr(o, attr) if value is None and not self.include_nulls: continue diff --git a/applications/samples/backend/samples/models/__init__.py b/applications/samples/backend/samples/models/__init__.py index 4034deb6..94617398 100644 --- a/applications/samples/backend/samples/models/__init__.py +++ b/applications/samples/backend/samples/models/__init__.py @@ -1,7 +1,4 @@ -# coding: utf-8 - # flake8: noqa -from __future__ import absolute_import # import models into model package from samples.models.inline_response202 import InlineResponse202 from samples.models.inline_response202_task import InlineResponse202Task diff --git 
a/applications/samples/backend/samples/models/base_model.py b/applications/samples/backend/samples/models/base_model.py new file mode 100644 index 00000000..30bbbb63 --- /dev/null +++ b/applications/samples/backend/samples/models/base_model.py @@ -0,0 +1,68 @@ +import pprint + +import typing + +from samples import util + +T = typing.TypeVar('T') + + +class Model: + # openapiTypes: The key is attribute name and the + # value is attribute type. + openapi_types: typing.Dict[str, type] = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. + attribute_map: typing.Dict[str, str] = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr in self.openapi_types: + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/applications/samples/backend/samples/models/inline_response202.py b/applications/samples/backend/samples/models/inline_response202.py index b6d2a3ca..e98cb00e 100644 --- 
a/applications/samples/backend/samples/models/inline_response202.py +++ b/applications/samples/backend/samples/models/inline_response202.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from samples.models.base_model_ import Model +from samples.models.base_model import Model from samples.models.inline_response202_task import InlineResponse202Task from samples import util @@ -45,7 +42,7 @@ def from_dict(cls, dikt) -> 'InlineResponse202': return util.deserialize_model(dikt, cls) @property - def task(self): + def task(self) -> InlineResponse202Task: """Gets the task of this InlineResponse202. @@ -55,7 +52,7 @@ def task(self): return self._task @task.setter - def task(self, task): + def task(self, task: InlineResponse202Task): """Sets the task of this InlineResponse202. diff --git a/applications/samples/backend/samples/models/inline_response202_task.py b/applications/samples/backend/samples/models/inline_response202_task.py index e9a53b7f..2c4af7a0 100644 --- a/applications/samples/backend/samples/models/inline_response202_task.py +++ b/applications/samples/backend/samples/models/inline_response202_task.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from samples.models.base_model_ import Model +from samples.models.base_model import Model from samples import util @@ -48,7 +45,7 @@ def from_dict(cls, dikt) -> 'InlineResponse202Task': return util.deserialize_model(dikt, cls) @property - def href(self): + def href(self) -> str: """Gets the href of this InlineResponse202Task. the url where to check the operation status # noqa: E501 @@ -59,7 +56,7 @@ def href(self): return self._href @href.setter - def href(self, href): + def href(self, href: str): """Sets the href of this InlineResponse202Task. 
the url where to check the operation status # noqa: E501 @@ -71,7 +68,7 @@ def href(self, href): self._href = href @property - def name(self): + def name(self) -> str: """Gets the name of this InlineResponse202Task. @@ -81,7 +78,7 @@ def name(self): return self._name @name.setter - def name(self, name): + def name(self, name: str): """Sets the name of this InlineResponse202Task. diff --git a/applications/samples/backend/samples/models/sample_resource.py b/applications/samples/backend/samples/models/sample_resource.py index 1deca853..d5477452 100644 --- a/applications/samples/backend/samples/models/sample_resource.py +++ b/applications/samples/backend/samples/models/sample_resource.py @@ -1,11 +1,8 @@ -# coding: utf-8 - -from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing import List, Dict # noqa: F401 -from samples.models.base_model_ import Model +from samples.models.base_model import Model from samples import util @@ -53,7 +50,7 @@ def from_dict(cls, dikt) -> 'SampleResource': return util.deserialize_model(dikt, cls) @property - def a(self): + def a(self) -> float: """Gets the a of this SampleResource. # noqa: E501 @@ -64,7 +61,7 @@ def a(self): return self._a @a.setter - def a(self, a): + def a(self, a: float): """Sets the a of this SampleResource. # noqa: E501 @@ -78,7 +75,7 @@ def a(self, a): self._a = a @property - def b(self): + def b(self) -> float: """Gets the b of this SampleResource. # noqa: E501 @@ -89,7 +86,7 @@ def b(self): return self._b @b.setter - def b(self, b): + def b(self, b: float): """Sets the b of this SampleResource. # noqa: E501 @@ -101,7 +98,7 @@ def b(self, b): self._b = b @property - def id(self): + def id(self) -> float: """Gets the id of this SampleResource. # noqa: E501 @@ -112,7 +109,7 @@ def id(self): return self._id @id.setter - def id(self, id): + def id(self, id: float): """Sets the id of this SampleResource. 
# noqa: E501 diff --git a/applications/samples/backend/samples/openapi/openapi.yaml b/applications/samples/backend/samples/openapi/openapi.yaml index 4e63db55..6a875771 100644 --- a/applications/samples/backend/samples/openapi/openapi.yaml +++ b/applications/samples/backend/samples/openapi/openapi.yaml @@ -1,4 +1,4 @@ -openapi: 3.0.0 +openapi: 3.0.3 info: contact: email: cloudharness@metacell.us diff --git a/applications/samples/backend/samples/typing_utils.py b/applications/samples/backend/samples/typing_utils.py index 0563f81f..74e3c913 100644 --- a/applications/samples/backend/samples/typing_utils.py +++ b/applications/samples/backend/samples/typing_utils.py @@ -1,5 +1,3 @@ -# coding: utf-8 - import sys if sys.version_info < (3, 7): diff --git a/applications/samples/backend/samples/util.py b/applications/samples/backend/samples/util.py index 96a83499..5b241814 100644 --- a/applications/samples/backend/samples/util.py +++ b/applications/samples/backend/samples/util.py @@ -1,6 +1,5 @@ import datetime -import six import typing from samples import typing_utils @@ -16,7 +15,7 @@ def _deserialize(data, klass): if data is None: return None - if klass in six.integer_types or klass in (float, str, bool, bytearray): + if klass in (int, float, str, bool, bytearray): return _deserialize_primitive(data, klass) elif klass == object: return _deserialize_object(data) @@ -45,7 +44,7 @@ def _deserialize_primitive(data, klass): try: value = klass(data) except UnicodeEncodeError: - value = six.u(data) + value = data except TypeError: value = data return value @@ -110,7 +109,7 @@ def deserialize_model(data, klass): if not instance.openapi_types: return data - for attr, attr_type in six.iteritems(instance.openapi_types): + for attr, attr_type in instance.openapi_types.items(): if data is not None \ and instance.attribute_map[attr] in data \ and isinstance(data, (list, dict)): @@ -145,4 +144,4 @@ def _deserialize_dict(data, boxed_type): :rtype: dict """ return {k: _deserialize(v, 
boxed_type) - for k, v in six.iteritems(data)} + for k, v in data.items() } diff --git a/applications/samples/backend/setup.py b/applications/samples/backend/setup.py index 471cf86b..16908eb6 100644 --- a/applications/samples/backend/setup.py +++ b/applications/samples/backend/setup.py @@ -15,7 +15,6 @@ REQUIRES = [ "connexion>=2.0.2", - "swagger-ui-bundle>=0.0.2", "python_dateutil>=2.6.0", "pyjwt>=2.6.0", "cloudharness" diff --git a/applications/samples/backend/tox.ini b/applications/samples/backend/tox.ini index df3f1773..5d5ecced 100644 --- a/applications/samples/backend/tox.ini +++ b/applications/samples/backend/tox.ini @@ -5,7 +5,7 @@ skipsdist=True [testenv] deps=-r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt - {toxinidir} + {toxinidir} commands= pytest --cov=samples diff --git a/applications/samples/backend/www/assets/index-Cyl2oP5E.css b/applications/samples/backend/www/assets/index-Cyl2oP5E.css new file mode 100644 index 00000000..aef4555b --- /dev/null +++ b/applications/samples/backend/www/assets/index-Cyl2oP5E.css @@ -0,0 +1 @@ +body{text-align:center;background-color:"#eeeeee";font-family:Roboto,Helvetica,sans-serif} diff --git a/applications/samples/backend/www/assets/index-DtmKuX2X.js b/applications/samples/backend/www/assets/index-DtmKuX2X.js new file mode 100644 index 00000000..19bf27f5 --- /dev/null +++ b/applications/samples/backend/www/assets/index-DtmKuX2X.js @@ -0,0 +1,209 @@ +(function(){const s=document.createElement("link").relList;if(s&&s.supports&&s.supports("modulepreload"))return;for(const y of document.querySelectorAll('link[rel="modulepreload"]'))v(y);new MutationObserver(y=>{for(const S of y)if(S.type==="childList")for(const f of S.addedNodes)f.tagName==="LINK"&&f.rel==="modulepreload"&&v(f)}).observe(document,{childList:!0,subtree:!0});function p(y){const S={};return 
y.integrity&&(S.integrity=y.integrity),y.referrerPolicy&&(S.referrerPolicy=y.referrerPolicy),y.crossOrigin==="use-credentials"?S.credentials="include":y.crossOrigin==="anonymous"?S.credentials="omit":S.credentials="same-origin",S}function v(y){if(y.ep)return;y.ep=!0;const S=p(y);fetch(y.href,S)}})();function cA(l){return l&&l.__esModule&&Object.prototype.hasOwnProperty.call(l,"default")?l.default:l}var SE={exports:{}},Uh={},EE={exports:{}},hf={exports:{}};hf.exports;(function(l,s){/** + * @license React + * react.development.js + * + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */(function(){typeof __REACT_DEVTOOLS_GLOBAL_HOOK__<"u"&&typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart=="function"&&__REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart(new Error);var p="18.3.1",v=Symbol.for("react.element"),y=Symbol.for("react.portal"),S=Symbol.for("react.fragment"),f=Symbol.for("react.strict_mode"),N=Symbol.for("react.profiler"),z=Symbol.for("react.provider"),U=Symbol.for("react.context"),M=Symbol.for("react.forward_ref"),F=Symbol.for("react.suspense"),ee=Symbol.for("react.suspense_list"),$=Symbol.for("react.memo"),B=Symbol.for("react.lazy"),ce=Symbol.for("react.offscreen"),Je=Symbol.iterator,Ye="@@iterator";function Ce(d){if(d===null||typeof d!="object")return null;var g=Je&&d[Je]||d[Ye];return typeof g=="function"?g:null}var ne={current:null},Ke={transition:null},de={current:null,isBatchingLegacy:!1,didScheduleLegacyUpdate:!1},Fe={current:null},ge={},Gt=null;function Cn(d){Gt=d}ge.setExtraStackFrame=function(d){Gt=d},ge.getCurrentStack=null,ge.getStackAddendum=function(){var d="";Gt&&(d+=Gt);var g=ge.getCurrentStack;return g&&(d+=g()||""),d};var 
Rt=!1,rt=!1,Fn=!1,xe=!1,Ve=!1,yt={ReactCurrentDispatcher:ne,ReactCurrentBatchConfig:Ke,ReactCurrentOwner:Fe};yt.ReactDebugCurrentFrame=ge,yt.ReactCurrentActQueue=de;function gt(d){{for(var g=arguments.length,D=new Array(g>1?g-1:0),O=1;O1?g-1:0),O=1;O1){for(var et=Array(Ie),tt=0;tt1){for(var ut=Array(tt),pt=0;pt is not supported and will be removed in a future major release. Did you mean to render instead?")),g.Provider},set:function(Z){g.Provider=Z}},_currentValue:{get:function(){return g._currentValue},set:function(Z){g._currentValue=Z}},_currentValue2:{get:function(){return g._currentValue2},set:function(Z){g._currentValue2=Z}},_threadCount:{get:function(){return g._threadCount},set:function(Z){g._threadCount=Z}},Consumer:{get:function(){return D||(D=!0,be("Rendering is not supported and will be removed in a future major release. Did you mean to render instead?")),g.Consumer}},displayName:{get:function(){return g.displayName},set:function(Z){P||(gt("Setting `displayName` on Context.Consumer has no effect. You should set it directly on the context with Context.displayName = '%s'.",Z),P=!0)}}}),g.Consumer=le}return g._currentRenderer=null,g._currentRenderer2=null,g}var ia=-1,Ma=0,oa=1,Pr=2;function Sr(d){if(d._status===ia){var g=d._result,D=g();if(D.then(function(le){if(d._status===Ma||d._status===ia){var Z=d;Z._status=oa,Z._result=le}},function(le){if(d._status===Ma||d._status===ia){var Z=d;Z._status=Pr,Z._result=le}}),d._status===ia){var O=d;O._status=Ma,O._result=D}}if(d._status===oa){var P=d._result;return P===void 0&&be(`lazy: Expected the result of a dynamic import() call. Instead received: %s + +Your code should look like: + const MyComponent = lazy(() => import('./MyComponent')) + +Did you accidentally put curly braces around the import?`,P),"default"in P||be(`lazy: Expected the result of a dynamic import() call. 
Instead received: %s + +Your code should look like: + const MyComponent = lazy(() => import('./MyComponent'))`,P),P.default}else throw d._result}function b(d){var g={_status:ia,_result:d},D={$$typeof:B,_payload:g,_init:Sr};{var O,P;Object.defineProperties(D,{defaultProps:{configurable:!0,get:function(){return O},set:function(le){be("React.lazy(...): It is not supported to assign `defaultProps` to a lazy component import. Either specify them where the component is defined, or create a wrapping component around it."),O=le,Object.defineProperty(D,"defaultProps",{enumerable:!0})}},propTypes:{configurable:!0,get:function(){return P},set:function(le){be("React.lazy(...): It is not supported to assign `propTypes` to a lazy component import. Either specify them where the component is defined, or create a wrapping component around it."),P=le,Object.defineProperty(D,"propTypes",{enumerable:!0})}}})}return D}function j(d){d!=null&&d.$$typeof===$?be("forwardRef requires a render function but received a `memo` component. Instead of forwardRef(memo(...)), use memo(forwardRef(...))."):typeof d!="function"?be("forwardRef requires a render function but was given %s.",d===null?"null":typeof d):d.length!==0&&d.length!==2&&be("forwardRef render functions accept exactly two parameters: props and ref. %s",d.length===1?"Did you forget to use the ref parameter?":"Any additional parameter will be undefined."),d!=null&&(d.defaultProps!=null||d.propTypes!=null)&&be("forwardRef render functions do not support propTypes or defaultProps. 
Did you accidentally pass a React component?");var g={$$typeof:M,render:d};{var D;Object.defineProperty(g,"displayName",{enumerable:!1,configurable:!0,get:function(){return D},set:function(O){D=O,!d.name&&!d.displayName&&(d.displayName=O)}})}return g}var G;G=Symbol.for("react.module.reference");function oe(d){return!!(typeof d=="string"||typeof d=="function"||d===S||d===N||Ve||d===f||d===F||d===ee||xe||d===ce||Rt||rt||Fn||typeof d=="object"&&d!==null&&(d.$$typeof===B||d.$$typeof===$||d.$$typeof===z||d.$$typeof===U||d.$$typeof===M||d.$$typeof===G||d.getModuleId!==void 0))}function Ae(d,g){oe(d)||be("memo: The first argument must be a component. Instead received: %s",d===null?"null":typeof d);var D={$$typeof:$,type:d,compare:g===void 0?null:g};{var O;Object.defineProperty(D,"displayName",{enumerable:!1,configurable:!0,get:function(){return O},set:function(P){O=P,!d.name&&!d.displayName&&(d.displayName=P)}})}return D}function fe(){var d=ne.current;return d===null&&be(`Invalid hook call. Hooks can only be called inside of the body of a function component. This could happen for one of the following reasons: +1. You might have mismatching versions of React and the renderer (such as React DOM) +2. You might be breaking the Rules of Hooks +3. You might have more than one copy of React in the same app +See https://reactjs.org/link/invalid-hook-call for tips about how to debug and fix this problem.`),d}function Te(d){var g=fe();if(d._context!==void 0){var D=d._context;D.Consumer===d?be("Calling useContext(Context.Consumer) is not supported, may cause bugs, and will be removed in a future major release. Did you mean to call useContext(Context) instead?"):D.Provider===d&&be("Calling useContext(Context.Provider) is not supported. 
Did you mean to call useContext(Context) instead?")}return g.useContext(d)}function Se(d){var g=fe();return g.useState(d)}function ct(d,g,D){var O=fe();return O.useReducer(d,g,D)}function we(d){var g=fe();return g.useRef(d)}function st(d,g){var D=fe();return D.useEffect(d,g)}function un(d,g){var D=fe();return D.useInsertionEffect(d,g)}function Er(d,g){var D=fe();return D.useLayoutEffect(d,g)}function Rr(d,g){var D=fe();return D.useCallback(d,g)}function Vt(d,g){var D=fe();return D.useMemo(d,g)}function vi(d,g,D){var O=fe();return O.useImperativeHandle(d,g,D)}function Ji(d,g){{var D=fe();return D.useDebugValue(d,g)}}function nu(){var d=fe();return d.useTransition()}function ua(d){var g=fe();return g.useDeferredValue(d)}function _e(){var d=fe();return d.useId()}function hi(d,g,D){var O=fe();return O.useSyncExternalStore(d,g,D)}var Ua=0,ru,au,iu,ou,uu,lu,su;function Jl(){}Jl.__reactDisabledLog=!0;function Cf(){{if(Ua===0){ru=console.log,au=console.info,iu=console.warn,ou=console.error,uu=console.group,lu=console.groupCollapsed,su=console.groupEnd;var d={configurable:!0,enumerable:!0,value:Jl,writable:!0};Object.defineProperties(console,{info:d,log:d,warn:d,error:d,group:d,groupCollapsed:d,groupEnd:d})}Ua++}}function cu(){{if(Ua--,Ua===0){var d={configurable:!0,enumerable:!0,writable:!0};Object.defineProperties(console,{log:Ht({},d,{value:ru}),info:Ht({},d,{value:au}),warn:Ht({},d,{value:iu}),error:Ht({},d,{value:ou}),group:Ht({},d,{value:uu}),groupCollapsed:Ht({},d,{value:lu}),groupEnd:Ht({},d,{value:su})})}Ua<0&&be("disabledDepth fell below zero. This is a bug in React. 
Please file an issue.")}}var mi=yt.ReactCurrentDispatcher,or;function Na(d,g,D){{if(or===void 0)try{throw Error()}catch(P){var O=P.stack.trim().match(/\n( *(at )?)/);or=O&&O[1]||""}return` +`+or+d}}var ka=!1,Zi;{var fu=typeof WeakMap=="function"?WeakMap:Map;Zi=new fu}function Zl(d,g){if(!d||ka)return"";{var D=Zi.get(d);if(D!==void 0)return D}var O;ka=!0;var P=Error.prepareStackTrace;Error.prepareStackTrace=void 0;var le;le=mi.current,mi.current=null,Cf();try{if(g){var Z=function(){throw Error()};if(Object.defineProperty(Z.prototype,"props",{set:function(){throw Error()}}),typeof Reflect=="object"&&Reflect.construct){try{Reflect.construct(Z,[])}catch(Be){O=Be}Reflect.construct(d,[],Z)}else{try{Z.call()}catch(Be){O=Be}d.call(Z.prototype)}}else{try{throw Error()}catch(Be){O=Be}d()}}catch(Be){if(Be&&O&&typeof Be.stack=="string"){for(var pe=Be.stack.split(` +`),Le=O.stack.split(` +`),Ie=pe.length-1,et=Le.length-1;Ie>=1&&et>=0&&pe[Ie]!==Le[et];)et--;for(;Ie>=1&&et>=0;Ie--,et--)if(pe[Ie]!==Le[et]){if(Ie!==1||et!==1)do if(Ie--,et--,et<0||pe[Ie]!==Le[et]){var tt=` +`+pe[Ie].replace(" at new "," at ");return d.displayName&&tt.includes("")&&(tt=tt.replace("",d.displayName)),typeof d=="function"&&Zi.set(d,tt),tt}while(Ie>=1&&et>=0);break}}}finally{ka=!1,mi.current=le,cu(),Error.prepareStackTrace=P}var ut=d?d.displayName||d.name:"",pt=ut?Na(ut):"";return typeof d=="function"&&Zi.set(d,pt),pt}function du(d,g,D){return Zl(d,!1)}function Tf(d){var g=d.prototype;return!!(g&&g.isReactComponent)}function za(d,g,D){if(d==null)return"";if(typeof d=="function")return Zl(d,Tf(d));if(typeof d=="string")return Na(d);switch(d){case F:return Na("Suspense");case ee:return Na("SuspenseList")}if(typeof d=="object")switch(d.$$typeof){case M:return du(d.render);case $:return za(d.type,g,D);case B:{var O=d,P=O._payload,le=O._init;try{return za(le(P),g,D)}catch{}}}return""}var es={},pu=yt.ReactDebugCurrentFrame;function eo(d){if(d){var 
g=d._owner,D=za(d.type,d._source,g?g.type:null);pu.setExtraStackFrame(D)}else pu.setExtraStackFrame(null)}function ts(d,g,D,O,P){{var le=Function.call.bind(gr);for(var Z in d)if(le(d,Z)){var pe=void 0;try{if(typeof d[Z]!="function"){var Le=Error((O||"React class")+": "+D+" type `"+Z+"` is invalid; it must be a function, usually from the `prop-types` package, but received `"+typeof d[Z]+"`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`.");throw Le.name="Invariant Violation",Le}pe=d[Z](g,Z,O,D,null,"SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED")}catch(Ie){pe=Ie}pe&&!(pe instanceof Error)&&(eo(P),be("%s: type specification of %s `%s` is invalid; the type checker function must return `null` or an `Error` but returned a %s. You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument).",O||"React class",D,Z,typeof pe),eo(null)),pe instanceof Error&&!(pe.message in es)&&(es[pe.message]=!0,eo(P),be("Failed %s type: %s",D,pe.message),eo(null))}}}function ze(d){if(d){var g=d._owner,D=za(d.type,d._source,g?g.type:null);Cn(D)}else Cn(null)}var vu;vu=!1;function hu(){if(Fe.current){var d=Dn(Fe.current.type);if(d)return` + +Check the render method of \``+d+"`."}return""}function ye(d){if(d!==void 0){var g=d.fileName.replace(/^.*[\\\/]/,""),D=d.lineNumber;return` + +Check your code at `+g+":"+D+"."}return""}function ns(d){return d!=null?ye(d.__source):""}var ln={};function yi(d){var g=hu();if(!g){var D=typeof d=="string"?d:d.displayName||d.name;D&&(g=` + +Check the top-level render call using <`+D+">.")}return g}function Fa(d,g){if(!(!d._store||d._store.validated||d.key!=null)){d._store.validated=!0;var D=yi(g);if(!ln[D]){ln[D]=!0;var O="";d&&d._owner&&d._owner!==Fe.current&&(O=" It was passed a child from "+Dn(d._owner.type)+"."),ze(d),be('Each child in a list should have a unique "key" prop.%s%s See https://reactjs.org/link/warning-keys 
for more information.',D,O),ze(null)}}}function rs(d,g){if(typeof d=="object"){if(Ct(d))for(var D=0;D",P=" Did you accidentally export a JSX literal instead of a component?"):Z=typeof d,be("React.createElement: type is invalid -- expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s",Z,P)}var pe=re.apply(this,arguments);if(pe==null)return pe;if(O)for(var Le=2;Le10&>("Detected a large number of updates inside startTransition. If this is due to a subscription please re-write it to use React provided hooks. Otherwise concurrent mode guarantees are off the table."),O._updatedFibers.clear()}}}var to=!1,gi=null;function is(d){if(gi===null)try{var g=("require"+Math.random()).slice(0,7),D=l&&l[g];gi=D.call(l,"timers").setImmediate}catch{gi=function(P){to===!1&&(to=!0,typeof MessageChannel>"u"&&be("This browser does not have a MessageChannel implementation, so enqueuing tasks via await act(async () => ...) will fail. Please file an issue at https://github.com/facebook/react/issues if you encounter this warning."));var le=new MessageChannel;le.port1.onmessage=P,le.port2.postMessage(void 0)}}return gi(d)}var Ha=0,os=!1;function us(d){{var g=Ha;Ha++,de.current===null&&(de.current=[]);var D=de.isBatchingLegacy,O;try{if(de.isBatchingLegacy=!0,O=d(),!D&&de.didScheduleLegacyUpdate){var P=de.current;P!==null&&(de.didScheduleLegacyUpdate=!1,ro(P))}}catch(ut){throw la(g),ut}finally{de.isBatchingLegacy=D}if(O!==null&&typeof O=="object"&&typeof O.then=="function"){var le=O,Z=!1,pe={then:function(ut,pt){Z=!0,le.then(function(Be){la(g),Ha===0?no(Be,ut,pt):ut(Be)},function(Be){la(g),pt(Be)})}};return!os&&typeof Promise<"u"&&Promise.resolve().then(function(){}).then(function(){Z||(os=!0,be("You called act(async () => ...) without await. This could lead to unexpected testing behaviour, interleaving multiple act calls and mixing their scopes. 
You should - await act(async () => ...);"))}),pe}else{var Le=O;if(la(g),Ha===0){var Ie=de.current;Ie!==null&&(ro(Ie),de.current=null);var et={then:function(ut,pt){de.current===null?(de.current=[],no(Le,ut,pt)):ut(Le)}};return et}else{var tt={then:function(ut,pt){ut(Le)}};return tt}}}}function la(d){d!==Ha-1&&be("You seem to have overlapping act() calls, this is not supported. Be sure to await previous act() calls before making a new one. "),Ha=d}function no(d,g,D){{var O=de.current;if(O!==null)try{ro(O),is(function(){O.length===0?(de.current=null,g(d)):no(d,g,D)})}catch(P){D(P)}else g(d)}}var ja=!1;function ro(d){if(!ja){ja=!0;var g=0;try{for(;g1?j-1:0),oe=1;oe=1&&st>=0&&Se[we]!==ct[st];)st--;for(;we>=1&&st>=0;we--,st--)if(Se[we]!==ct[st]){if(we!==1||st!==1)do if(we--,st--,st<0||Se[we]!==ct[st]){var un=` +`+Se[we].replace(" at new "," at ");return b.displayName&&un.includes("")&&(un=un.replace("",b.displayName)),typeof b=="function"&&xn.set(b,un),un}while(we>=1&&st>=0);break}}}finally{Wt=!1,mr.current=fe,Vr(),Error.prepareStackTrace=Ae}var Er=b?b.displayName||b.name:"",Rr=Er?tr(Er):"";return typeof b=="function"&&xn.set(b,Rr),Rr}function Ct(b,j,G){return Vn(b,!1)}function an(b){var j=b.prototype;return!!(j&&j.isReactComponent)}function jt(b,j,G){if(b==null)return"";if(typeof b=="function")return Vn(b,an(b));if(typeof b=="string")return tr(b);switch(b){case U:return tr("Suspense");case M:return tr("SuspenseList")}if(typeof b=="object")switch(b.$$typeof){case z:return Ct(b.render);case F:return jt(b.type,j,G);case ee:{var oe=b,Ae=oe._payload,fe=oe._init;try{return jt(fe(Ae),j,G)}catch{}}}return""}var wt=Object.prototype.hasOwnProperty,Ot={},Bn=Ye.ReactDebugCurrentFrame;function nr(b){if(b){var j=b._owner,G=jt(b.type,b._source,j?j.type:null);Bn.setExtraStackFrame(G)}else Bn.setExtraStackFrame(null)}function Dn(b,j,G,oe,Ae){{var fe=Function.call.bind(wt);for(var Te in b)if(fe(b,Te)){var Se=void 0;try{if(typeof b[Te]!="function"){var ct=Error((oe||"React class")+": 
"+G+" type `"+Te+"` is invalid; it must be a function, usually from the `prop-types` package, but received `"+typeof b[Te]+"`.This often happens because of typos such as `PropTypes.function` instead of `PropTypes.func`.");throw ct.name="Invariant Violation",ct}Se=b[Te](j,Te,oe,G,null,"SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED")}catch(we){Se=we}Se&&!(Se instanceof Error)&&(nr(Ae),Ce("%s: type specification of %s `%s` is invalid; the type checker function must return `null` or an `Error` but returned a %s. You may have forgotten to pass an argument to the type checker creator (arrayOf, instanceOf, objectOf, oneOf, oneOfType, and shape all require an argument).",oe||"React class",G,Te,typeof Se),nr(null)),Se instanceof Error&&!(Se.message in Ot)&&(Ot[Se.message]=!0,nr(Ae),Ce("Failed %s type: %s",G,Se.message),nr(null))}}}var gr=Array.isArray;function rr(b){return gr(b)}function hn(b){{var j=typeof Symbol=="function"&&Symbol.toStringTag,G=j&&b[Symbol.toStringTag]||b.constructor.name||"Object";return G}}function ar(b){try{return on(b),!1}catch{return!0}}function on(b){return""+b}function Pn(b){if(ar(b))return Ce("The provided key is an unsupported type %s. This value must be coerced to a string before before using it here.",hn(b)),on(b)}var bt=Ye.ReactCurrentOwner,ir={key:!0,ref:!0,__self:!0,__source:!0},ra,aa,q;q={};function re(b){if(wt.call(b,"ref")){var j=Object.getOwnPropertyDescriptor(b,"ref").get;if(j&&j.isReactWarning)return!1}return b.ref!==void 0}function De(b){if(wt.call(b,"key")){var j=Object.getOwnPropertyDescriptor(b,"key").get;if(j&&j.isReactWarning)return!1}return b.key!==void 0}function qe(b,j){if(typeof b.ref=="string"&&bt.current&&j&&bt.current.stateNode!==j){var G=xe(bt.current.type);q[G]||(Ce('Component "%s" contains the string ref "%s". Support for string refs will be removed in a future major release. This case cannot be automatically converted to an arrow function. We ask you to manually fix this case by using useRef() or createRef() instead. 
Learn more about using refs safely here: https://reactjs.org/link/strict-mode-string-ref',xe(bt.current.type),b.ref),q[G]=!0)}}function Ge(b,j){{var G=function(){ra||(ra=!0,Ce("%s: `key` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)",j))};G.isReactWarning=!0,Object.defineProperty(b,"key",{get:G,configurable:!0})}}function _t(b,j){{var G=function(){aa||(aa=!0,Ce("%s: `ref` is not a prop. Trying to access it will result in `undefined` being returned. If you need to access the same value within the child component, you should pass it as a different prop. (https://reactjs.org/link/special-props)",j))};G.isReactWarning=!0,Object.defineProperty(b,"ref",{get:G,configurable:!0})}}var St=function(b,j,G,oe,Ae,fe,Te){var Se={$$typeof:s,type:b,key:j,ref:G,props:Te,_owner:fe};return Se._store={},Object.defineProperty(Se._store,"validated",{configurable:!1,enumerable:!1,writable:!0,value:!1}),Object.defineProperty(Se,"_self",{configurable:!1,enumerable:!1,writable:!1,value:oe}),Object.defineProperty(Se,"_source",{configurable:!1,enumerable:!1,writable:!1,value:Ae}),Object.freeze&&(Object.freeze(Se.props),Object.freeze(Se)),Se};function wn(b,j,G,oe,Ae){{var fe,Te={},Se=null,ct=null;G!==void 0&&(Pn(G),Se=""+G),De(j)&&(Pn(j.key),Se=""+j.key),re(j)&&(ct=j.ref,qe(j,Ae));for(fe in j)wt.call(j,fe)&&!ir.hasOwnProperty(fe)&&(Te[fe]=j[fe]);if(b&&b.defaultProps){var we=b.defaultProps;for(fe in we)Te[fe]===void 0&&(Te[fe]=we[fe])}if(Se||ct){var st=typeof b=="function"?b.displayName||b.name||"Unknown":b;Se&&Ge(Te,st),ct&&_t(Te,st)}return St(b,Se,ct,Ae,oe,bt.current,Te)}}var Ze=Ye.ReactCurrentOwner,$n=Ye.ReactDebugCurrentFrame;function it(b){if(b){var j=b._owner,G=jt(b.type,b._source,j?j.type:null);$n.setExtraStackFrame(G)}else $n.setExtraStackFrame(null)}var ot;ot=!1;function Br(b){return typeof 
b=="object"&&b!==null&&b.$$typeof===s}function br(){{if(Ze.current){var b=xe(Ze.current.type);if(b)return` + +Check the render method of \``+b+"`."}return""}}function fi(b){{if(b!==void 0){var j=b.fileName.replace(/^.*[\\\/]/,""),G=b.lineNumber;return` + +Check your code at `+j+":"+G+"."}return""}}var Ki={};function tu(b){{var j=br();if(!j){var G=typeof b=="string"?b:b.displayName||b.name;G&&(j=` + +Check the top-level render call using <`+G+">.")}return j}}function di(b,j){{if(!b._store||b._store.validated||b.key!=null)return;b._store.validated=!0;var G=tu(j);if(Ki[G])return;Ki[G]=!0;var oe="";b&&b._owner&&b._owner!==Ze.current&&(oe=" It was passed a child from "+xe(b._owner.type)+"."),it(b),Ce('Each child in a list should have a unique "key" prop.%s%s See https://reactjs.org/link/warning-keys for more information.',G,oe),it(null)}}function pi(b,j){{if(typeof b!="object")return;if(rr(b))for(var G=0;G",Se=" Did you accidentally export a JSX literal instead of a component?"):we=typeof b,Ce("React.jsx: type is invalid -- expected a string (for built-in components) or a class/function (for composite components) but got: %s.%s",we,Se)}var st=wn(b,j,G,Ae,fe);if(st==null)return st;if(Te){var un=j.children;if(un!==void 0)if(oe)if(rr(un)){for(var Er=0;Er0?"{key: someKey, "+Vt.join(": ..., ")+": ...}":"{key: someKey}";if(!oa[Rr+vi]){var Ji=Vt.length>0?"{"+Vt.join(": ..., ")+": ...}":"{}";Ce(`A props object containing a "key" prop is being spread into JSX: + let props = %s; + <%s {...props} /> +React keys must be passed directly to JSX without using spread: + let props = %s; + <%s key={someKey} {...props} />`,vi,Rr,Ji,Rr),oa[Rr+vi]=!0}}return b===v?Ma(st):ia(st),st}}var Sr=Pr;Uh.Fragment=v,Uh.jsxDEV=Sr})();SE.exports=Uh;var kn=SE.exports,Nh={},RE={exports:{}},Zn={},CE={exports:{}},TE={};(function(l){/** + * @license React + * scheduler.development.js + * + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */(function(){typeof __REACT_DEVTOOLS_GLOBAL_HOOK__<"u"&&typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart=="function"&&__REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart(new Error);var s=!1,p=!1,v=5;function y(q,re){var De=q.length;q.push(re),N(q,re,De)}function S(q){return q.length===0?null:q[0]}function f(q){if(q.length===0)return null;var re=q[0],De=q.pop();return De!==re&&(q[0]=De,z(q,De,0)),re}function N(q,re,De){for(var qe=De;qe>0;){var Ge=qe-1>>>1,_t=q[Ge];if(U(_t,re)>0)q[Ge]=re,q[qe]=_t,qe=Ge;else return}}function z(q,re,De){for(var qe=De,Ge=q.length,_t=Ge>>>1;qe<_t;){var St=(qe+1)*2-1,wn=q[St],Ze=St+1,$n=q[Ze];if(U(wn,re)<0)ZeDe&&(!q||nr()));){var qe=xe.callback;if(typeof qe=="function"){xe.callback=null,Ve=xe.priorityLevel;var Ge=xe.expirationTime<=De,_t=qe(Ge);De=l.unstable_now(),typeof _t=="function"?xe.callback=_t:xe===S(Rt)&&f(Rt),hr(De)}else f(Rt);xe=S(Rt)}if(xe!==null)return!0;var St=S(rt);return St!==null&&bt(Ht,St.startTime-De),!1}function Vr(q,re){switch(q){case M:case F:case ee:case $:case B:break;default:q=ee}var De=Ve;Ve=q;try{return re()}finally{Ve=De}}function mr(q){var re;switch(Ve){case M:case F:case ee:re=ee;break;default:re=Ve;break}var De=Ve;Ve=re;try{return q()}finally{Ve=De}}function yr(q){var re=Ve;return function(){var De=Ve;Ve=re;try{return q.apply(this,arguments)}finally{Ve=De}}}function tr(q,re,De){var qe=l.unstable_now(),Ge;if(typeof De=="object"&&De!==null){var _t=De.delay;typeof _t=="number"&&_t>0?Ge=qe+_t:Ge=qe}else Ge=qe;var St;switch(q){case M:St=de;break;case F:St=Fe;break;case B:St=Cn;break;case $:St=Gt;break;case ee:default:St=ge;break}var wn=Ge+St,Ze={id:Fn++,callback:re,priorityLevel:q,startTime:Ge,expirationTime:wn,sortIndex:-1};return 
Ge>qe?(Ze.sortIndex=Ge,y(rt,Ze),S(Rt)===null&&Ze===S(rt)&&(be?ir():be=!0,bt(Ht,Ge-qe))):(Ze.sortIndex=wn,y(Rt,Ze),!gt&&!yt&&(gt=!0,Pn(Tn))),Ze}function Wt(){}function xn(){!gt&&!yt&&(gt=!0,Pn(Tn))}function jn(){return S(Rt)}function Vn(q){q.callback=null}function Ct(){return Ve}var an=!1,jt=null,wt=-1,Ot=v,Bn=-1;function nr(){var q=l.unstable_now()-Bn;return!(q125){console.error("forceFrameRate takes a positive int between 0 and 125, forcing frame rates higher than 125 fps is not supported");return}q>0?Ot=Math.floor(1e3/q):Ot=v}var rr=function(){if(jt!==null){var q=l.unstable_now();Bn=q;var re=!0,De=!0;try{De=jt(re,q)}finally{De?hn():(an=!1,jt=null)}}else an=!1},hn;if(typeof er=="function")hn=function(){er(rr)};else if(typeof MessageChannel<"u"){var ar=new MessageChannel,on=ar.port2;ar.port1.onmessage=rr,hn=function(){on.postMessage(null)}}else hn=function(){It(rr,0)};function Pn(q){jt=q,an||(an=!0,hn())}function bt(q,re){wt=It(function(){q(l.unstable_now())},re)}function ir(){jr(wt),wt=-1}var ra=Dn,aa=null;l.unstable_IdlePriority=B,l.unstable_ImmediatePriority=M,l.unstable_LowPriority=$,l.unstable_NormalPriority=ee,l.unstable_Profiling=aa,l.unstable_UserBlockingPriority=F,l.unstable_cancelCallback=Vn,l.unstable_continueExecution=xn,l.unstable_forceFrameRate=gr,l.unstable_getCurrentPriorityLevel=Ct,l.unstable_getFirstCallbackNode=jn,l.unstable_next=mr,l.unstable_pauseExecution=Wt,l.unstable_requestPaint=ra,l.unstable_runWithPriority=Vr,l.unstable_scheduleCallback=tr,l.unstable_shouldYield=nr,l.unstable_wrapCallback=yr,typeof __REACT_DEVTOOLS_GLOBAL_HOOK__<"u"&&typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStop=="function"&&__REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStop(new Error)})()})(TE);CE.exports=TE;var pA=CE.exports;/** + * @license React + * react-dom.development.js + * + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */(function(){typeof __REACT_DEVTOOLS_GLOBAL_HOOK__<"u"&&typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart=="function"&&__REACT_DEVTOOLS_GLOBAL_HOOK__.registerInternalModuleStart(new Error);var l=Qi,s=pA,p=l.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,v=!1;function y(e){v=e}function S(e){if(!v){for(var t=arguments.length,n=new Array(t>1?t-1:0),r=1;r1?t-1:0),r=1;r2&&(e[0]==="o"||e[0]==="O")&&(e[1]==="n"||e[1]==="N")}function wn(e,t,n,r){if(n!==null&&n.type===ar)return!1;switch(typeof t){case"function":case"symbol":return!0;case"boolean":{if(r)return!1;if(n!==null)return!n.acceptsBooleans;var a=e.toLowerCase().slice(0,5);return a!=="data-"&&a!=="aria-"}default:return!1}}function Ze(e,t,n,r){if(t===null||typeof t>"u"||wn(e,t,n,r))return!0;if(r)return!1;if(n!==null)switch(n.type){case bt:return!t;case ir:return t===!1;case ra:return isNaN(t);case aa:return isNaN(t)||t<1}return!1}function $n(e){return ot.hasOwnProperty(e)?ot[e]:null}function it(e,t,n,r,a,i,o){this.acceptsBooleans=t===Pn||t===bt||t===ir,this.attributeName=r,this.attributeNamespace=a,this.mustUseProperty=n,this.propertyName=e,this.type=t,this.sanitizeURL=i,this.removeEmptyString=o}var ot={},Br=["children","dangerouslySetInnerHTML","defaultValue","defaultChecked","innerHTML","suppressContentEditableWarning","suppressHydrationWarning","style"];Br.forEach(function(e){ot[e]=new it(e,ar,!1,e,null,!1,!1)}),[["acceptCharset","accept-charset"],["className","class"],["htmlFor","for"],["httpEquiv","http-equiv"]].forEach(function(e){var t=e[0],n=e[1];ot[t]=new it(t,on,!1,n,null,!1,!1)}),["contentEditable","draggable","spellCheck","value"].forEach(function(e){ot[e]=new it(e,Pn,!1,e.toLowerCase(),null,!1,!1)}),["autoReverse","externalResourcesRequired","focusable","preserveAlpha"].forEach(function(e){ot[e]=new 
it(e,Pn,!1,e,null,!1,!1)}),["allowFullScreen","async","autoFocus","autoPlay","controls","default","defer","disabled","disablePictureInPicture","disableRemotePlayback","formNoValidate","hidden","loop","noModule","noValidate","open","playsInline","readOnly","required","reversed","scoped","seamless","itemScope"].forEach(function(e){ot[e]=new it(e,bt,!1,e.toLowerCase(),null,!1,!1)}),["checked","multiple","muted","selected"].forEach(function(e){ot[e]=new it(e,bt,!0,e,null,!1,!1)}),["capture","download"].forEach(function(e){ot[e]=new it(e,ir,!1,e,null,!1,!1)}),["cols","rows","size","span"].forEach(function(e){ot[e]=new it(e,aa,!1,e,null,!1,!1)}),["rowSpan","start"].forEach(function(e){ot[e]=new it(e,ra,!1,e.toLowerCase(),null,!1,!1)});var br=/[\-\:]([a-z])/g,fi=function(e){return e[1].toUpperCase()};["accent-height","alignment-baseline","arabic-form","baseline-shift","cap-height","clip-path","clip-rule","color-interpolation","color-interpolation-filters","color-profile","color-rendering","dominant-baseline","enable-background","fill-opacity","fill-rule","flood-color","flood-opacity","font-family","font-size","font-size-adjust","font-stretch","font-style","font-variant","font-weight","glyph-name","glyph-orientation-horizontal","glyph-orientation-vertical","horiz-adv-x","horiz-origin-x","image-rendering","letter-spacing","lighting-color","marker-end","marker-mid","marker-start","overline-position","overline-thickness","paint-order","panose-1","pointer-events","rendering-intent","shape-rendering","stop-color","stop-opacity","strikethrough-position","strikethrough-thickness","stroke-dasharray","stroke-dashoffset","stroke-linecap","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke-width","text-anchor","text-decoration","text-rendering","underline-position","underline-thickness","unicode-bidi","unicode-range","units-per-em","v-alphabetic","v-hanging","v-ideographic","v-mathematical","vector-effect","vert-adv-y","vert-origin-x","vert-origin-y","word-spacing","writing
-mode","xmlns:xlink","x-height"].forEach(function(e){var t=e.replace(br,fi);ot[t]=new it(t,on,!1,e,null,!1,!1)}),["xlink:actuate","xlink:arcrole","xlink:role","xlink:show","xlink:title","xlink:type"].forEach(function(e){var t=e.replace(br,fi);ot[t]=new it(t,on,!1,e,"http://www.w3.org/1999/xlink",!1,!1)}),["xml:base","xml:lang","xml:space"].forEach(function(e){var t=e.replace(br,fi);ot[t]=new it(t,on,!1,e,"http://www.w3.org/XML/1998/namespace",!1,!1)}),["tabIndex","crossOrigin"].forEach(function(e){ot[e]=new it(e,on,!1,e.toLowerCase(),null,!1,!1)});var Ki="xlinkHref";ot[Ki]=new it("xlinkHref",on,!1,"xlink:href","http://www.w3.org/1999/xlink",!0,!1),["src","href","action","formAction"].forEach(function(e){ot[e]=new it(e,on,!1,e.toLowerCase(),null,!0,!0)});var tu=/^[\u0000-\u001F ]*j[\r\n\t]*a[\r\n\t]*v[\r\n\t]*a[\r\n\t]*s[\r\n\t]*c[\r\n\t]*r[\r\n\t]*i[\r\n\t]*p[\r\n\t]*t[\r\n\t]*\:/i,di=!1;function pi(e){!di&&tu.test(e)&&(di=!0,f("A future version of React will block javascript: URLs as a security precaution. Use event handlers instead if you can. If you need to generate unsafe HTML try using dangerouslySetInnerHTML instead. 
React was passed %s.",JSON.stringify(e)))}function ia(e,t,n,r){if(r.mustUseProperty){var a=r.propertyName;return e[a]}else{Bn(n,t),r.sanitizeURL&&pi(""+n);var i=r.attributeName,o=null;if(r.type===ir){if(e.hasAttribute(i)){var u=e.getAttribute(i);return u===""?!0:Ze(t,n,r,!1)?u:u===""+n?n:u}}else if(e.hasAttribute(i)){if(Ze(t,n,r,!1))return e.getAttribute(i);if(r.type===bt)return n;o=e.getAttribute(i)}return Ze(t,n,r,!1)?o===null?n:o:o===""+n?n:o}}function Ma(e,t,n,r){{if(!_t(t))return;if(!e.hasAttribute(t))return n===void 0?void 0:null;var a=e.getAttribute(t);return Bn(n,t),a===""+n?n:a}}function oa(e,t,n,r){var a=$n(t);if(!St(t,a,r)){if(Ze(t,n,a,r)&&(n=null),r||a===null){if(_t(t)){var i=t;n===null?e.removeAttribute(i):(Bn(n,t),e.setAttribute(i,""+n))}return}var o=a.mustUseProperty;if(o){var u=a.propertyName;if(n===null){var c=a.type;e[u]=c===bt?!1:""}else e[u]=n;return}var h=a.attributeName,m=a.attributeNamespace;if(n===null)e.removeAttribute(h);else{var R=a.type,E;R===bt||R===ir&&n===!0?E="":(Bn(n,h),E=""+n,a.sanitizeURL&&pi(E.toString())),m?e.setAttributeNS(m,h,E):e.setAttribute(h,E)}}}var Pr=Symbol.for("react.element"),Sr=Symbol.for("react.portal"),b=Symbol.for("react.fragment"),j=Symbol.for("react.strict_mode"),G=Symbol.for("react.profiler"),oe=Symbol.for("react.provider"),Ae=Symbol.for("react.context"),fe=Symbol.for("react.forward_ref"),Te=Symbol.for("react.suspense"),Se=Symbol.for("react.suspense_list"),ct=Symbol.for("react.memo"),we=Symbol.for("react.lazy"),st=Symbol.for("react.scope"),un=Symbol.for("react.debug_trace_mode"),Er=Symbol.for("react.offscreen"),Rr=Symbol.for("react.legacy_hidden"),Vt=Symbol.for("react.cache"),vi=Symbol.for("react.tracing_marker"),Ji=Symbol.iterator,nu="@@iterator";function ua(e){if(e===null||typeof e!="object")return null;var t=Ji&&e[Ji]||e[nu];return typeof t=="function"?t:null}var _e=Object.assign,hi=0,Ua,ru,au,iu,ou,uu,lu;function su(){}su.__reactDisabledLog=!0;function 
Jl(){{if(hi===0){Ua=console.log,ru=console.info,au=console.warn,iu=console.error,ou=console.group,uu=console.groupCollapsed,lu=console.groupEnd;var e={configurable:!0,enumerable:!0,value:su,writable:!0};Object.defineProperties(console,{info:e,log:e,warn:e,error:e,group:e,groupCollapsed:e,groupEnd:e})}hi++}}function Cf(){{if(hi--,hi===0){var e={configurable:!0,enumerable:!0,writable:!0};Object.defineProperties(console,{log:_e({},e,{value:Ua}),info:_e({},e,{value:ru}),warn:_e({},e,{value:au}),error:_e({},e,{value:iu}),group:_e({},e,{value:ou}),groupCollapsed:_e({},e,{value:uu}),groupEnd:_e({},e,{value:lu})})}hi<0&&f("disabledDepth fell below zero. This is a bug in React. Please file an issue.")}}var cu=p.ReactCurrentDispatcher,mi;function or(e,t,n){{if(mi===void 0)try{throw Error()}catch(a){var r=a.stack.trim().match(/\n( *(at )?)/);mi=r&&r[1]||""}return` +`+mi+e}}var Na=!1,ka;{var Zi=typeof WeakMap=="function"?WeakMap:Map;ka=new Zi}function fu(e,t){if(!e||Na)return"";{var n=ka.get(e);if(n!==void 0)return n}var r;Na=!0;var a=Error.prepareStackTrace;Error.prepareStackTrace=void 0;var i;i=cu.current,cu.current=null,Jl();try{if(t){var o=function(){throw Error()};if(Object.defineProperty(o.prototype,"props",{set:function(){throw Error()}}),typeof Reflect=="object"&&Reflect.construct){try{Reflect.construct(o,[])}catch(w){r=w}Reflect.construct(e,[],o)}else{try{o.call()}catch(w){r=w}e.call(o.prototype)}}else{try{throw Error()}catch(w){r=w}e()}}catch(w){if(w&&r&&typeof w.stack=="string"){for(var u=w.stack.split(` +`),c=r.stack.split(` +`),h=u.length-1,m=c.length-1;h>=1&&m>=0&&u[h]!==c[m];)m--;for(;h>=1&&m>=0;h--,m--)if(u[h]!==c[m]){if(h!==1||m!==1)do if(h--,m--,m<0||u[h]!==c[m]){var R=` +`+u[h].replace(" at new "," at ");return e.displayName&&R.includes("")&&(R=R.replace("",e.displayName)),typeof e=="function"&&ka.set(e,R),R}while(h>=1&&m>=0);break}}}finally{Na=!1,cu.current=i,Cf(),Error.prepareStackTrace=a}var E=e?e.displayName||e.name:"",x=E?or(E):"";return typeof 
e=="function"&&ka.set(e,x),x}function Zl(e,t,n){return fu(e,!0)}function du(e,t,n){return fu(e,!1)}function Tf(e){var t=e.prototype;return!!(t&&t.isReactComponent)}function za(e,t,n){if(e==null)return"";if(typeof e=="function")return fu(e,Tf(e));if(typeof e=="string")return or(e);switch(e){case Te:return or("Suspense");case Se:return or("SuspenseList")}if(typeof e=="object")switch(e.$$typeof){case fe:return du(e.render);case ct:return za(e.type,t,n);case we:{var r=e,a=r._payload,i=r._init;try{return za(i(a),t,n)}catch{}}}return""}function es(e){switch(e._debugOwner&&e._debugOwner.type,e._debugSource,e.tag){case $:return or(e.type);case Gt:return or("Lazy");case de:return or("Suspense");case rt:return or("SuspenseList");case z:case M:case ge:return du(e.type);case ne:return du(e.type.render);case U:return Zl(e.type);default:return""}}function pu(e){try{var t="",n=e;do t+=es(n),n=n.return;while(n);return t}catch(r){return` +Error generating stack: `+r.message+` +`+r.stack}}function eo(e,t,n){var r=e.displayName;if(r)return r;var a=t.displayName||t.name||"";return a!==""?n+"("+a+")":n}function ts(e){return e.displayName||"Context"}function ze(e){if(e==null)return null;if(typeof e.tag=="number"&&f("Received an unexpected object in getComponentNameFromType(). This is likely a bug in React. 
Please file an issue."),typeof e=="function")return e.displayName||e.name||null;if(typeof e=="string")return e;switch(e){case b:return"Fragment";case Sr:return"Portal";case G:return"Profiler";case j:return"StrictMode";case Te:return"Suspense";case Se:return"SuspenseList"}if(typeof e=="object")switch(e.$$typeof){case Ae:var t=e;return ts(t)+".Consumer";case oe:var n=e;return ts(n._context)+".Provider";case fe:return eo(e,e.render,"ForwardRef");case ct:var r=e.displayName||null;return r!==null?r:ze(e.type)||"Memo";case we:{var a=e,i=a._payload,o=a._init;try{return ze(o(i))}catch{return null}}}return null}function vu(e,t,n){var r=t.displayName||t.name||"";return e.displayName||(r!==""?n+"("+r+")":n)}function hu(e){return e.displayName||"Context"}function ye(e){var t=e.tag,n=e.type;switch(t){case yt:return"Cache";case Ye:var r=n;return hu(r)+".Consumer";case Ce:var a=n;return hu(a._context)+".Provider";case Rt:return"DehydratedFragment";case ne:return vu(n,n.render,"ForwardRef");case ce:return"Fragment";case $:return n;case ee:return"Portal";case F:return"Root";case B:return"Text";case Gt:return ze(n);case Je:return n===j?"StrictMode":"Mode";case xe:return"Offscreen";case Ke:return"Profiler";case Fn:return"Scope";case de:return"Suspense";case rt:return"SuspenseList";case gt:return"TracingMarker";case U:case z:case Cn:case M:case Fe:case ge:if(typeof n=="function")return n.displayName||n.name||null;if(typeof n=="string")return n;break}return null}var ns=p.ReactDebugCurrentFrame,ln=null,yi=!1;function Fa(){{if(ln===null)return null;var e=ln._debugOwner;if(e!==null&&typeof e<"u")return ye(e)}return null}function rs(){return ln===null?"":pu(ln)}function Bt(){ns.getCurrentStack=null,ln=null,yi=!1}function ft(e){ns.getCurrentStack=e===null?null:rs,ln=e,yi=!1}function as(){return ln}function Yn(e){yi=e}function On(e){return""+e}function $r(e){switch(typeof e){case"boolean":case"number":case"string":case"undefined":return e;case"object":return hn(e),e;default:return""}}var 
xf={button:!0,checkbox:!0,image:!0,hidden:!0,radio:!0,reset:!0,submit:!0};function to(e,t){xf[t.type]||t.onChange||t.onInput||t.readOnly||t.disabled||t.value==null||f("You provided a `value` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultValue`. Otherwise, set either `onChange` or `readOnly`."),t.onChange||t.readOnly||t.disabled||t.checked==null||f("You provided a `checked` prop to a form field without an `onChange` handler. This will render a read-only field. If the field should be mutable use `defaultChecked`. Otherwise, set either `onChange` or `readOnly`.")}function gi(e){var t=e.type,n=e.nodeName;return n&&n.toLowerCase()==="input"&&(t==="checkbox"||t==="radio")}function is(e){return e._valueTracker}function Ha(e){e._valueTracker=null}function os(e){var t="";return e&&(gi(e)?t=e.checked?"true":"false":t=e.value),t}function us(e){var t=gi(e)?"checked":"value",n=Object.getOwnPropertyDescriptor(e.constructor.prototype,t);hn(e[t]);var r=""+e[t];if(!(e.hasOwnProperty(t)||typeof n>"u"||typeof n.get!="function"||typeof n.set!="function")){var a=n.get,i=n.set;Object.defineProperty(e,t,{configurable:!0,get:function(){return a.call(this)},set:function(u){hn(u),r=""+u,i.call(this,u)}}),Object.defineProperty(e,t,{enumerable:n.enumerable});var o={getValue:function(){return r},setValue:function(u){hn(u),r=""+u},stopTracking:function(){Ha(e),delete e[t]}};return o}}function la(e){is(e)||(e._valueTracker=us(e))}function no(e){if(!e)return!1;var t=is(e);if(!t)return!0;var n=t.getValue(),r=os(e);return r!==n?(t.setValue(r),!0):!1}function ja(e){if(e=e||(typeof document<"u"?document:void 0),typeof e>"u")return null;try{return e.activeElement||e.body}catch{return e.body}}var ro=!1,ls=!1,ss=!1,cs=!1;function fs(e){var t=e.type==="checkbox"||e.type==="radio";return t?e.checked!=null:e.value!=null}function d(e,t){var n=e,r=t.checked,a=_e({},t,{defaultChecked:void 0,defaultValue:void 0,value:void 
0,checked:r??n._wrapperState.initialChecked});return a}function g(e,t){to("input",t),t.checked!==void 0&&t.defaultChecked!==void 0&&!ls&&(f("%s contains an input of type %s with both checked and defaultChecked props. Input elements must be either controlled or uncontrolled (specify either the checked prop, or the defaultChecked prop, but not both). Decide between using a controlled or uncontrolled input element and remove one of these props. More info: https://reactjs.org/link/controlled-components",Fa()||"A component",t.type),ls=!0),t.value!==void 0&&t.defaultValue!==void 0&&!ro&&(f("%s contains an input of type %s with both value and defaultValue props. Input elements must be either controlled or uncontrolled (specify either the value prop, or the defaultValue prop, but not both). Decide between using a controlled or uncontrolled input element and remove one of these props. More info: https://reactjs.org/link/controlled-components",Fa()||"A component",t.type),ro=!0);var n=e,r=t.defaultValue==null?"":t.defaultValue;n._wrapperState={initialChecked:t.checked!=null?t.checked:t.defaultChecked,initialValue:$r(t.value!=null?t.value:r),controlled:fs(t)}}function D(e,t){var n=e,r=t.checked;r!=null&&oa(n,"checked",r,!1)}function O(e,t){var n=e;{var r=fs(t);!n._wrapperState.controlled&&r&&!cs&&(f("A component is changing an uncontrolled input to be controlled. This is likely caused by the value changing from undefined to a defined value, which should not happen. Decide between using a controlled or uncontrolled input element for the lifetime of the component. More info: https://reactjs.org/link/controlled-components"),cs=!0),n._wrapperState.controlled&&!r&&!ss&&(f("A component is changing a controlled input to be uncontrolled. This is likely caused by the value changing from a defined to undefined, which should not happen. Decide between using a controlled or uncontrolled input element for the lifetime of the component. 
More info: https://reactjs.org/link/controlled-components"),ss=!0)}D(e,t);var a=$r(t.value),i=t.type;if(a!=null)i==="number"?(a===0&&n.value===""||n.value!=a)&&(n.value=On(a)):n.value!==On(a)&&(n.value=On(a));else if(i==="submit"||i==="reset"){n.removeAttribute("value");return}t.hasOwnProperty("value")?pe(n,t.type,a):t.hasOwnProperty("defaultValue")&&pe(n,t.type,$r(t.defaultValue)),t.checked==null&&t.defaultChecked!=null&&(n.defaultChecked=!!t.defaultChecked)}function P(e,t,n){var r=e;if(t.hasOwnProperty("value")||t.hasOwnProperty("defaultValue")){var a=t.type,i=a==="submit"||a==="reset";if(i&&(t.value===void 0||t.value===null))return;var o=On(r._wrapperState.initialValue);n||o!==r.value&&(r.value=o),r.defaultValue=o}var u=r.name;u!==""&&(r.name=""),r.defaultChecked=!r.defaultChecked,r.defaultChecked=!!r._wrapperState.initialChecked,u!==""&&(r.name=u)}function le(e,t){var n=e;O(n,t),Z(n,t)}function Z(e,t){var n=t.name;if(t.type==="radio"&&n!=null){for(var r=e;r.parentNode;)r=r.parentNode;Bn(n,"name");for(var a=r.querySelectorAll("input[name="+JSON.stringify(""+n)+'][type="radio"]'),i=0;i.")))}):t.dangerouslySetInnerHTML!=null&&(et||(et=!0,f("Pass a `value` prop if you set dangerouslyInnerHTML so React knows which value should be selected.")))),t.selected!=null&&!Le&&(f("Use the `defaultValue` or `value` props on must be a scalar value if `multiple` is false.%s",n,ao())}}}}function sa(e,t,n,r){var a=e.options;if(t){for(var i=n,o={},u=0;u.");var r=_e({},t,{value:void 0,defaultValue:void 0,children:On(n._wrapperState.initialValue)});return r}function Xh(e,t){var n=e;to("textarea",t),t.value!==void 0&&t.defaultValue!==void 0&&!Qh&&(f("%s contains a textarea with both value and defaultValue props. Textarea elements must be either controlled or uncontrolled (specify either the value prop, or the defaultValue prop, but not both). Decide between using a controlled or uncontrolled textarea and remove one of these props. 
More info: https://reactjs.org/link/controlled-components",Fa()||"A component"),Qh=!0);var r=t.value;if(r==null){var a=t.children,i=t.defaultValue;if(a!=null){f("Use the `defaultValue` or `value` props instead of setting children on