From d25230ff5144cfead4bf98eb343ddd684dfa654a Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 6 Feb 2024 13:23:28 -0600
Subject: [PATCH 01/94] CH-100 Add first code to call a dedicated
docker-compose generation
---
.../deploy/templates/{ => helm}/argo-sa.yaml | 0
.../templates/{ => helm}/broker-config.yml | 0
.../templates/{ => helm}/configmap.yaml | 0
.../templates/{ => helm}/deployments.yml | 0
.../deploy/templates/{ => helm}/roles.yml | 0
.../deploy/templates/{ => helm}/services.yml | 0
.../templates/{ => helm}/zoo-config.yml | 0
.../{ => helm}/_helpers-auth-rework.tpl | 0
.../templates/{ => helm}/_helpers-names.tpl | 0
.../deploy/templates/{ => helm}/_helpers.tpl | 0
.../{ => helm}/hub/_helpers-passwords.tpl | 0
.../templates/{ => helm}/hub/configmap.yaml | 0
.../templates/{ => helm}/hub/deployment.yaml | 0
.../templates/{ => helm}/hub/netpol.yaml | 0
.../deploy/templates/{ => helm}/hub/pdb.yaml | 0
.../deploy/templates/{ => helm}/hub/pvc.yaml | 0
.../deploy/templates/{ => helm}/hub/rbac.yaml | 0
.../templates/{ => helm}/hub/secret.yaml | 0
.../templates/{ => helm}/hub/service.yaml | 0
.../image-puller/_helpers-daemonset.tpl | 0
.../image-puller/daemonset-continuous.yaml | 0
.../image-puller/daemonset-hook.yaml | 0
.../{ => helm}/image-puller/job.yaml | 0
.../{ => helm}/image-puller/rbac.yaml | 0
.../{ => helm}/proxy/autohttps/_README.txt | 0
.../{ => helm}/proxy/autohttps/configmap.yaml | 0
.../proxy/autohttps/deployment.yaml | 0
.../{ => helm}/proxy/autohttps/rbac.yaml | 0
.../{ => helm}/proxy/autohttps/service.yaml | 0
.../{ => helm}/proxy/deployment.yaml | 0
.../templates/{ => helm}/proxy/netpol.yaml | 0
.../templates/{ => helm}/proxy/pdb.yaml | 0
.../templates/{ => helm}/proxy/secret.yaml | 0
.../templates/{ => helm}/proxy/service.yaml | 0
.../scheduling/_scheduling-helpers.tpl | 0
.../{ => helm}/scheduling/priorityclass.yaml | 0
.../scheduling/user-placeholder/pdb.yaml | 0
.../user-placeholder/priorityclass.yaml | 0
.../user-placeholder/statefulset.yaml | 0
.../scheduling/user-scheduler/configmap.yaml | 0
.../scheduling/user-scheduler/deployment.yaml | 0
.../scheduling/user-scheduler/pdb.yaml | 0
.../scheduling/user-scheduler/rbac.yaml | 0
.../{ => helm}/singleuser/netpol.yaml | 0
.../deploy/templates/{ => helm}/_helpers.tpl | 0
.../templates/{ => helm}/clusterrole.yaml | 0
.../{ => helm}/clusterrolebinding.yaml | 0
.../templates/{ => helm}/nfs-server.yaml | 0
.../{ => helm}/podsecuritypolicy.yaml | 0
.../deploy/templates/{ => helm}/role.yaml | 0
.../templates/{ => helm}/rolebinding.yaml | 0
.../templates/{ => helm}/serviceaccount.yaml | 0
.../templates/{ => helm}/storageclass.yaml | 0
.../deploy/templates/{ => helm}/redis.yaml | 0
deployment-configuration/compose/.helmignore | 22 +
deployment-configuration/compose/Chart.yaml | 10 +
deployment-configuration/compose/README.md | 4 +
.../compose/templates/auto-compose.yaml | 103 +++
deployment-configuration/compose/values.yaml | 79 ++
.../ch_cli_tools/dockercompose.py | 753 ++++++++++++++++++
.../deployment-cli-tools/ch_cli_tools/helm.py | 10 +-
tools/deployment-cli-tools/harness-deployment | 41 +-
62 files changed, 1015 insertions(+), 7 deletions(-)
rename applications/argo/deploy/templates/{ => helm}/argo-sa.yaml (100%)
rename applications/events/deploy/templates/{ => helm}/broker-config.yml (100%)
rename applications/events/deploy/templates/{ => helm}/configmap.yaml (100%)
rename applications/events/deploy/templates/{ => helm}/deployments.yml (100%)
rename applications/events/deploy/templates/{ => helm}/roles.yml (100%)
rename applications/events/deploy/templates/{ => helm}/services.yml (100%)
rename applications/events/deploy/templates/{ => helm}/zoo-config.yml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers-auth-rework.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers-names.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/_helpers.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/_helpers-passwords.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/netpol.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/pvc.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/secret.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/hub/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/_helpers-daemonset.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/daemonset-continuous.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/daemonset-hook.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/job.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/image-puller/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/_README.txt (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/autohttps/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/netpol.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/secret.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/proxy/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/_scheduling-helpers.tpl (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/priorityclass.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/priorityclass.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-placeholder/statefulset.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/scheduling/user-scheduler/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{ => helm}/singleuser/netpol.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/_helpers.tpl (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/clusterrole.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/clusterrolebinding.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/nfs-server.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/podsecuritypolicy.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/role.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/rolebinding.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/serviceaccount.yaml (100%)
rename applications/nfsserver/deploy/templates/{ => helm}/storageclass.yaml (100%)
rename applications/sentry/deploy/templates/{ => helm}/redis.yaml (100%)
create mode 100644 deployment-configuration/compose/.helmignore
create mode 100644 deployment-configuration/compose/Chart.yaml
create mode 100644 deployment-configuration/compose/README.md
create mode 100644 deployment-configuration/compose/templates/auto-compose.yaml
create mode 100644 deployment-configuration/compose/values.yaml
create mode 100644 tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
diff --git a/applications/argo/deploy/templates/argo-sa.yaml b/applications/argo/deploy/templates/helm/argo-sa.yaml
similarity index 100%
rename from applications/argo/deploy/templates/argo-sa.yaml
rename to applications/argo/deploy/templates/helm/argo-sa.yaml
diff --git a/applications/events/deploy/templates/broker-config.yml b/applications/events/deploy/templates/helm/broker-config.yml
similarity index 100%
rename from applications/events/deploy/templates/broker-config.yml
rename to applications/events/deploy/templates/helm/broker-config.yml
diff --git a/applications/events/deploy/templates/configmap.yaml b/applications/events/deploy/templates/helm/configmap.yaml
similarity index 100%
rename from applications/events/deploy/templates/configmap.yaml
rename to applications/events/deploy/templates/helm/configmap.yaml
diff --git a/applications/events/deploy/templates/deployments.yml b/applications/events/deploy/templates/helm/deployments.yml
similarity index 100%
rename from applications/events/deploy/templates/deployments.yml
rename to applications/events/deploy/templates/helm/deployments.yml
diff --git a/applications/events/deploy/templates/roles.yml b/applications/events/deploy/templates/helm/roles.yml
similarity index 100%
rename from applications/events/deploy/templates/roles.yml
rename to applications/events/deploy/templates/helm/roles.yml
diff --git a/applications/events/deploy/templates/services.yml b/applications/events/deploy/templates/helm/services.yml
similarity index 100%
rename from applications/events/deploy/templates/services.yml
rename to applications/events/deploy/templates/helm/services.yml
diff --git a/applications/events/deploy/templates/zoo-config.yml b/applications/events/deploy/templates/helm/zoo-config.yml
similarity index 100%
rename from applications/events/deploy/templates/zoo-config.yml
rename to applications/events/deploy/templates/helm/zoo-config.yml
diff --git a/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl
rename to applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl
diff --git a/applications/jupyterhub/deploy/templates/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/_helpers-names.tpl
rename to applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl
diff --git a/applications/jupyterhub/deploy/templates/_helpers.tpl b/applications/jupyterhub/deploy/templates/helm/_helpers.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/_helpers.tpl
rename to applications/jupyterhub/deploy/templates/helm/_helpers.tpl
diff --git a/applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl b/applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl
rename to applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl
diff --git a/applications/jupyterhub/deploy/templates/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/configmap.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/deployment.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/netpol.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/pdb.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/pvc.yaml b/applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/pvc.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/rbac.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/secret.yaml b/applications/jupyterhub/deploy/templates/helm/hub/secret.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/secret.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/secret.yaml
diff --git a/applications/jupyterhub/deploy/templates/hub/service.yaml b/applications/jupyterhub/deploy/templates/helm/hub/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/hub/service.yaml
rename to applications/jupyterhub/deploy/templates/helm/hub/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl
rename to applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl
diff --git a/applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml
rename to applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml
diff --git a/applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml
rename to applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml
diff --git a/applications/jupyterhub/deploy/templates/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/image-puller/job.yaml
rename to applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml
diff --git a/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/image-puller/rbac.yaml
rename to applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt
rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt
diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/deployment.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/netpol.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/netpol.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/pdb.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/secret.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/secret.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml
diff --git a/applications/jupyterhub/deploy/templates/proxy/service.yaml b/applications/jupyterhub/deploy/templates/helm/proxy/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/proxy/service.yaml
rename to applications/jupyterhub/deploy/templates/helm/proxy/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl b/applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl
rename to applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl
diff --git a/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml
rename to applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/singleuser/netpol.yaml
rename to applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml
diff --git a/applications/nfsserver/deploy/templates/_helpers.tpl b/applications/nfsserver/deploy/templates/helm/_helpers.tpl
similarity index 100%
rename from applications/nfsserver/deploy/templates/_helpers.tpl
rename to applications/nfsserver/deploy/templates/helm/_helpers.tpl
diff --git a/applications/nfsserver/deploy/templates/clusterrole.yaml b/applications/nfsserver/deploy/templates/helm/clusterrole.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/clusterrole.yaml
rename to applications/nfsserver/deploy/templates/helm/clusterrole.yaml
diff --git a/applications/nfsserver/deploy/templates/clusterrolebinding.yaml b/applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/clusterrolebinding.yaml
rename to applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml
diff --git a/applications/nfsserver/deploy/templates/nfs-server.yaml b/applications/nfsserver/deploy/templates/helm/nfs-server.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/nfs-server.yaml
rename to applications/nfsserver/deploy/templates/helm/nfs-server.yaml
diff --git a/applications/nfsserver/deploy/templates/podsecuritypolicy.yaml b/applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/podsecuritypolicy.yaml
rename to applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml
diff --git a/applications/nfsserver/deploy/templates/role.yaml b/applications/nfsserver/deploy/templates/helm/role.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/role.yaml
rename to applications/nfsserver/deploy/templates/helm/role.yaml
diff --git a/applications/nfsserver/deploy/templates/rolebinding.yaml b/applications/nfsserver/deploy/templates/helm/rolebinding.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/rolebinding.yaml
rename to applications/nfsserver/deploy/templates/helm/rolebinding.yaml
diff --git a/applications/nfsserver/deploy/templates/serviceaccount.yaml b/applications/nfsserver/deploy/templates/helm/serviceaccount.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/serviceaccount.yaml
rename to applications/nfsserver/deploy/templates/helm/serviceaccount.yaml
diff --git a/applications/nfsserver/deploy/templates/storageclass.yaml b/applications/nfsserver/deploy/templates/helm/storageclass.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/storageclass.yaml
rename to applications/nfsserver/deploy/templates/helm/storageclass.yaml
diff --git a/applications/sentry/deploy/templates/redis.yaml b/applications/sentry/deploy/templates/helm/redis.yaml
similarity index 100%
rename from applications/sentry/deploy/templates/redis.yaml
rename to applications/sentry/deploy/templates/helm/redis.yaml
diff --git a/deployment-configuration/compose/.helmignore b/deployment-configuration/compose/.helmignore
new file mode 100644
index 000000000..50af03172
--- /dev/null
+++ b/deployment-configuration/compose/.helmignore
@@ -0,0 +1,22 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/deployment-configuration/compose/Chart.yaml b/deployment-configuration/compose/Chart.yaml
new file mode 100644
index 000000000..f294c3e78
--- /dev/null
+++ b/deployment-configuration/compose/Chart.yaml
@@ -0,0 +1,10 @@
+apiVersion: v1
+appVersion: "0.0.1"
+description: CloudHarness Helm Chart
+name: cloudharness
+version: 0.0.1
+maintainers:
+ - name: Filippo Ledda
+ email: filippo@metacell.us
+ - name: Zoran Sinnema
+ email: zoran@metacell.us
diff --git a/deployment-configuration/compose/README.md b/deployment-configuration/compose/README.md
new file mode 100644
index 000000000..abeab69d3
--- /dev/null
+++ b/deployment-configuration/compose/README.md
@@ -0,0 +1,4 @@
+# CloudHarness Docker Compose chart: generate a Docker Compose deployment
+
+Helm templating is used to define the CloudHarness Docker Compose deployment. For further information about Helm, see https://helm.sh.
+
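+To preview the generated configuration, the chart can be rendered directly with Helm (a minimal sketch; the values file path assumes the defaults produced by `harness-deployment`):
+
+```bash
+helm template deployment-configuration/compose -f deployment/compose/values.yaml > docker-compose.yaml
+```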
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
new file mode 100644
index 000000000..5b4893baa
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -0,0 +1,103 @@
+version: '3.7'
+
+services:
+{{- range $app_name, $app_config := .Values.apps }}
+ {{- if has $app_name (list "argo" "nfsserver" "workflows" "events" ) }} {{- /* We deactivate generation for some services */}}
+ {{- continue }}
+ {{- end}}
+ {{ $deployment := $app_config.harness.deployment }}
+ {{ $app_name }}:
+ {{- with $app_config.domain }}
+ domainname: {{ . }}
+ {{- end }}
+ networks:
+ - ch
+ {{- with $app_config.image }}
+ image: {{ . }}
+ {{- end }}
+ {{- with $app_config.harness.service.port }}
+ ports:
+ - "{{ . }}:{{ $app_config.harness.deployment.port }}"
+ {{- end}}
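+    {{- /* Resource values follow the Kubernetes notation used in the values files;
+           compose expects plain units, so the "i" suffix is trimmed below
+           (e.g. "64Mi" becomes "64M"). */}}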
+ deploy:
+ mode: "replicated"
+ replicas: {{ $deployment.replicas | default 1 }}
+      resources:
+        limits:
+          cpus: {{ $deployment.resources.limits.cpu | default "50m" }}
+          memory: {{ $deployment.resources.limits.memory | default "64Mi" | trimSuffix "i" }}
+        reservations:
+          cpus: {{ $deployment.resources.requests.cpu | default "25m" }}
+          memory: {{ $deployment.resources.requests.memory | default "32Mi" | trimSuffix "i" }}
+ environment:
+ - CH_CURRENT_APP_NAME={{ $app_name | quote }}
+
+ {{- range $.Values.env }}
+ - {{ .name }}={{ .value | quote }}
+ {{- end }}
+ {{- range $app_config.harness.env }}
+ - {{ .name }}={{ .value | quote }}
+ {{- end }}
+ {{- with $app_config.harness.dependencies.soft }}
+ # links:
+ # {{- range . }}
+ # - {{ . }}
+ # {{- end }}
+ {{- end }}
+ {{- with $app_config.harness.dependencies.hard }}
+ depends_on:
+ {{- range . }}
+ - {{ . }}
+ {{- end }}
+ {{- end }}
+ {{- if or $deployment.volume $app_config.harness.resources }}
+ volumes:
+ {{- with $deployment.volume }}
+ - type: volume
+ source: {{ .name }}
+ target: {{ .mountpath }}
+ {{- end}}
+ {{- with $app_config.harness.resources }}
+    {{- range . }}
+    - type: bind
+      source: compose/resources/{{ $app_name }}/{{ .src }}
+      target: {{ .dst }}
+ {{- end }}
+ {{- end}}
+ {{- end }}
+{{- end }}
+
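+  {{- /* Traefik is added statically and plays the role the ingress controller has on
+         Kubernetes: it routes incoming HTTP/HTTPS traffic to the application containers. */}}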
+ traefik:
+ image: "traefik:v2.2"
+ container_name: "traefik"
+ networks:
+ - ch
+ command:
+ - "--log.level=INFO"
+ - "--api.insecure=true"
+ - "--providers.docker=true"
+ - "--providers.docker.exposedbydefault=false"
+ - "--entrypoints.web.address=:80"
+ - "--entrypoints.websecure.address=:443"
+ - "--providers.file.directory=/etc/traefik/dynamic_conf"
+ ports:
+ - "80:80"
+ - "443:443"
+ volumes:
+ - "/var/run/docker.sock:/var/run/docker.sock:ro"
+ - "./certs/:/certs/:ro"
+ - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
+
+networks:
+ ch:
+ name: ch_network
+
+{{- /* Only emit the volumes section when at least one app declares a volume. */}}
+{{- $volumes := dict }}
+{{- range $app_name, $app_config := .Values.apps }}
+  {{- with $app_config.harness.deployment.volume }}
+    {{- $_ := set $volumes .name true }}
+  {{- end }}
+{{- end }}
+{{- if $volumes }}
+volumes:
+{{- range $name, $_ := $volumes }}
+  {{ $name }}:
+{{- end }}
+{{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/values.yaml b/deployment-configuration/compose/values.yaml
new file mode 100644
index 000000000..434dcac7c
--- /dev/null
+++ b/deployment-configuration/compose/values.yaml
@@ -0,0 +1,79 @@
+# -- If set to true, local DNS mapping is added to pods.
+local: false
+# -- Enables/disables Gatekeeper.
+secured_gatekeepers: true
+# -- The root domain.
+domain: ${{DOMAIN}}
+# -- The K8s namespace.
+namespace: ch
+# -- Name of the main app; routes incoming traffic of the root `domain` to this app.
+mainapp: accounts
+registry:
+ # -- The docker registry.
+ name: "localhost:5000"
+ # -- Optional secret used for pulling from docker registry.
+ secret:
+# -- Docker tag used to pull images.
+tag: latest
+# -- List of applications.
+# @default -- Will be filled automatically.
+apps: {}
+env:
+ # -- Cloud Harness version
+ - name: CH_VERSION
+ value: 0.0.1
+ # -- Cloud harness chart version
+ - name: CH_CHART_VERSION
+ value: 0.0.1
+privenv:
+ # -- Defines a secret as private environment variable that is injected in containers.
+ - name: CH_SECRET
+ value: In God we trust; all others must bring data. ― W. Edwards Deming
+ingress:
+  # -- Flag to enable/disable the ingress controller.
+ enabled: true
+ # -- K8s Name of ingress.
+ name: cloudharness-ingress
+ # -- Enables/disables SSL redirect.
+ ssl_redirect: true
+ letsencrypt:
+ # -- Email for letsencrypt.
+ email: filippo@metacell.us
+backup:
+ # -- Flag to enable/disable backups.
+ active: false
+ # -- Number of days to keep backups.
+ keep_days: "7"
+ # -- Number of weeks to keep backups.
+ keep_weeks: "4"
+ # -- Number of months to keep backups.
+ keep_months: "6"
+ # -- Schedule as cronjob expression.
+ schedule: "*/5 * * * *"
+ # -- The file suffix added to backup files.
+ suffix: ".gz"
+  # -- The volume size for backups (all backups share the same volume).
+ volumesize: "2Gi"
+ # -- Target directory of backups, the mount point of the persistent volume.
+ dir: "/backups"
+ resources:
+ requests:
+ # -- K8s memory resource definition.
+ memory: "32Mi"
+ # -- K8s cpu resource definition.
+ cpu: "25m"
+ limits:
+ # -- K8s memory resource definition.
+ memory: "64Mi"
+ # -- K8s cpu resource definition.
+ cpu: "50m"
+proxy:
+ timeout:
+ # -- Timeout for proxy connections in seconds.
+ send: 60
+ # -- Timeout for proxy responses in seconds.
+ read: 60
+ keepalive: 60
+ payload:
+    # -- Maximum size of payload in MB.
+ max: 250
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
new file mode 100644
index 000000000..39ff0272e
--- /dev/null
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -0,0 +1,753 @@
+"""
+Utilities to create a docker compose configuration from a CloudHarness directory structure
+"""
+import yaml
+import os
+import shutil
+import logging
+from hashlib import sha1
+import subprocess
+from functools import cache
+import tarfile
+from docker import from_env as DockerClient
+from pathlib import Path
+
+
+from . import HERE, CH_ROOT
+from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
+ DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH
+from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
+ get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
+ find_dockerfiles_paths
+
+from .models import HarnessMainConfig
+
+KEY_HARNESS = 'harness'
+KEY_SERVICE = 'service'
+KEY_DATABASE = 'database'
+KEY_DEPLOYMENT = 'deployment'
+KEY_APPS = 'apps'
+KEY_TASK_IMAGES = 'task-images'
+KEY_TEST_IMAGES = 'test-images'
+
+DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
+
+
+def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ namespace=None, templates_path=HELM_PATH) -> HarnessMainConfig:
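+    """
+    Entry point for the docker compose deployment generation.
+
+    A thin wrapper around the processing class below: it normalizes the env
+    parameter and runs the whole values-processing pipeline, returning the
+    merged configuration.
+    """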
+    if isinstance(env, str):
+ env = [env]
+ return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured,
+ output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env,
+ namespace=namespace, templates_path=templates_path).process_values()
+
+
+class CloudHarnessHelm:
+ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ namespace=None, templates_path=HELM_PATH):
+ assert domain, 'A domain must be specified'
+ self.root_paths = [Path(r) for r in root_paths]
+ self.tag = tag
+        if registry and not registry.endswith('/'):
+            self.registry = f'{registry}/'
+        else:
+            self.registry = registry
+ self.local = local
+ self.domain = domain
+ self.exclude = exclude
+ self.secured = secured
+ self.output_path = Path(output_path)
+ self.include = include
+ self.registry_secret = registry_secret
+ self.tls = tls
+        self.env = env or []
+ self.namespace = namespace
+
+ self.templates_path = templates_path
+ self.dest_deployment_path = self.output_path / templates_path
+ self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml'
+ self.__init_deployment()
+
+ self.static_images = set()
+ self.base_images = {}
+ self.all_images = {}
+
+ def __init_deployment(self):
+ """
+ Create the base helm chart
+ """
+ if self.dest_deployment_path.exists():
+ shutil.rmtree(self.dest_deployment_path)
+ # Initialize with default
+ copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
+
+ # Override for every cloudharness scaffolding
+ for root_path in self.root_paths:
+ copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
+ base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
+ collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
+ dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
+
+ def __adjust_missing_values(self, helm_values):
+ if 'name' not in helm_values:
+ with open(self.helm_chart_path) as f:
+ chart_idx_content = yaml.safe_load(f)
+ helm_values['name'] = chart_idx_content['name'].lower()
+
+ def process_values(self) -> HarnessMainConfig:
+ """
+ Creates values file for the helm chart
+ """
+ helm_values = self.__get_default_helm_values()
+
+ self.__adjust_missing_values(helm_values)
+
+ helm_values = self.__merge_base_helm_values(helm_values)
+
+ helm_values[KEY_APPS] = {}
+
+ base_image_name = helm_values['name']
+
+ helm_values[KEY_TASK_IMAGES] = {}
+
+ self.__init_base_images(base_image_name)
+ self.__init_static_images(base_image_name)
+ helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name)
+
+ self.__process_applications(helm_values, base_image_name)
+
+ # self.create_tls_certificate(helm_values)
+
+ values, include = self.__finish_helm_values(values=helm_values)
+
+ # Adjust dependencies from static (common) images
+ self.__assign_static_build_dependencies(helm_values)
+
+ for root_path in self.root_paths:
+ collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
+ dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
+
+ # Save values file for manual helm chart
+ merged_values = merge_to_yaml_file(helm_values, self.dest_deployment_path / VALUES_MANUAL_PATH)
+ if self.namespace:
+ merge_to_yaml_file({'metadata': {'namespace': self.namespace},
+ 'name': helm_values['name']}, self.helm_chart_path)
+ validate_helm_values(merged_values)
+ return HarnessMainConfig.from_dict(merged_values)
+
+ def __process_applications(self, helm_values, base_image_name):
+ for root_path in self.root_paths:
+ app_values = init_app_values(
+ root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
+ helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ app_values)
+
+ app_base_path = root_path / APPS_PATH
+ app_values = self.collect_app_values(
+ f"{app_base_path}", base_image_name=base_image_name)
+ helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ app_values)
+
+ def collect_app_values(self, app_base_path, base_image_name=None):
+ values = {}
+
+ for app_path in get_sub_paths(app_base_path):
+ app_name = app_name_from_path(
+ os.path.relpath(app_path, app_base_path))
+
+ if app_name in self.exclude:
+ continue
+ app_key = app_name.replace('-', '_')
+
+ app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+
+ values[app_key] = dict_merge(
+ values[app_key], app_values) if app_key in values else app_values
+
+ return values
+
+ def __init_static_images(self, base_image_name):
+ for static_img_dockerfile in self.static_images:
+ img_name = image_name_from_dockerfile_path(os.path.basename(
+ static_img_dockerfile), base_name=base_image_name)
+ self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=static_img_dockerfile)
+
+ def __assign_static_build_dependencies(self, helm_values):
+ for static_img_dockerfile in self.static_images:
+ key = os.path.basename(static_img_dockerfile)
+ if key in helm_values[KEY_TASK_IMAGES]:
+ dependencies = guess_build_dependencies_from_dockerfile(
+ static_img_dockerfile)
+ for dep in dependencies:
+ if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
+ helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
+
+ for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
+ if image_name in self.exclude:
+ del helm_values[KEY_TASK_IMAGES][image_name]
+
+ def __init_base_images(self, base_image_name):
+
+ for root_path in self.root_paths:
+ for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
+ img_name = image_name_from_dockerfile_path(
+ os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=root_path)
+
+ self.static_images.update(find_dockerfiles_paths(
+ os.path.join(root_path, STATIC_IMAGES_PATH)))
+ return self.base_images
+
+ def __init_test_images(self, base_image_name):
+ test_images = {}
+ for root_path in self.root_paths:
+ for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
+ img_name = image_name_from_dockerfile_path(
+ os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=base_img_dockerfile)
+
+ return test_images
+
+
+ def __find_static_dockerfile_paths(self, root_path):
+ return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
+
+ def __merge_base_helm_values(self, helm_values):
+ # Override for every cloudharness scaffolding
+ for root_path in self.root_paths:
+ helm_values = dict_merge(
+ helm_values,
+ collect_helm_values(root_path, env=self.env)
+ )
+
+ return helm_values
+
+ def __get_default_helm_values(self):
+ helm_values = get_template(os.path.join(
+ CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml'))
+ helm_values = dict_merge(helm_values,
+ collect_helm_values(CH_ROOT, env=self.env))
+
+ return helm_values
+
+ def create_tls_certificate(self, helm_values):
+ if not self.tls:
+ helm_values['tls'] = None
+ return
+ if not self.local:
+ return
+ helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
+
+ bootstrap_file = 'bootstrap.sh'
+ certs_parent_folder_path = self.output_path / 'helm' / 'resources'
+ certs_folder_path = certs_parent_folder_path / 'certs'
+
+ if certs_folder_path.exists():
+ # don't overwrite the certificate if it exists
+ return
+
+        try:
+            client = DockerClient()
+            client.ping()
+        except Exception as e:
+            raise ConnectionRefusedError(
+                '\n\nIs docker running? Run "eval $(minikube docker-env)" if you are using minikube...') from e
+
+ # Create CA and sign cert for domain
+        container = client.containers.run(image='frapsoft/openssl',
+                                          command='sleep 60',
+                                          entrypoint="",
+                                          detach=True,
+                                          environment=[
+                                              f"DOMAIN={self.domain}"],
+                                          )
+
+ container.exec_run('mkdir -p /mnt/vol1')
+ container.exec_run('mkdir -p /mnt/certs')
+
+ # copy bootstrap file
+ cur_dir = os.getcwd()
+ os.chdir(os.path.join(HERE, 'scripts'))
+        with tarfile.open(bootstrap_file + '.tar', mode='w') as tar:
+            tar.add(bootstrap_file)
+        with open(bootstrap_file + '.tar', 'rb') as f:
+            data = f.read()
+ container.put_archive('/mnt/vol1', data)
+ os.chdir(cur_dir)
+        container.exec_run(f'tar xf {bootstrap_file}.tar', workdir='/mnt/vol1')
+
+ # exec bootstrap file
+ container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
+
+ # retrieve the certs from the container
+ bits, stat = container.get_archive('/mnt/certs')
+ if not certs_folder_path.exists():
+ certs_folder_path.mkdir(parents=True)
+ with open(certs_parent_folder_path / 'certs.tar', 'wb') as f:
+ for chunk in bits:
+ f.write(chunk)
+        with tarfile.open(f'{certs_parent_folder_path}/certs.tar') as cf:
+            cf.extractall(path=certs_parent_folder_path)
+
+ logs = container.logs()
+ logging.info(f'openssl container logs: {logs}')
+
+ # stop the container
+ container.kill()
+
+ logging.info("Created certificates for local deployment")
+
+ def __finish_helm_values(self, values):
+ """
+ Sets default overridden values
+ """
+ if self.registry:
+ logging.info(f"Registry set: {self.registry}")
+ if self.local:
+ values['registry']['secret'] = ''
+ if self.registry_secret:
+            logging.info("Registry secret set")
+ values['registry']['name'] = self.registry
+ values['registry']['secret'] = self.registry_secret
+ values['tag'] = self.tag
+ if self.namespace:
+ values['namespace'] = self.namespace
+ values['secured_gatekeepers'] = self.secured
+ values['ingress']['ssl_redirect'] = values['ingress']['ssl_redirect'] and self.tls
+ values['tls'] = self.tls
+ if self.domain:
+ values['domain'] = self.domain
+
+ values['local'] = self.local
+ if self.local:
+ try:
+ values['localIp'] = get_cluster_ip()
+ except subprocess.TimeoutExpired:
+ logging.warning("Minikube not available")
+            except Exception:
+ logging.warning("Kubectl not available")
+
+ apps = values[KEY_APPS]
+
+ for app_key in apps:
+ v = apps[app_key]
+
+ values_from_legacy(v)
+ assert KEY_HARNESS in v, 'Default app value loading is broken'
+
+ app_name = app_key.replace('_', '-')
+ harness = v[KEY_HARNESS]
+ harness['name'] = app_name
+
+ if not harness[KEY_SERVICE].get('name', None):
+ harness[KEY_SERVICE]['name'] = app_name
+ if not harness[KEY_DEPLOYMENT].get('name', None):
+ harness[KEY_DEPLOYMENT]['name'] = app_name
+
+ if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None):
+ harness[KEY_DATABASE]['name'] = app_name.strip() + '-db'
+
+ self.__clear_unused_db_configuration(harness)
+ values_set_legacy(v)
+
+ if self.include:
+ self.include = get_included_with_dependencies(
+ values, set(self.include))
+ logging.info('Selecting included applications')
+
+            for v in list(apps):
+ if apps[v]['harness']['name'] not in self.include:
+ del apps[v]
+ continue
+ values[KEY_TASK_IMAGES].update(apps[v][KEY_TASK_IMAGES])
+ # Create environment variables
+ else:
+            for v in list(apps):
+ values[KEY_TASK_IMAGES].update(apps[v][KEY_TASK_IMAGES])
+ create_env_variables(values)
+ return values, self.include
+
+ def __clear_unused_db_configuration(self, harness_config):
+ database_config = harness_config[KEY_DATABASE]
+ database_type = database_config.get('type', None)
+ if database_type is None:
+ del harness_config[KEY_DATABASE]
+ return
+ db_specific_keys = [k for k, v in database_config.items()
+ if isinstance(v, dict) and 'image' in v and 'ports' in v]
+ for db in db_specific_keys:
+ if database_type != db:
+ del database_config[db]
+
+ def image_tag(self, image_name, build_context_path=None, dependencies=()):
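+        """
+        Compute the full image reference (registry + image name + tag).
+
+        When no explicit tag is set and the deployment is not local, the tag is
+        derived from a sha1 hash of the build context content combined with the
+        tags of the images it depends on, so it changes only when the sources
+        or a dependency change.
+        """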
+ tag = self.tag
+ if tag is None and not self.local:
+ logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
+ ignore_path = os.path.join(build_context_path, '.dockerignore')
+ ignore = set(DEFAULT_IGNORE)
+ if os.path.exists(ignore_path):
+ with open(ignore_path) as f:
+ ignore = ignore.union({line.strip() for line in f})
+ logging.info(f"Ignoring {ignore}")
+ tag = generate_tag_from_content(build_context_path, ignore)
+ logging.info(f"Content hash: {tag}")
+ dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
+            tag = sha1((tag + "".join(self.all_images.get(n, '') for n in dependencies)).encode("utf-8")).hexdigest()
+ logging.info(f"Generated tag: {tag}")
+ app_name = image_name.split("/")[-1] # the image name can have a prefix
+ self.all_images[app_name] = tag
+ return self.registry + image_name + (f':{tag}' if tag else '')
+
+ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
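+        """
+        Build the values spec for a single application.
+
+        Merges the application's deploy/values.yaml with any values-<env>.yaml
+        overrides, resolves the application image from its Dockerfile and
+        collects the task images found under tasks/.
+        """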
+        logging.info('Generating values spec for ' + app_name)
+
+ specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
+ if os.path.exists(specific_template_path):
+ logging.info("Specific values template found: " +
+ specific_template_path)
+ values = get_template(specific_template_path)
+ else:
+ values = {}
+
+ for e in self.env:
+ specific_template_path = os.path.join(
+ app_path, 'deploy', f'values-{e}.yaml')
+ if os.path.exists(specific_template_path):
+ logging.info(
+ "Specific environment values template found: " + specific_template_path)
+ with open(specific_template_path) as f:
+ values_env_specific = yaml.safe_load(f)
+ values = dict_merge(values, values_env_specific)
+
+ if KEY_HARNESS in values and 'name' in values[KEY_HARNESS] and values[KEY_HARNESS]['name']:
+ logging.warning('Name is automatically set in applications: name %s will be ignored',
+ values[KEY_HARNESS]['name'])
+
+ image_paths = [path for path in find_dockerfiles_paths(
+ app_path) if 'tasks/' not in path and 'subapps' not in path]
+ if len(image_paths) > 1:
+ logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name,
+ image_paths[0])
+ if KEY_HARNESS in values and 'dependencies' in values[KEY_HARNESS] and 'build' in values[KEY_HARNESS]['dependencies']:
+ build_dependencies = values[KEY_HARNESS]['dependencies']['build']
+ else:
+ build_dependencies = []
+
+ if len(image_paths) > 0:
+ image_name = image_name_from_dockerfile_path(os.path.relpath(
+ image_paths[0], os.path.dirname(app_path)), base_image_name)
+
+ values['image'] = self.image_tag(
+ image_name, build_context_path=app_path, dependencies=build_dependencies)
+ elif KEY_HARNESS in values and not values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', None) and values[
+ KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('auto', False):
+ raise Exception(f"At least one Dockerfile must be specified on application {app_name}. "
+ f"Specify harness.deployment.image value if you intend to use a prebuilt image.")
+
+ task_images_paths = [path for path in find_dockerfiles_paths(
+ app_path) if 'tasks/' in path]
+ values[KEY_TASK_IMAGES] = values.get(KEY_TASK_IMAGES, {})
+
+ if build_dependencies:
+            for build_dependency in build_dependencies:
+ if build_dependency in self.base_images:
+ values[KEY_TASK_IMAGES][build_dependency] = self.base_images[build_dependency]
+
+ for task_path in task_images_paths:
+ task_name = app_name_from_path(os.path.relpath(
+ task_path, os.path.dirname(app_path)))
+ img_name = image_name_from_dockerfile_path(task_name, base_image_name)
+
+ values[KEY_TASK_IMAGES][task_name] = self.image_tag(
+ img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
+
+ return values
+
+
+def get_included_with_dependencies(values, include):
+ app_values = values['apps'].values()
+ directly_included = [app for app in app_values if any(
+ inc == app[KEY_HARNESS]['name'] for inc in include)]
+
+ dependent = set(include)
+ for app in directly_included:
+ if app['harness']['dependencies'].get('hard', None):
+ dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
+ if app['harness']['dependencies'].get('soft', None):
+ dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
+ if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
+ dependent.add('accounts')
+ if len(dependent) == len(include):
+ return dependent
+ return get_included_with_dependencies(values, dependent)
+
+
+def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
+ pass
+
+
+def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None):
+ """
+ Searches recursively for helm templates inside the applications and collects the templates in the destination
+
+ :param search_root:
+ :param dest_helm_chart_path: collected helm templates destination folder
+ :param exclude:
+ :return:
+ """
+ app_base_path = os.path.join(search_root, APPS_PATH)
+
+ for app_path in get_sub_paths(app_base_path):
+ app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+ if app_name in exclude or (include and not any(inc in app_name for inc in include)):
+ continue
+ template_dir = os.path.join(app_path, 'deploy', 'templates', templates_path)
+ if os.path.exists(template_dir):
+ dest_dir = os.path.join(
+ dest_helm_chart_path, 'templates', app_name)
+
+ logging.info(
+ "Collecting templates for application %s to %s", app_name, dest_dir)
+ if os.path.exists(dest_dir):
+ logging.warning(
+ "Merging/overriding all files in directory %s", dest_dir)
+ merge_configuration_directories(template_dir, dest_dir)
+ else:
+ shutil.copytree(template_dir, dest_dir)
+ resources_dir = os.path.join(app_path, 'deploy/resources')
+ if os.path.exists(resources_dir):
+ dest_dir = os.path.join(
+ dest_helm_chart_path, 'resources', app_name)
+
+ logging.info(
+ "Collecting resources for application %s to %s", app_name, dest_dir)
+
+ merge_configuration_directories(resources_dir, dest_dir)
+
+ subchart_dir = os.path.join(app_path, 'deploy/charts')
+ if os.path.exists(subchart_dir):
+ dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name)
+
+ logging.info(
+ "Collecting templates for application %s to %s", app_name, dest_dir)
+ if os.path.exists(dest_dir):
+ logging.warning(
+ "Merging/overriding all files in directory %s", dest_dir)
+ merge_configuration_directories(subchart_dir, dest_dir)
+ else:
+ shutil.copytree(subchart_dir, dest_dir)
+
+
+def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
+ if not os.path.exists(base_helm_chart):
+ return
+ if os.path.exists(dest_helm_chart_path):
+ logging.info("Merging/overriding all files in directory %s",
+ dest_helm_chart_path)
+ merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
+ else:
+ logging.info("Copying base deployment chart from %s to %s",
+ base_helm_chart, dest_helm_chart_path)
+ shutil.copytree(base_helm_chart, dest_helm_chart_path)
+
+
+def collect_helm_values(deployment_root, env=()):
+ """
+ Creates helm values from a cloudharness deployment scaffolding
+ """
+
+ values_template_path = os.path.join(
+ deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml')
+
+ values = get_template(values_template_path)
+
+ for e in env:
+ specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
+ f'values-template-{e}.yaml')
+ if os.path.exists(specific_template_path):
+ logging.info(
+ "Specific environment values template found: " + specific_template_path)
+ with open(specific_template_path) as f:
+ values_env_specific = yaml.safe_load(f)
+ values = dict_merge(values, values_env_specific)
+ return values
+
+
+def init_app_values(deployment_root, exclude, values=None):
+ values = values if values is not None else {}
+ app_base_path = os.path.join(deployment_root, APPS_PATH)
+ overridden_template_path = os.path.join(
+ deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+ default_values_path = os.path.join(
+ CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+
+ for app_path in get_sub_paths(app_base_path):
+
+ app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+
+ if app_name in exclude:
+ continue
+ app_key = app_name.replace('-', '_')
+ if app_key not in values:
+ default_values = get_template(default_values_path)
+ values[app_key] = default_values
+ overridden_defaults = get_template(overridden_template_path)
+ values[app_key] = dict_merge(values[app_key], overridden_defaults)
+
+ return values
+
+
+def values_from_legacy(values):
+ if KEY_HARNESS not in values:
+ values[KEY_HARNESS] = {}
+ harness = values[KEY_HARNESS]
+ if KEY_SERVICE not in harness:
+ harness[KEY_SERVICE] = {}
+ if KEY_DEPLOYMENT not in harness:
+ harness[KEY_DEPLOYMENT] = {}
+ if KEY_DATABASE not in harness:
+ harness[KEY_DATABASE] = {}
+
+ if 'subdomain' in values:
+ harness['subdomain'] = values['subdomain']
+ if 'autodeploy' in values:
+ harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
+ if 'autoservice' in values:
+ harness[KEY_SERVICE]['auto'] = values['autoservice']
+ if 'secureme' in values:
+ harness['secured'] = values['secureme']
+ if 'resources' in values:
+        harness[KEY_DEPLOYMENT].setdefault('resources', {}).update(values['resources'])
+ if 'replicas' in values:
+ harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
+ if 'image' in values:
+ harness[KEY_DEPLOYMENT]['image'] = values['image']
+ if 'port' in values:
+ harness[KEY_DEPLOYMENT]['port'] = values['port']
+ harness[KEY_SERVICE]['port'] = values['port']
+
+
+def values_set_legacy(values):
+ harness = values[KEY_HARNESS]
+ if 'image' in harness[KEY_DEPLOYMENT]:
+ values['image'] = harness[KEY_DEPLOYMENT]['image']
+
+ values['name'] = harness['name']
+ if harness[KEY_DEPLOYMENT].get('port', None):
+ values['port'] = harness[KEY_DEPLOYMENT]['port']
+ if 'resources' in harness[KEY_DEPLOYMENT]:
+ values['resources'] = harness[KEY_DEPLOYMENT]['resources']
+
+
+def generate_tag_from_content(content_path, ignore=()):
+ from dirhash import dirhash
+ return dirhash(content_path, 'sha1', ignore=ignore)
+
+
+def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
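+    """
+    Recursively flatten selected harness values into environment variables.
+
+    For example, called with prefix 'CH_samples' on harness values
+    {'name': 'samples', 'port': 80}, it yields the variables CH_SAMPLES_NAME
+    and CH_SAMPLES_PORT.
+    """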
+ if isinstance(values, dict):
+ newenvs = list(envs)
+ for key, value in values.items():
+ v = extract_env_variables_from_values(
+ value, envs, f"{prefix}_{key}".replace('-', '_').upper())
+ if key in ('name', 'port', 'subdomain'):
+ newenvs.extend(v)
+ return newenvs
+ else:
+ return [env_variable(prefix, values)]
+
+
+def create_env_variables(values):
+ for app_name, value in values[KEY_APPS].items():
+ if KEY_HARNESS in value:
+ values['env'].extend(extract_env_variables_from_values(
+ value[KEY_HARNESS], prefix='CH_' + app_name))
+ values['env'].append(env_variable('CH_DOMAIN', values['domain']))
+ values['env'].append(env_variable(
+ 'CH_IMAGE_REGISTRY', values['registry']['name']))
+ values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
+
+
+def hosts_info(values):
+ domain = values['domain']
+ namespace = values['namespace']
+    subdomains = [app[KEY_HARNESS]['subdomain']
+                  for app in values[KEY_APPS].values()
+                  if KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']]
+    subdomains += [alias
+                   for app in values[KEY_APPS].values()
+                   if KEY_HARNESS in app and app[KEY_HARNESS]['aliases']
+                   for alias in app[KEY_HARNESS]['aliases']]
+ try:
+ ip = get_cluster_ip()
+    except Exception:
+ logging.warning('Cannot get cluster ip')
+ return
+    logging.info(
+        f"\nTo test locally, update your hosts file\n{ip}\t{domain} "
+        + ' '.join(f"{sd}.{domain}" for sd in subdomains))
+
+ deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
+ for app in values[KEY_APPS].values() if KEY_HARNESS in app)
+
+    logging.info(
+        "\nTo run some apps locally, these host references may also be needed")
+ for appname in values[KEY_APPS]:
+ app = values[KEY_APPS][appname]['harness']
+ if 'deployment' not in app:
+ continue
+ print(
+ "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
+ app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
+
+    print(
+        f"127.0.0.1\t{' '.join(f'{s}.{namespace}' for s in deployments)}")
+
+
+class ValuesValidationException(Exception):
+ pass
+
+
+def validate_helm_values(values):
+ validate_dependencies(values)
+
+
+def validate_dependencies(values):
+    all_apps = set(values["apps"])
+ for app in all_apps:
+ app_values = values["apps"][app]
+ if 'dependencies' in app_values[KEY_HARNESS]:
+ soft_dependencies = {
+ d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
+ not_found = {d for d in soft_dependencies if d not in all_apps}
+ if not_found:
+ logging.warning(
+ f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
+ hard_dependencies = {
+ d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
+ not_found = {d for d in hard_dependencies if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
+
+            build_dependencies = set(app_values[KEY_HARNESS]['dependencies']['build'])
+
+ not_found = {
+ d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
+ not_found = {d for d in not_found if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
+
+ if 'use_services' in app_values[KEY_HARNESS]:
+ service_dependencies = {d['name'].replace(
+ "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
+
+ not_found = {d for d in service_dependencies if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
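
A minimal sketch of how this validation behaves, using hypothetical app names: a
missing soft dependency only logs a warning, while a missing hard dependency raises
ValuesValidationException (assumes KEY_HARNESS == 'harness' and KEY_TASK_IMAGES ==
'task-images', as defined in the module above).

    values = {
        "apps": {
            "samples": {
                "harness": {
                    "dependencies": {
                        "soft": ["events"],             # absent -> warning only
                        "hard": ["workflows"],          # absent -> exception
                        "build": ["cloudharness-base"],
                    }
                }
            }
        },
        "task-images": {"cloudharness-base": "reg/cloudharness-base:latest"},
    }
    try:
        validate_dependencies(values)
    except ValuesValidationException as e:
        print(e)  # Bad application dependencies specified for application samples: workflows
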
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index 4c75a9092..9bd43b8c3 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -212,7 +212,7 @@ def __init_base_images(self, base_image_name):
self.static_images.update(find_dockerfiles_paths(
os.path.join(root_path, STATIC_IMAGES_PATH)))
return self.base_images
-
+
def __init_test_images(self, base_image_name):
test_images = {}
for root_path in self.root_paths:
@@ -224,7 +224,7 @@ def __init_test_images(self, base_image_name):
return test_images
-
+
def __find_static_dockerfile_paths(self, root_path):
return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
@@ -417,7 +417,7 @@ def image_tag(self, image_name, build_context_path=None, dependencies=()):
app_name = image_name.split("/")[-1] # the image name can have a prefix
self.all_images[app_name] = tag
return self.registry + image_name + (f':{tag}' if tag else '')
-
+
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
@@ -456,7 +456,7 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
if len(image_paths) > 0:
image_name = image_name_from_dockerfile_path(os.path.relpath(
image_paths[0], os.path.dirname(app_path)), base_image_name)
-
+
values['image'] = self.image_tag(
image_name, build_context_path=app_path, dependencies=build_dependencies)
elif KEY_HARNESS in values and not values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', None) and values[
@@ -521,7 +521,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), i
app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
if app_name in exclude or (include and not any(inc in app_name for inc in include)):
continue
- template_dir = os.path.join(app_path, 'deploy/templates')
+ template_dir = os.path.join(app_path, 'deploy', 'templates', HELM_PATH)
if os.path.exists(template_dir):
dest_dir = os.path.join(
dest_helm_chart_path, 'templates', app_name)
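
For context, with HELM_PATH == 'helm' this change moves the per-application
template collection one directory down; a quick illustration with a hypothetical
application path:

    import os
    HELM_PATH = 'helm'  # from cloudharness_utils.constants
    app_path = 'applications/events'
    template_dir = os.path.join(app_path, 'deploy', 'templates', HELM_PATH)
    print(template_dir)  # applications/events/deploy/templates/helm
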
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index a9cecabbf..d8aaebda2 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -4,6 +4,7 @@ import logging
import sys
import os
+from ch_cli_tools.dockercompose import create_docker_compose_configuration
from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy
from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration
from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file
@@ -61,6 +62,8 @@ if __name__ == "__main__":
help=f'Do not generate ci/cd files')
parser.add_argument('-we', '--write-env', dest='write_env', action="store_const", default=None, const=True,
help=f'Write build env to .env file in {DEPLOYMENT_PATH}')
+ parser.add_argument('--docker-compose', dest='docker_compose', action="store_true",
+ help='Generate docker-compose.yaml and dedicated Skaffold configuration')
args, unknown = parser.parse_known_args(sys.argv[1:])
@@ -81,7 +84,24 @@ if __name__ == "__main__":
merge_app_directories(root_paths, destination=args.merge)
root_paths = [args.merge]
- helm_values = create_helm_chart(
+ # helm_values = create_helm_chart(
+ # root_paths,
+ # tag=args.tag,
+ # registry=args.registry,
+ # domain=args.domain,
+ # local=args.local,
+ # secured=not args.unsecured,
+ # output_path=args.output_path,
+ # exclude=args.exclude,
+ # include=args.include,
+ # registry_secret=args.registry_secret,
+ # tls=not args.no_tls,
+ # env=envs,
+ # namespace=args.namespace
+ # )
+
+ if not args.docker_compose:
+ helm_values = create_helm_chart(
root_paths,
tag=args.tag,
registry=args.registry,
@@ -96,6 +116,23 @@ if __name__ == "__main__":
env=envs,
namespace=args.namespace
)
+ else:
+ helm_values = create_docker_compose_configuration(
+ root_paths,
+ tag=args.tag,
+ registry=args.registry,
+ domain=args.domain,
+ local=args.local,
+ secured=not args.unsecured,
+ output_path=args.output_path,
+ exclude=args.exclude,
+ include=args.include,
+ registry_secret=args.registry_secret,
+ tls=not args.no_tls,
+ env=envs,
+ namespace=args.namespace,
+ templates_path="compose",
+ )
merged_root_paths = preprocess_build_overrides(
root_paths=root_paths, helm_values=helm_values)
@@ -108,7 +145,7 @@ if __name__ == "__main__":
envs=envs,
base_image_name=helm_values['name'],
helm_values=helm_values)
-
+
if args.write_env:
write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env"))
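
The two branches introduced above call different factories with a nearly identical
argument list (the compose branch additionally pins templates_path), so
`harness-deployment <paths> --docker-compose` switches the whole generation
pipeline. A possible equivalent sketch of the dispatch, with names as in the hunks
above and the arguments abbreviated (not the actual script):

    factory = (create_docker_compose_configuration if args.docker_compose
               else create_helm_chart)
    helm_values = factory(
        root_paths,
        tag=args.tag,
        registry=args.registry,
        domain=args.domain,
        env=envs,
        namespace=args.namespace,
        # ... remaining keyword arguments are the same in both branches
    )
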
From 6f306a18a2f07a6ba78501946d9be3887244b162 Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 7 Feb 2024 09:24:29 -0600
Subject: [PATCH 02/94] Squashed commit of the following:
commit c698bbadf4f5cf41a59818d3738258fb29919249
Author: aranega
Date: Wed Feb 7 08:55:45 2024 -0600
CH-100 Second pass of porting paths to pathlib
commit 0422bfe9860f272354c1faadd851d37b4976650a
Author: aranega
Date: Wed Feb 7 07:33:43 2024 -0600
CH-100 First pass of porting paths to pathlib
---
.../ch_cli_tools/dockercompose.py | 92 +++++++++----------
tools/deployment-cli-tools/harness-deployment | 1 -
2 files changed, 45 insertions(+), 48 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 39ff0272e..06bf6d230 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -31,16 +31,16 @@
KEY_TEST_IMAGES = 'test-images'
DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
-
+COMPOSE = 'compose'
def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- namespace=None, templates_path=HELM_PATH) -> HarnessMainConfig:
+ namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
env = [env]
return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured,
output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env,
- namespace=namespace, templates_path=templates_path).process_values()
+ namespace=namespace, templates_path=COMPOSE).process_values()
class CloudHarnessHelm:
@@ -146,16 +146,15 @@ def __process_applications(self, helm_values, base_image_name):
app_base_path = root_path / APPS_PATH
app_values = self.collect_app_values(
- f"{app_base_path}", base_image_name=base_image_name)
+ app_base_path, base_image_name=base_image_name)
helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
app_values)
def collect_app_values(self, app_base_path, base_image_name=None):
values = {}
- for app_path in get_sub_paths(app_base_path):
- app_name = app_name_from_path(
- os.path.relpath(app_path, app_base_path))
+        for app_path in app_base_path.glob("*/"):  # "*/" matches only sub-directories
+ app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
if app_name in self.exclude:
continue
@@ -185,7 +184,7 @@ def __assign_static_build_dependencies(self, helm_values):
if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
- for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
+        for image_name in list(helm_values[KEY_TASK_IMAGES]):  # copy: entries are deleted below
if image_name in self.exclude:
del helm_values[KEY_TASK_IMAGES][image_name]
@@ -228,10 +227,11 @@ def __merge_base_helm_values(self, helm_values):
return helm_values
def __get_default_helm_values(self):
- helm_values = get_template(os.path.join(
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml'))
+ ch_root_path = Path(CH_ROOT)
+ values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml'
+ helm_values = get_template(values_yaml_path)
helm_values = dict_merge(helm_values,
- collect_helm_values(CH_ROOT, env=self.env))
+ collect_helm_values(ch_root_path, env=self.env))
return helm_values
@@ -273,7 +273,7 @@ def create_tls_certificate(self, helm_values):
# copy bootstrap file
cur_dir = os.getcwd()
- os.chdir(os.path.join(HERE, 'scripts'))
+ os.chdir(Path(HERE) / 'scripts')
tar = tarfile.open(bootstrap_file + '.tar', mode='w')
try:
tar.add(bootstrap_file)
@@ -291,10 +291,11 @@ def create_tls_certificate(self, helm_values):
bits, stat = container.get_archive('/mnt/certs')
if not certs_folder_path.exists():
certs_folder_path.mkdir(parents=True)
- with open(certs_parent_folder_path / 'certs.tar', 'wb') as f:
+ certs_tar = certs_parent_folder_path / 'certs.tar'
+ with open(certs_tar, 'wb') as f:
for chunk in bits:
f.write(chunk)
- cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar')
+ cf = tarfile.open(certs_tar)
cf.extractall(path=certs_parent_folder_path)
logs = container.logs()
@@ -409,20 +410,19 @@ def image_tag(self, image_name, build_context_path=None, dependencies=()):
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
- specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
- if os.path.exists(specific_template_path):
- logging.info("Specific values template found: " +
- specific_template_path)
+ deploy_path = app_path / 'deploy'
+ specific_template_path = deploy_path / 'values.yaml'
+ if specific_template_path.exists():
+ logging.info(f"Specific values template found: {specific_template_path}")
values = get_template(specific_template_path)
else:
values = {}
for e in self.env:
- specific_template_path = os.path.join(
- app_path, 'deploy', f'values-{e}.yaml')
- if os.path.exists(specific_template_path):
+ specific_template_path = deploy_path / f'values-{e}.yaml'
+ if specific_template_path.exists():
logging.info(
- "Specific environment values template found: " + specific_template_path)
+ f"Specific environment values template found: {specific_template_path}")
with open(specific_template_path) as f:
values_env_specific = yaml.safe_load(f)
values = dict_merge(values, values_env_specific)
@@ -433,6 +433,8 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
image_paths = [path for path in find_dockerfiles_paths(
app_path) if 'tasks/' not in path and 'subapps' not in path]
+
if len(image_paths) > 1:
logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name,
image_paths[0])
@@ -463,7 +465,7 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
for task_path in task_images_paths:
task_name = app_name_from_path(os.path.relpath(
- task_path, os.path.dirname(app_path)))
+ task_path, app_path.parent))
img_name = image_name_from_dockerfile_path(task_name, base_image_name)
values[KEY_TASK_IMAGES][task_name] = self.image_tag(
@@ -503,53 +505,51 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat
:param exclude:
:return:
"""
- app_base_path = os.path.join(search_root, APPS_PATH)
+ app_base_path = search_root / APPS_PATH
- for app_path in get_sub_paths(app_base_path):
- app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+    for app_path in app_base_path.glob("*/"):  # "*/" matches only sub-directories
+        app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
if app_name in exclude or (include and not any(inc in app_name for inc in include)):
continue
- template_dir = os.path.join(app_path, 'deploy', 'templates', templates_path)
- if os.path.exists(template_dir):
- dest_dir = os.path.join(
- dest_helm_chart_path, 'templates', app_name)
+ template_dir = app_path / 'deploy' / 'templates' / templates_path
+ if template_dir.exists():
+ dest_dir = dest_helm_chart_path / 'templates' / app_name
logging.info(
"Collecting templates for application %s to %s", app_name, dest_dir)
- if os.path.exists(dest_dir):
+ if dest_dir.exists():
logging.warning(
"Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(template_dir, dest_dir)
+ merge_configuration_directories(f"{template_dir}", f"{dest_dir}")
else:
shutil.copytree(template_dir, dest_dir)
- resources_dir = os.path.join(app_path, 'deploy/resources')
- if os.path.exists(resources_dir):
- dest_dir = os.path.join(
- dest_helm_chart_path, 'resources', app_name)
+ resources_dir = app_path / 'deploy' / 'resources'
+ if resources_dir.exists():
+ dest_dir = dest_helm_chart_path / 'resources' / app_name
logging.info(
"Collecting resources for application %s to %s", app_name, dest_dir)
- merge_configuration_directories(resources_dir, dest_dir)
+ merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
- subchart_dir = os.path.join(app_path, 'deploy/charts')
- if os.path.exists(subchart_dir):
- dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name)
+ subchart_dir = app_path / 'deploy/charts'
+ if subchart_dir.exists():
+ dest_dir = dest_helm_chart_path / 'charts' / app_name
logging.info(
"Collecting templates for application %s to %s", app_name, dest_dir)
- if os.path.exists(dest_dir):
+ if dest_dir.exists():
logging.warning(
"Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(subchart_dir, dest_dir)
+ merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
else:
shutil.copytree(subchart_dir, dest_dir)
def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
- if not os.path.exists(base_helm_chart):
+ if not base_helm_chart.exists():
return
- if os.path.exists(dest_helm_chart_path):
+ if dest_helm_chart_path.exists():
logging.info("Merging/overriding all files in directory %s",
dest_helm_chart_path)
merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
@@ -563,9 +563,7 @@ def collect_helm_values(deployment_root, env=()):
"""
Creates helm values from a cloudharness deployment scaffolding
"""
-
- values_template_path = os.path.join(
- deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml')
+ values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml'
values = get_template(values_template_path)
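
This squash starts the port from os.path to pathlib; the transformation is
mechanical, as in this standalone sketch (paths hypothetical):

    import os
    from pathlib import Path

    deployment_root = Path('deployment')
    config_dir = 'deployment-configuration'

    # before: string-based joining
    before = os.path.join(f"{deployment_root}", config_dir, 'values-template.yaml')
    # after: the / operator on Path objects
    after = deployment_root / config_dir / 'values-template.yaml'
    assert before == str(after)  # same path, object-based API
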
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index d8aaebda2..e5cf49f18 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -131,7 +131,6 @@ if __name__ == "__main__":
tls=not args.no_tls,
env=envs,
namespace=args.namespace,
- templates_path="compose",
)
merged_root_paths = preprocess_build_overrides(
From 6bbae19137873aa6970ae36d77680b2f0d750d3c Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 7 Feb 2024 10:24:48 -0600
Subject: [PATCH 03/94] CH-100 Add first dedicated skaffold generation for
docker compose
---
.../cloudharness_utils/constants.py | 2 +
.../ch_cli_tools/dockercompose.py | 47 +++-
.../ch_cli_tools/skaffoldcompose.py | 251 ++++++++++++++++++
tools/deployment-cli-tools/harness-deployment | 6 +-
4 files changed, 300 insertions(+), 6 deletions(-)
create mode 100644 tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py
index 4b42761a6..168b78116 100644
--- a/libraries/cloudharness-utils/cloudharness_utils/constants.py
+++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py
@@ -10,6 +10,8 @@
HELM_PATH = "helm"
HELM_CHART_PATH = HELM_PATH
+COMPOSE = 'compose'
+
INFRASTRUCTURE_PATH = 'infrastructure'
STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images')
BASE_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'base-images')
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 06bf6d230..2cf768a4b 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -15,10 +15,10 @@
from . import HERE, CH_ROOT
from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
- DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH
+ DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE
from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
- find_dockerfiles_paths
+ find_dockerfiles_paths, find_file_paths
from .models import HarnessMainConfig
@@ -31,7 +31,6 @@
KEY_TEST_IMAGES = 'test-images'
DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
-COMPOSE = 'compose'
def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
@@ -50,7 +49,7 @@ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=Non
assert domain, 'A domain must be specified'
self.root_paths = [Path(r) for r in root_paths]
self.tag = tag
- if not registry.endswith('/'):
+ if registry and not registry.endswith('/'):
self.registry = f'{registry}/'
else:
self.registry = registry
@@ -433,7 +432,10 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
image_paths = [path for path in find_dockerfiles_paths(
app_path) if 'tasks/' not in path and 'subapps' not in path]
+
+ # Inject entry points commands
+ for image_path in image_paths:
+ self.inject_entry_points_commands(values, image_path, app_path)
if len(image_paths) > 1:
logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name,
@@ -474,6 +476,18 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
return values
+ def inject_entry_points_commands(self, helm_values, image_path, app_path):
+ context_path = os.path.relpath(image_path, '.')
+
+ mains_candidates = find_file_paths(context_path, '__main__.py')
+
+ task_main_file = identify_unicorn_based_main(mains_candidates, app_path)
+
+ if task_main_file:
+ helm_values[KEY_HARNESS]['deployment']['command'] = ['python']
+ helm_values[KEY_HARNESS]['deployment']['args'] = [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py']
+
+
def get_included_with_dependencies(values, include):
app_values = values['apps'].values()
directly_included = [app for app in app_values if any(
@@ -749,3 +763,26 @@ def validate_dependencies(values):
if not_found:
raise ValuesValidationException(
f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
+
+
+def identify_unicorn_based_main(candidates, app_path):
+ import re
+ gunicorn_pattern = re.compile(r"gunicorn")
+ # sort candidates, shortest path first
+    for candidate in sorted(candidates, key=lambda x: len(x.split("/"))):
+ dockerfile_path = f"{candidate}/.."
+ while not os.path.exists(f"{dockerfile_path}/Dockerfile") and os.path.abspath(dockerfile_path) != os.path.abspath(app_path):
+ dockerfile_path += "/.."
+ dockerfile = f"{dockerfile_path}/Dockerfile"
+ if not os.path.exists(dockerfile):
+ continue
+ with open(dockerfile, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ requirements = f"{candidate}/../requirements.txt"
+ if not os.path.exists(requirements):
+ continue
+ with open(requirements, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ return None
\ No newline at end of file
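
The heuristic above climbs from each __main__.py candidate towards the application
root until it finds the governing Dockerfile, then greps it (or the sibling
requirements.txt) for gunicorn. An equivalent pathlib sketch of the same walk,
under the same assumptions, with an extra guard against walking past the
filesystem root:

    from pathlib import Path

    def looks_gunicorn_based(candidate, app_path):
        """Sketch: does the Dockerfile governing `candidate`, or its sibling
        requirements.txt, mention gunicorn?"""
        directory = Path(candidate).resolve().parent
        app_root = Path(app_path).resolve()
        # climb until a Dockerfile appears, the app root is reached,
        # or we hit the filesystem root
        while (not (directory / 'Dockerfile').exists()
               and directory != app_root and directory != directory.parent):
            directory = directory.parent
        dockerfile = directory / 'Dockerfile'
        if dockerfile.exists() and 'gunicorn' in dockerfile.read_text():
            return True
        requirements = Path(candidate).resolve().parent / 'requirements.txt'
        return requirements.exists() and 'gunicorn' in requirements.read_text()
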
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
new file mode 100644
index 000000000..27a4701ac
--- /dev/null
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
@@ -0,0 +1,251 @@
+import os
+import logging
+import json
+import time
+
+from os.path import join, relpath, basename, exists, abspath
+from cloudharness_model import ApplicationTestConfig, HarnessMainConfig
+
+from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \
+ BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE, HELM_PATH
+from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES
+from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \
+ find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name
+
+def relpath_if(p1, p2):
+ if os.path.isabs(p1):
+ return p1
+ return relpath(p1, p2)
+
+def create_skaffold_compose_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True):
+ skaffold_conf = get_template('skaffold-template.yaml', True)
+ apps = helm_values.apps
+ base_image_name = (helm_values.registry.name or "") + helm_values.name
+ artifacts = {}
+ overrides = {}
+
+ def remove_tag(image_name):
+ return image_name.split(":")[0]
+
+ def get_image_tag(name):
+ return f"{get_image_name(name, base_image_name)}"
+
+ builds = {}
+
+ def build_artifact(image_name, context_path, requirements=None, dockerfile_path=''):
+ artifact_spec = {
+ 'image': image_name,
+ 'context': context_path,
+ 'docker': {
+ 'dockerfile': join(dockerfile_path, 'Dockerfile'),
+ 'buildArgs': {
+ 'REGISTRY': helm_values.registry.name,
+ 'TAG': helm_values.tag,
+ 'NOCACHE': str(time.time())
+ },
+ 'ssh': 'default'
+ }
+ }
+ if requirements:
+ artifact_spec['requires'] = [{'image': get_image_tag(req), 'alias': req.replace('-', '_').upper()} for req
+ in requirements]
+ return artifact_spec
+
+
+ base_images = set()
+
+ def process_build_dockerfile(dockerfile_path, root_path, global_context=False, requirements=None, app_name=None):
+ if app_name is None:
+ app_name = app_name_from_path(basename(dockerfile_path))
+ if app_name in helm_values[KEY_TASK_IMAGES] or app_name.replace("-", "_") in helm_values.apps:
+ context_path = relpath_if(root_path, output_path) if global_context else relpath_if(dockerfile_path, output_path)
+
+ builds[app_name] = context_path
+ base_images.add(get_image_name(app_name))
+ artifacts[app_name] = build_artifact(
+ get_image_tag(app_name),
+ context_path,
+ dockerfile_path=relpath(dockerfile_path, output_path),
+ requirements=requirements or guess_build_dependencies_from_dockerfile(dockerfile_path)
+ )
+
+ for root_path in root_paths:
+ skaffold_conf = dict_merge(skaffold_conf, get_template(
+ join(root_path, DEPLOYMENT_CONFIGURATION_PATH, 'skaffold-template.yaml')))
+
+ base_dockerfiles = find_dockerfiles_paths(
+ join(root_path, BASE_IMAGES_PATH))
+
+ for dockerfile_path in base_dockerfiles:
+ process_build_dockerfile(dockerfile_path, root_path, global_context=True)
+
+ release_config = skaffold_conf['deploy']['helm']['releases'][0]
+ release_config['name'] = helm_values.namespace
+ release_config['namespace'] = helm_values.namespace
+ release_config['artifactOverrides'][KEY_APPS] = {}
+
+ static_images = set()
+ for root_path in root_paths:
+ static_dockerfiles = find_dockerfiles_paths(
+ join(root_path, STATIC_IMAGES_PATH))
+
+ for dockerfile_path in static_dockerfiles:
+ process_build_dockerfile(dockerfile_path, root_path)
+
+
+ for root_path in root_paths:
+ apps_path = join(root_path, APPS_PATH)
+ app_dockerfiles = find_dockerfiles_paths(apps_path)
+
+ release_config['artifactOverrides'][KEY_TASK_IMAGES] = {
+ task_image: remove_tag(helm_values[KEY_TASK_IMAGES][task_image])
+ for task_image in helm_values[KEY_TASK_IMAGES]
+ }
+ for dockerfile_path in app_dockerfiles:
+ app_relative_to_skaffold = os.path.relpath(
+ dockerfile_path, output_path)
+ context_path = os.path.relpath(dockerfile_path, '.')
+ app_relative_to_base = os.path.relpath(dockerfile_path, apps_path)
+ app_name = app_name_from_path(app_relative_to_base)
+ app_key = app_name.replace('-', '_')
+ if app_key not in apps:
+ if 'tasks' in app_relative_to_base and manage_task_images:
+ parent_app_name = app_name_from_path(
+ app_relative_to_base.split('/tasks')[0])
+ parent_app_key = parent_app_name.replace('-', '_')
+
+ if parent_app_key in apps:
+ artifacts[app_key] = build_artifact(get_image_tag(app_name), app_relative_to_skaffold,
+ base_images.union(static_images))
+
+ continue
+
+ build_requirements = apps[app_key][KEY_HARNESS].dependencies.build
+ # app_image_tag = remove_tag(
+ # apps[app_key][KEY_HARNESS][KEY_DEPLOYMENT]['image'])
+ # artifacts[app_key] = build_artifact(
+ # app_image_tag, app_relative_to_skaffold, build_requirements)
+ process_build_dockerfile(dockerfile_path, root_path, requirements=build_requirements, app_name=app_name)
+ app = apps[app_key]
+ if app[KEY_HARNESS][KEY_DEPLOYMENT]['image']:
+ release_config['artifactOverrides']['apps'][app_key] = \
+ {
+ KEY_HARNESS: {
+ KEY_DEPLOYMENT: {
+ 'image': remove_tag(app[KEY_HARNESS][KEY_DEPLOYMENT]['image'])
+ }
+ }
+ }
+
+ mains_candidates = find_file_paths(context_path, '__main__.py')
+
+ def identify_unicorn_based_main(candidates):
+ import re
+ gunicorn_pattern = re.compile(r"gunicorn")
+ # sort candidates, shortest path first
+ for candidate in sorted(candidates,key=lambda x: len(x.split("/"))):
+ dockerfile_path = f"{candidate}/.."
+ while not exists(f"{dockerfile_path}/Dockerfile") and abspath(dockerfile_path) != abspath(root_path):
+ dockerfile_path += "/.."
+ dockerfile = f"{dockerfile_path}/Dockerfile"
+ if not exists(dockerfile):
+ continue
+ with open(dockerfile, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ requirements = f"{candidate}/../requirements.txt"
+ if not exists(requirements):
+ continue
+ with open(requirements, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ return None
+
+ task_main_file = identify_unicorn_based_main(mains_candidates)
+
+ if task_main_file:
+ release_config['overrides']['apps'][app_key] = \
+ {
+ 'harness': {
+ 'deployment': {
+ 'command': ['python'],
+ 'args': [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py']
+ }
+ }
+ }
+
+ test_config: ApplicationTestConfig = helm_values.apps[app_key].harness.test
+ if test_config.unit.enabled and test_config.unit.commands:
+
+ skaffold_conf['test'].append(dict(
+ image=get_image_tag(app_name),
+ custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands]
+ ))
+
+
+ del skaffold_conf['deploy']
+ skaffold_conf['deploy'] = {
+ 'docker': {
+ 'useCompose': True,
+ 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']]
+ }
+ }
+
+ skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
+
+ merge_to_yaml_file(skaffold_conf, os.path.join(
+ output_path, 'skaffold.yaml'))
+
+ return skaffold_conf
+
+
+def create_vscode_debug_configuration(root_paths, helm_values):
+ logging.info(
+ "Creating VS code cloud build configuration.\nCloud build extension is needed to debug.")
+
+ vscode_launch_path = '.vscode/launch.json'
+
+ vs_conf = get_json_template(vscode_launch_path, True)
+ base_image_name = helm_values.name
+ debug_conf = get_json_template('vscode-debug-template.json', True)
+
+ def get_image_tag(name):
+ return f"{get_image_name(name, base_image_name)}"
+
+ if helm_values.registry.name:
+ base_image_name = helm_values.registry.name + helm_values.name
+ for i in range(len(vs_conf['configurations'])):
+ conf = vs_conf['configurations'][i]
+ if conf['name'] == debug_conf['name']:
+ del vs_conf['configurations'][i]
+ break
+ vs_conf['configurations'].append(debug_conf)
+
+ apps = helm_values.apps
+
+ for root_path in root_paths:
+ apps_path = os.path.join(root_path, 'applications')
+
+ src_root_paths = find_file_paths(apps_path, 'setup.py')
+
+ for path in src_root_paths:
+ app_relative_to_base = os.path.relpath(path, apps_path)
+ app_relative_to_root = os.path.relpath(path, '.')
+ app_name = app_name_from_path(app_relative_to_base.split('/')[0])
+ app_key = app_name.replace('-', '_')
+ if app_key in apps.keys():
+ debug_conf["debug"].append({
+ "image": get_image_tag(app_name),
+ "sourceFileMap": {
+ "justMyCode": False,
+ f"${{workspaceFolder}}/{app_relative_to_root}": apps[app_key].harness.get('sourceRoot',
+ "/usr/src/app"),
+ }
+ })
+
+
+ if not os.path.exists(os.path.dirname(vscode_launch_path)):
+ os.makedirs(os.path.dirname(vscode_launch_path))
+ with open(vscode_launch_path, 'w') as f:
+ json.dump(vs_conf, f, indent=2, sort_keys=True)
\ No newline at end of file
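
The decisive change in this module, compared to the helm-based generator it
copies, is the deploy section: instead of a helm release, the generated
skaffold.yaml hands the built images to docker compose. The mapping it emits,
expressed as plain Python (image names hypothetical):

    artifacts = {
        'samples': {'image': 'reg/myapp/samples'},
        'events': {'image': 'reg/myapp/events'},
    }
    skaffold_conf = {'deploy': None}
    skaffold_conf['deploy'] = {
        'docker': {
            'useCompose': True,
            'images': [a['image'] for a in artifacts.values() if a['image']],
        }
    }
    # skaffold builds the artifacts, then runs docker compose with these images
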
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index e5cf49f18..978975168 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -7,6 +7,7 @@ import os
from ch_cli_tools.dockercompose import create_docker_compose_configuration
from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy
from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration
+from ch_cli_tools.skaffoldcompose import create_skaffold_compose_configuration
from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file
from ch_cli_tools.preprocessing import preprocess_build_overrides
from ch_cli_tools.utils import merge_app_directories
@@ -148,7 +149,10 @@ if __name__ == "__main__":
if args.write_env:
write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env"))
- create_skaffold_configuration(merged_root_paths, helm_values)
+ if not args.docker_compose:
+ create_skaffold_configuration(merged_root_paths, helm_values)
+ else:
+ create_skaffold_compose_configuration(merged_root_paths, helm_values)
create_vscode_debug_configuration(root_paths, helm_values)
hosts_info(helm_values)
From 528754579c3a33e993f623cb9e1a4fe9d86748fa Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 7 Feb 2024 10:33:07 -0600
Subject: [PATCH 04/94] CH-100 Make skaffold script a little bit more generic
(ugly)
---
.../cloudharness_utils/constants.py | 2 +
.../ch_cli_tools/skaffold.py | 19 +-
.../ch_cli_tools/skaffoldcompose.py | 251 ------------------
tools/deployment-cli-tools/harness-deployment | 21 +-
4 files changed, 18 insertions(+), 275 deletions(-)
delete mode 100644 tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py
index 168b78116..a5163f2be 100644
--- a/libraries/cloudharness-utils/cloudharness_utils/constants.py
+++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py
@@ -9,8 +9,10 @@
HELM_PATH = "helm"
HELM_CHART_PATH = HELM_PATH
+HELM_ENGINE = HELM_PATH
COMPOSE = 'compose'
+COMPOSE_ENGINE = 'docker-compose'
INFRASTRUCTURE_PATH = 'infrastructure'
STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images')
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index c0de5764b..bc66d6168 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -7,7 +7,7 @@
from cloudharness_model import ApplicationTestConfig, HarnessMainConfig
from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \
- BASE_IMAGES_PATH, STATIC_IMAGES_PATH
+ BASE_IMAGES_PATH, STATIC_IMAGES_PATH, HELM_ENGINE, COMPOSE_ENGINE
from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES
from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \
find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name
@@ -17,12 +17,13 @@ def relpath_if(p1, p2):
return p1
return relpath(p1, p2)
-def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True):
+def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True, backend_deploy=HELM_ENGINE):
skaffold_conf = get_template('skaffold-template.yaml', True)
apps = helm_values.apps
base_image_name = (helm_values.registry.name or "") + helm_values.name
artifacts = {}
overrides = {}
+ backend = backend_deploy or HELM_ENGINE
def remove_tag(image_name):
return image_name.split(":")[0]
@@ -183,10 +184,18 @@ def identify_unicorn_based_main(candidates):
custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands]
))
+ if backend == COMPOSE_ENGINE:
+ del skaffold_conf['deploy']
+ skaffold_conf['deploy'] = {
+ 'docker': {
+ 'useCompose': True,
+ 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']]
+ }
+ }
- skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
- merge_to_yaml_file(skaffold_conf, os.path.join(
- output_path, 'skaffold.yaml'))
+ skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
+ merge_to_yaml_file(skaffold_conf, os.path.join(
+ output_path, 'skaffold.yaml'))
return skaffold_conf
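
With the new parameter a single entry point serves both backends; a usage sketch,
assuming merged_root_paths and helm_values come from the surrounding
harness-deployment run:

    from cloudharness_utils.constants import COMPOSE_ENGINE
    from ch_cli_tools.skaffold import create_skaffold_configuration

    # helm backend (the default), unchanged behaviour:
    skaffold_conf = create_skaffold_configuration(merged_root_paths, helm_values)
    # compose backend: the deploy section is replaced by docker/useCompose:
    skaffold_conf = create_skaffold_configuration(
        merged_root_paths, helm_values, backend_deploy=COMPOSE_ENGINE)
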
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py b/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
deleted file mode 100644
index 27a4701ac..000000000
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffoldcompose.py
+++ /dev/null
@@ -1,251 +0,0 @@
-import os
-import logging
-import json
-import time
-
-from os.path import join, relpath, basename, exists, abspath
-from cloudharness_model import ApplicationTestConfig, HarnessMainConfig
-
-from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \
- BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE, HELM_PATH
-from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES
-from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \
- find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name
-
-def relpath_if(p1, p2):
- if os.path.isabs(p1):
- return p1
- return relpath(p1, p2)
-
-def create_skaffold_compose_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True):
- skaffold_conf = get_template('skaffold-template.yaml', True)
- apps = helm_values.apps
- base_image_name = (helm_values.registry.name or "") + helm_values.name
- artifacts = {}
- overrides = {}
-
- def remove_tag(image_name):
- return image_name.split(":")[0]
-
- def get_image_tag(name):
- return f"{get_image_name(name, base_image_name)}"
-
- builds = {}
-
- def build_artifact(image_name, context_path, requirements=None, dockerfile_path=''):
- artifact_spec = {
- 'image': image_name,
- 'context': context_path,
- 'docker': {
- 'dockerfile': join(dockerfile_path, 'Dockerfile'),
- 'buildArgs': {
- 'REGISTRY': helm_values.registry.name,
- 'TAG': helm_values.tag,
- 'NOCACHE': str(time.time())
- },
- 'ssh': 'default'
- }
- }
- if requirements:
- artifact_spec['requires'] = [{'image': get_image_tag(req), 'alias': req.replace('-', '_').upper()} for req
- in requirements]
- return artifact_spec
-
-
- base_images = set()
-
- def process_build_dockerfile(dockerfile_path, root_path, global_context=False, requirements=None, app_name=None):
- if app_name is None:
- app_name = app_name_from_path(basename(dockerfile_path))
- if app_name in helm_values[KEY_TASK_IMAGES] or app_name.replace("-", "_") in helm_values.apps:
- context_path = relpath_if(root_path, output_path) if global_context else relpath_if(dockerfile_path, output_path)
-
- builds[app_name] = context_path
- base_images.add(get_image_name(app_name))
- artifacts[app_name] = build_artifact(
- get_image_tag(app_name),
- context_path,
- dockerfile_path=relpath(dockerfile_path, output_path),
- requirements=requirements or guess_build_dependencies_from_dockerfile(dockerfile_path)
- )
-
- for root_path in root_paths:
- skaffold_conf = dict_merge(skaffold_conf, get_template(
- join(root_path, DEPLOYMENT_CONFIGURATION_PATH, 'skaffold-template.yaml')))
-
- base_dockerfiles = find_dockerfiles_paths(
- join(root_path, BASE_IMAGES_PATH))
-
- for dockerfile_path in base_dockerfiles:
- process_build_dockerfile(dockerfile_path, root_path, global_context=True)
-
- release_config = skaffold_conf['deploy']['helm']['releases'][0]
- release_config['name'] = helm_values.namespace
- release_config['namespace'] = helm_values.namespace
- release_config['artifactOverrides'][KEY_APPS] = {}
-
- static_images = set()
- for root_path in root_paths:
- static_dockerfiles = find_dockerfiles_paths(
- join(root_path, STATIC_IMAGES_PATH))
-
- for dockerfile_path in static_dockerfiles:
- process_build_dockerfile(dockerfile_path, root_path)
-
-
- for root_path in root_paths:
- apps_path = join(root_path, APPS_PATH)
- app_dockerfiles = find_dockerfiles_paths(apps_path)
-
- release_config['artifactOverrides'][KEY_TASK_IMAGES] = {
- task_image: remove_tag(helm_values[KEY_TASK_IMAGES][task_image])
- for task_image in helm_values[KEY_TASK_IMAGES]
- }
- for dockerfile_path in app_dockerfiles:
- app_relative_to_skaffold = os.path.relpath(
- dockerfile_path, output_path)
- context_path = os.path.relpath(dockerfile_path, '.')
- app_relative_to_base = os.path.relpath(dockerfile_path, apps_path)
- app_name = app_name_from_path(app_relative_to_base)
- app_key = app_name.replace('-', '_')
- if app_key not in apps:
- if 'tasks' in app_relative_to_base and manage_task_images:
- parent_app_name = app_name_from_path(
- app_relative_to_base.split('/tasks')[0])
- parent_app_key = parent_app_name.replace('-', '_')
-
- if parent_app_key in apps:
- artifacts[app_key] = build_artifact(get_image_tag(app_name), app_relative_to_skaffold,
- base_images.union(static_images))
-
- continue
-
- build_requirements = apps[app_key][KEY_HARNESS].dependencies.build
- # app_image_tag = remove_tag(
- # apps[app_key][KEY_HARNESS][KEY_DEPLOYMENT]['image'])
- # artifacts[app_key] = build_artifact(
- # app_image_tag, app_relative_to_skaffold, build_requirements)
- process_build_dockerfile(dockerfile_path, root_path, requirements=build_requirements, app_name=app_name)
- app = apps[app_key]
- if app[KEY_HARNESS][KEY_DEPLOYMENT]['image']:
- release_config['artifactOverrides']['apps'][app_key] = \
- {
- KEY_HARNESS: {
- KEY_DEPLOYMENT: {
- 'image': remove_tag(app[KEY_HARNESS][KEY_DEPLOYMENT]['image'])
- }
- }
- }
-
- mains_candidates = find_file_paths(context_path, '__main__.py')
-
- def identify_unicorn_based_main(candidates):
- import re
- gunicorn_pattern = re.compile(r"gunicorn")
- # sort candidates, shortest path first
- for candidate in sorted(candidates,key=lambda x: len(x.split("/"))):
- dockerfile_path = f"{candidate}/.."
- while not exists(f"{dockerfile_path}/Dockerfile") and abspath(dockerfile_path) != abspath(root_path):
- dockerfile_path += "/.."
- dockerfile = f"{dockerfile_path}/Dockerfile"
- if not exists(dockerfile):
- continue
- with open(dockerfile, 'r') as file:
- if re.search(gunicorn_pattern, file.read()):
- return candidate
- requirements = f"{candidate}/../requirements.txt"
- if not exists(requirements):
- continue
- with open(requirements, 'r') as file:
- if re.search(gunicorn_pattern, file.read()):
- return candidate
- return None
-
- task_main_file = identify_unicorn_based_main(mains_candidates)
-
- if task_main_file:
- release_config['overrides']['apps'][app_key] = \
- {
- 'harness': {
- 'deployment': {
- 'command': ['python'],
- 'args': [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py']
- }
- }
- }
-
- test_config: ApplicationTestConfig = helm_values.apps[app_key].harness.test
- if test_config.unit.enabled and test_config.unit.commands:
-
- skaffold_conf['test'].append(dict(
- image=get_image_tag(app_name),
- custom=[dict(command="docker run $IMAGE " + cmd) for cmd in test_config.unit.commands]
- ))
-
-
- del skaffold_conf['deploy']
- skaffold_conf['deploy'] = {
- 'docker': {
- 'useCompose': True,
- 'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']]
- }
- }
-
- skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
-
- merge_to_yaml_file(skaffold_conf, os.path.join(
- output_path, 'skaffold.yaml'))
-
- return skaffold_conf
-
-
-def create_vscode_debug_configuration(root_paths, helm_values):
- logging.info(
- "Creating VS code cloud build configuration.\nCloud build extension is needed to debug.")
-
- vscode_launch_path = '.vscode/launch.json'
-
- vs_conf = get_json_template(vscode_launch_path, True)
- base_image_name = helm_values.name
- debug_conf = get_json_template('vscode-debug-template.json', True)
-
- def get_image_tag(name):
- return f"{get_image_name(name, base_image_name)}"
-
- if helm_values.registry.name:
- base_image_name = helm_values.registry.name + helm_values.name
- for i in range(len(vs_conf['configurations'])):
- conf = vs_conf['configurations'][i]
- if conf['name'] == debug_conf['name']:
- del vs_conf['configurations'][i]
- break
- vs_conf['configurations'].append(debug_conf)
-
- apps = helm_values.apps
-
- for root_path in root_paths:
- apps_path = os.path.join(root_path, 'applications')
-
- src_root_paths = find_file_paths(apps_path, 'setup.py')
-
- for path in src_root_paths:
- app_relative_to_base = os.path.relpath(path, apps_path)
- app_relative_to_root = os.path.relpath(path, '.')
- app_name = app_name_from_path(app_relative_to_base.split('/')[0])
- app_key = app_name.replace('-', '_')
- if app_key in apps.keys():
- debug_conf["debug"].append({
- "image": get_image_tag(app_name),
- "sourceFileMap": {
- "justMyCode": False,
- f"${{workspaceFolder}}/{app_relative_to_root}": apps[app_key].harness.get('sourceRoot',
- "/usr/src/app"),
- }
- })
-
-
- if not os.path.exists(os.path.dirname(vscode_launch_path)):
- os.makedirs(os.path.dirname(vscode_launch_path))
- with open(vscode_launch_path, 'w') as f:
- json.dump(vs_conf, f, indent=2, sort_keys=True)
\ No newline at end of file
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index 978975168..9a5cc78cc 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -7,11 +7,10 @@ import os
from ch_cli_tools.dockercompose import create_docker_compose_configuration
from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy
from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration
-from ch_cli_tools.skaffoldcompose import create_skaffold_compose_configuration
from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file
from ch_cli_tools.preprocessing import preprocess_build_overrides
from ch_cli_tools.utils import merge_app_directories
-from cloudharness_utils.constants import DEPLOYMENT_PATH
+from cloudharness_utils.constants import DEPLOYMENT_PATH, COMPOSE_ENGINE
HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/')
ROOT = os.path.dirname(os.path.dirname(HERE)).replace(os.path.sep, '/')
@@ -85,22 +84,6 @@ if __name__ == "__main__":
merge_app_directories(root_paths, destination=args.merge)
root_paths = [args.merge]
- # helm_values = create_helm_chart(
- # root_paths,
- # tag=args.tag,
- # registry=args.registry,
- # domain=args.domain,
- # local=args.local,
- # secured=not args.unsecured,
- # output_path=args.output_path,
- # exclude=args.exclude,
- # include=args.include,
- # registry_secret=args.registry_secret,
- # tls=not args.no_tls,
- # env=envs,
- # namespace=args.namespace
- # )
-
if not args.docker_compose:
helm_values = create_helm_chart(
root_paths,
@@ -152,7 +135,7 @@ if __name__ == "__main__":
if not args.docker_compose:
create_skaffold_configuration(merged_root_paths, helm_values)
else:
- create_skaffold_compose_configuration(merged_root_paths, helm_values)
+ create_skaffold_configuration(merged_root_paths, helm_values, backend_deploy=COMPOSE_ENGINE)
create_vscode_debug_configuration(root_paths, helm_values)
hosts_info(helm_values)
From 9f75c9c109f0591ff845c20d370aba21e93fc74e Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 7 Feb 2024 11:30:18 -0600
Subject: [PATCH 05/94] CH-100 Fix issue with entrypoint
---
deployment-configuration/compose/templates/auto-compose.yaml | 3 +++
tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 4 ++--
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 5b4893baa..43bd84014 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -29,6 +29,9 @@ services:
reservations:
cpus: {{ $deployment.resources.requests.cpu | default "25m" }}
memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }}
+ {{- with $deployment.command }}
+ entrypoint: {{ cat . $deployment.args }}
+ {{- end }}
environment:
- CH_CURRENT_APP_NAME={{ $app_name | quote }}
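
Given the string command/args now produced by inject_entry_points_commands (see
the dockercompose.py hunk below), sprig's `cat` joins its arguments with spaces,
so the block above renders to a single compose entrypoint line; a sketch of the
substitution ('myapp' is hypothetical):

    command = 'python'
    args = '/usr/src/app/myapp/__main__.py'
    entrypoint = f"{command} {args}"  # what `cat . $deployment.args` yields
    print(f"entrypoint: {entrypoint}")
    # -> entrypoint: python /usr/src/app/myapp/__main__.py
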
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 2cf768a4b..a935899e2 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -484,8 +484,8 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path):
task_main_file = identify_unicorn_based_main(mains_candidates, app_path)
if task_main_file:
- helm_values[KEY_HARNESS]['deployment']['command'] = ['python']
- helm_values[KEY_HARNESS]['deployment']['args'] = [f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py']
+ helm_values[KEY_HARNESS]['deployment']['command'] = 'python'
+ helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'
def get_included_with_dependencies(values, include):
From c159a4f0d266592d4269e4911969065bd7acb764 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 8 Feb 2024 08:18:56 -0600
Subject: [PATCH 06/94] CH-100 Remove generation of chart files for
docker-compose
---
.../ch_cli_tools/dockercompose.py | 24 +++++++++----------
1 file changed, 12 insertions(+), 12 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index a935899e2..9abcd5652 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -546,18 +546,18 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat
merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
- subchart_dir = app_path / 'deploy/charts'
- if subchart_dir.exists():
- dest_dir = dest_helm_chart_path / 'charts' / app_name
-
- logging.info(
- "Collecting templates for application %s to %s", app_name, dest_dir)
- if dest_dir.exists():
- logging.warning(
- "Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
- else:
- shutil.copytree(subchart_dir, dest_dir)
+ # subchart_dir = app_path / 'deploy/charts'
+ # if subchart_dir.exists():
+ # dest_dir = dest_helm_chart_path / 'charts' / app_name
+
+ # logging.info(
+ # "Collecting templates for application %s to %s", app_name, dest_dir)
+ # if dest_dir.exists():
+ # logging.warning(
+ # "Merging/overriding all files in directory %s", dest_dir)
+ # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
+ # else:
+ # shutil.copytree(subchart_dir, dest_dir)
def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
From 02bd318b84667e8cfc4a3a94fc0e2020c2ff79ac Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 9 Feb 2024 10:34:44 -0600
Subject: [PATCH 07/94] CH-100 Roll back default per-app helm template directory
---
.../argo/deploy/templates/{helm => }/argo-sa.yaml | 0
.../deploy/templates/{helm => }/broker-config.yml | 0
.../events/deploy/templates/{helm => }/configmap.yaml | 0
.../deploy/templates/{helm => }/deployments.yml | 0
.../events/deploy/templates/{helm => }/roles.yml | 0
.../events/deploy/templates/{helm => }/services.yml | 0
.../events/deploy/templates/{helm => }/zoo-config.yml | 0
.../templates/{helm => }/_helpers-auth-rework.tpl | 0
.../deploy/templates/{helm => }/_helpers-names.tpl | 0
.../deploy/templates/{helm => }/_helpers.tpl | 0
.../templates/{helm => }/hub/_helpers-passwords.tpl | 0
.../deploy/templates/{helm => }/hub/configmap.yaml | 0
.../deploy/templates/{helm => }/hub/deployment.yaml | 0
.../deploy/templates/{helm => }/hub/netpol.yaml | 0
.../deploy/templates/{helm => }/hub/pdb.yaml | 0
.../deploy/templates/{helm => }/hub/pvc.yaml | 0
.../deploy/templates/{helm => }/hub/rbac.yaml | 0
.../deploy/templates/{helm => }/hub/secret.yaml | 0
.../deploy/templates/{helm => }/hub/service.yaml | 0
.../{helm => }/image-puller/_helpers-daemonset.tpl | 0
.../{helm => }/image-puller/daemonset-continuous.yaml | 0
.../{helm => }/image-puller/daemonset-hook.yaml | 0
.../deploy/templates/{helm => }/image-puller/job.yaml | 0
.../templates/{helm => }/image-puller/rbac.yaml | 0
.../templates/{helm => }/proxy/autohttps/_README.txt | 0
.../{helm => }/proxy/autohttps/configmap.yaml | 0
.../{helm => }/proxy/autohttps/deployment.yaml | 0
.../templates/{helm => }/proxy/autohttps/rbac.yaml | 0
.../templates/{helm => }/proxy/autohttps/service.yaml | 0
.../deploy/templates/{helm => }/proxy/deployment.yaml | 0
.../deploy/templates/{helm => }/proxy/netpol.yaml | 0
.../deploy/templates/{helm => }/proxy/pdb.yaml | 0
.../deploy/templates/{helm => }/proxy/secret.yaml | 0
.../deploy/templates/{helm => }/proxy/service.yaml | 0
.../{helm => }/scheduling/_scheduling-helpers.tpl | 0
.../{helm => }/scheduling/priorityclass.yaml | 0
.../{helm => }/scheduling/user-placeholder/pdb.yaml | 0
.../scheduling/user-placeholder/priorityclass.yaml | 0
.../scheduling/user-placeholder/statefulset.yaml | 0
.../scheduling/user-scheduler/configmap.yaml | 0
.../scheduling/user-scheduler/deployment.yaml | 0
.../{helm => }/scheduling/user-scheduler/pdb.yaml | 0
.../{helm => }/scheduling/user-scheduler/rbac.yaml | 0
.../templates/{helm => }/singleuser/netpol.yaml | 0
.../deploy/templates/{helm => }/_helpers.tpl | 0
.../deploy/templates/{helm => }/clusterrole.yaml | 0
.../templates/{helm => }/clusterrolebinding.yaml | 0
.../deploy/templates/{helm => }/nfs-server.yaml | 0
.../templates/{helm => }/podsecuritypolicy.yaml | 0
.../nfsserver/deploy/templates/{helm => }/role.yaml | 0
.../deploy/templates/{helm => }/rolebinding.yaml | 0
.../deploy/templates/{helm => }/serviceaccount.yaml | 0
.../deploy/templates/{helm => }/storageclass.yaml | 0
.../sentry/deploy/templates/{helm => }/redis.yaml | 0
.../compose/templates/auto-compose.yaml | 11 +++++++----
.../ch_cli_tools/dockercompose.py | 2 +-
tools/deployment-cli-tools/ch_cli_tools/helm.py | 2 +-
57 files changed, 9 insertions(+), 6 deletions(-)
rename applications/argo/deploy/templates/{helm => }/argo-sa.yaml (100%)
rename applications/events/deploy/templates/{helm => }/broker-config.yml (100%)
rename applications/events/deploy/templates/{helm => }/configmap.yaml (100%)
rename applications/events/deploy/templates/{helm => }/deployments.yml (100%)
rename applications/events/deploy/templates/{helm => }/roles.yml (100%)
rename applications/events/deploy/templates/{helm => }/services.yml (100%)
rename applications/events/deploy/templates/{helm => }/zoo-config.yml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/_helpers-auth-rework.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/_helpers-names.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/_helpers.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/_helpers-passwords.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/netpol.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/pvc.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/secret.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/hub/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/_helpers-daemonset.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/daemonset-continuous.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/daemonset-hook.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/job.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/image-puller/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/_README.txt (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/autohttps/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/netpol.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/secret.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/proxy/service.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/_scheduling-helpers.tpl (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/priorityclass.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/priorityclass.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-placeholder/statefulset.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/configmap.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/deployment.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/pdb.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/scheduling/user-scheduler/rbac.yaml (100%)
rename applications/jupyterhub/deploy/templates/{helm => }/singleuser/netpol.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/_helpers.tpl (100%)
rename applications/nfsserver/deploy/templates/{helm => }/clusterrole.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/clusterrolebinding.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/nfs-server.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/podsecuritypolicy.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/role.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/rolebinding.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/serviceaccount.yaml (100%)
rename applications/nfsserver/deploy/templates/{helm => }/storageclass.yaml (100%)
rename applications/sentry/deploy/templates/{helm => }/redis.yaml (100%)
diff --git a/applications/argo/deploy/templates/helm/argo-sa.yaml b/applications/argo/deploy/templates/argo-sa.yaml
similarity index 100%
rename from applications/argo/deploy/templates/helm/argo-sa.yaml
rename to applications/argo/deploy/templates/argo-sa.yaml
diff --git a/applications/events/deploy/templates/helm/broker-config.yml b/applications/events/deploy/templates/broker-config.yml
similarity index 100%
rename from applications/events/deploy/templates/helm/broker-config.yml
rename to applications/events/deploy/templates/broker-config.yml
diff --git a/applications/events/deploy/templates/helm/configmap.yaml b/applications/events/deploy/templates/configmap.yaml
similarity index 100%
rename from applications/events/deploy/templates/helm/configmap.yaml
rename to applications/events/deploy/templates/configmap.yaml
diff --git a/applications/events/deploy/templates/helm/deployments.yml b/applications/events/deploy/templates/deployments.yml
similarity index 100%
rename from applications/events/deploy/templates/helm/deployments.yml
rename to applications/events/deploy/templates/deployments.yml
diff --git a/applications/events/deploy/templates/helm/roles.yml b/applications/events/deploy/templates/roles.yml
similarity index 100%
rename from applications/events/deploy/templates/helm/roles.yml
rename to applications/events/deploy/templates/roles.yml
diff --git a/applications/events/deploy/templates/helm/services.yml b/applications/events/deploy/templates/services.yml
similarity index 100%
rename from applications/events/deploy/templates/helm/services.yml
rename to applications/events/deploy/templates/services.yml
diff --git a/applications/events/deploy/templates/helm/zoo-config.yml b/applications/events/deploy/templates/zoo-config.yml
similarity index 100%
rename from applications/events/deploy/templates/helm/zoo-config.yml
rename to applications/events/deploy/templates/zoo-config.yml
diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl b/applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/_helpers-auth-rework.tpl
rename to applications/jupyterhub/deploy/templates/_helpers-auth-rework.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl b/applications/jupyterhub/deploy/templates/_helpers-names.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/_helpers-names.tpl
rename to applications/jupyterhub/deploy/templates/_helpers-names.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/_helpers.tpl b/applications/jupyterhub/deploy/templates/_helpers.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/_helpers.tpl
rename to applications/jupyterhub/deploy/templates/_helpers.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl b/applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/_helpers-passwords.tpl
rename to applications/jupyterhub/deploy/templates/hub/_helpers-passwords.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml b/applications/jupyterhub/deploy/templates/hub/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/configmap.yaml
rename to applications/jupyterhub/deploy/templates/hub/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml b/applications/jupyterhub/deploy/templates/hub/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/deployment.yaml
rename to applications/jupyterhub/deploy/templates/hub/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml b/applications/jupyterhub/deploy/templates/hub/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/netpol.yaml
rename to applications/jupyterhub/deploy/templates/hub/netpol.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml b/applications/jupyterhub/deploy/templates/hub/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/pdb.yaml
rename to applications/jupyterhub/deploy/templates/hub/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml b/applications/jupyterhub/deploy/templates/hub/pvc.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/pvc.yaml
rename to applications/jupyterhub/deploy/templates/hub/pvc.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml b/applications/jupyterhub/deploy/templates/hub/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/rbac.yaml
rename to applications/jupyterhub/deploy/templates/hub/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/secret.yaml b/applications/jupyterhub/deploy/templates/hub/secret.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/secret.yaml
rename to applications/jupyterhub/deploy/templates/hub/secret.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/hub/service.yaml b/applications/jupyterhub/deploy/templates/hub/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/hub/service.yaml
rename to applications/jupyterhub/deploy/templates/hub/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl b/applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/image-puller/_helpers-daemonset.tpl
rename to applications/jupyterhub/deploy/templates/image-puller/_helpers-daemonset.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml b/applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-continuous.yaml
rename to applications/jupyterhub/deploy/templates/image-puller/daemonset-continuous.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml b/applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/image-puller/daemonset-hook.yaml
rename to applications/jupyterhub/deploy/templates/image-puller/daemonset-hook.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml b/applications/jupyterhub/deploy/templates/image-puller/job.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/image-puller/job.yaml
rename to applications/jupyterhub/deploy/templates/image-puller/job.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml b/applications/jupyterhub/deploy/templates/image-puller/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/image-puller/rbac.yaml
rename to applications/jupyterhub/deploy/templates/image-puller/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt b/applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/_README.txt
rename to applications/jupyterhub/deploy/templates/proxy/autohttps/_README.txt
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/configmap.yaml
rename to applications/jupyterhub/deploy/templates/proxy/autohttps/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/deployment.yaml
rename to applications/jupyterhub/deploy/templates/proxy/autohttps/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/rbac.yaml
rename to applications/jupyterhub/deploy/templates/proxy/autohttps/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml b/applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/autohttps/service.yaml
rename to applications/jupyterhub/deploy/templates/proxy/autohttps/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml b/applications/jupyterhub/deploy/templates/proxy/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/deployment.yaml
rename to applications/jupyterhub/deploy/templates/proxy/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml b/applications/jupyterhub/deploy/templates/proxy/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/netpol.yaml
rename to applications/jupyterhub/deploy/templates/proxy/netpol.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml b/applications/jupyterhub/deploy/templates/proxy/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/pdb.yaml
rename to applications/jupyterhub/deploy/templates/proxy/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml b/applications/jupyterhub/deploy/templates/proxy/secret.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/secret.yaml
rename to applications/jupyterhub/deploy/templates/proxy/secret.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/proxy/service.yaml b/applications/jupyterhub/deploy/templates/proxy/service.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/proxy/service.yaml
rename to applications/jupyterhub/deploy/templates/proxy/service.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl b/applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/_scheduling-helpers.tpl
rename to applications/jupyterhub/deploy/templates/scheduling/_scheduling-helpers.tpl
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/priorityclass.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/priorityclass.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/pdb.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/priorityclass.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/priorityclass.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-placeholder/statefulset.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-placeholder/statefulset.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/configmap.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/configmap.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/deployment.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/deployment.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/pdb.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/pdb.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml b/applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/scheduling/user-scheduler/rbac.yaml
rename to applications/jupyterhub/deploy/templates/scheduling/user-scheduler/rbac.yaml
diff --git a/applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml b/applications/jupyterhub/deploy/templates/singleuser/netpol.yaml
similarity index 100%
rename from applications/jupyterhub/deploy/templates/helm/singleuser/netpol.yaml
rename to applications/jupyterhub/deploy/templates/singleuser/netpol.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/_helpers.tpl b/applications/nfsserver/deploy/templates/_helpers.tpl
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/_helpers.tpl
rename to applications/nfsserver/deploy/templates/_helpers.tpl
diff --git a/applications/nfsserver/deploy/templates/helm/clusterrole.yaml b/applications/nfsserver/deploy/templates/clusterrole.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/clusterrole.yaml
rename to applications/nfsserver/deploy/templates/clusterrole.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml b/applications/nfsserver/deploy/templates/clusterrolebinding.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/clusterrolebinding.yaml
rename to applications/nfsserver/deploy/templates/clusterrolebinding.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/nfs-server.yaml b/applications/nfsserver/deploy/templates/nfs-server.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/nfs-server.yaml
rename to applications/nfsserver/deploy/templates/nfs-server.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml b/applications/nfsserver/deploy/templates/podsecuritypolicy.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/podsecuritypolicy.yaml
rename to applications/nfsserver/deploy/templates/podsecuritypolicy.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/role.yaml b/applications/nfsserver/deploy/templates/role.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/role.yaml
rename to applications/nfsserver/deploy/templates/role.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/rolebinding.yaml b/applications/nfsserver/deploy/templates/rolebinding.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/rolebinding.yaml
rename to applications/nfsserver/deploy/templates/rolebinding.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/serviceaccount.yaml b/applications/nfsserver/deploy/templates/serviceaccount.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/serviceaccount.yaml
rename to applications/nfsserver/deploy/templates/serviceaccount.yaml
diff --git a/applications/nfsserver/deploy/templates/helm/storageclass.yaml b/applications/nfsserver/deploy/templates/storageclass.yaml
similarity index 100%
rename from applications/nfsserver/deploy/templates/helm/storageclass.yaml
rename to applications/nfsserver/deploy/templates/storageclass.yaml
diff --git a/applications/sentry/deploy/templates/helm/redis.yaml b/applications/sentry/deploy/templates/redis.yaml
similarity index 100%
rename from applications/sentry/deploy/templates/helm/redis.yaml
rename to applications/sentry/deploy/templates/redis.yaml
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 43bd84014..cdf8ddddf 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -45,10 +45,13 @@ services:
- {{ .name }}={{ .value | quote }}
{{- end }}
{{- with $app_config.harness.dependencies.soft }}
- # links:
- # {{- range . }}
- # - {{ . }}
- # {{- end }}
+ links:
+ {{- range . }}
+ - {{ . }}
+ {{- with $app_config.harness.domain }}
+ :{{- . }}
+ {{- end }}
+ {{- end }}
{{- end }}
{{- with $app_config.harness.dependencies.hard }}
depends_on:
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 9abcd5652..2c2a2c35c 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -525,7 +525,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat
app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path))
if app_name in exclude or (include and not any(inc in app_name for inc in include)):
continue
- template_dir = app_path / 'deploy' / 'templates' / templates_path
+ template_dir = app_path / 'deploy' / f'templates-{templates_path}'
if template_dir.exists():
dest_dir = dest_helm_chart_path / 'templates' / app_name
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index 9bd43b8c3..64683197b 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -521,7 +521,7 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), i
app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
if app_name in exclude or (include and not any(inc in app_name for inc in include)):
continue
- template_dir = os.path.join(app_path, 'deploy', 'templates', HELM_PATH)
+ template_dir = os.path.join(app_path, 'deploy', 'templates')
if os.path.exists(template_dir):
dest_dir = os.path.join(
dest_helm_chart_path, 'templates', app_name)
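With this split, Helm-specific templates live back under each application's deploy/templates directory, while compose-specific ones are collected from a sibling templates-<flavour> directory. For a hypothetical application myapp shipping both kinds, the layout would look roughly like:

    applications/myapp/deploy/
        templates/              # collected by helm.py into the Helm chart
        templates-compose/      # collected by dockercompose.py into the compose chart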
From 1a570647182d3f89dd057cbe7a8be770fa39b52a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 9 Feb 2024 10:35:41 -0600
Subject: [PATCH 08/94] CH-100 Add subdomain configuration
---
deployment-configuration/compose/templates/auto-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index cdf8ddddf..9ba6a3ef6 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -48,8 +48,8 @@ services:
links:
{{- range . }}
- {{ . }}
- {{- with $app_config.harness.domain }}
- :{{- . }}
+ {{- with $app_config.harness.subdomain }}
+ {{- ":" }}{{ . }}.{{ $.Values.domain }}
{{- end }}
{{- end }}
{{- end }}
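For a single soft dependency events declared by an app whose own harness.subdomain is workflows, under domain cloudharness.local (all names illustrative), this version renders roughly:

    links:
      - events:workflows.cloudharness.local  # alias built from the enclosing app's subdomain

The alias is derived from the current app rather than from the dependency, which is what the next patch in the series corrects.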
From b97c19c827e3dbd4a51ff2df273b0e63e810be1a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 9 Feb 2024 12:20:31 -0600
Subject: [PATCH 09/94] CH-100 Fix bad "links" generation
---
.../compose/templates/auto-compose.yaml | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 9ba6a3ef6..b15d32cd1 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -47,10 +47,7 @@ services:
{{- with $app_config.harness.dependencies.soft }}
links:
{{- range . }}
- - {{ . }}
- {{- with $app_config.harness.subdomain }}
- {{- ":" }}{{ . }}.{{ $.Values.domain }}
- {{- end }}
+ - {{ . }}:{{ . }}.{{ $.Values.domain }}
{{- end }}
{{- end }}
{{- with $app_config.harness.dependencies.hard }}
@@ -77,7 +74,7 @@ services:
{{- end }}
traefik:
- image: "traefik:v2.2"
+ image: "traefik:v2.10"
container_name: "traefik"
networks:
- ch
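After the fix, each dependency maps to an alias derived from its own name; with soft dependencies events and accounts under cloudharness.local (illustrative), the block renders roughly:

    links:
      - events:events.cloudharness.local
      - accounts:accounts.cloudharness.local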
From 85dcfd93cb131bd29097479ee6bcdd08ee966dbc Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 12 Feb 2024 12:54:00 -0600
Subject: [PATCH 10/94] CH-100 Add support for aliases and service links
---
.../compose/templates/auto-compose.yaml | 15 +++++++++++----
1 file changed, 11 insertions(+), 4 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index b15d32cd1..512ba0db4 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -2,16 +2,22 @@ version: '3.7'
services:
{{- range $app_name, $app_config := .Values.apps }}
- {{- if has $app_name (list "argo" "nfsserver" "workflows" "events" ) }} {{- /* We deactivate generation for some services */}}
+ {{ $deployment := $app_config.harness.deployment }}
+ {{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
{{- end}}
- {{ $deployment := $app_config.harness.deployment }}
{{ $app_name }}:
{{- with $app_config.domain }}
domainname: {{ . }}
{{- end }}
networks:
- - ch
+ {{- if ne $app_config.harness.service.name $app_name}}
+ ch:
+ aliases:
+ - {{ $app_config.harness.service.name }}
+ {{- else }}
+ - ch
+ {{- end}}
{{- with $app_config.image }}
image: {{ . }}
{{- end }}
@@ -47,7 +53,8 @@ services:
{{- with $app_config.harness.dependencies.soft }}
links:
{{- range . }}
- - {{ . }}:{{ . }}.{{ $.Values.domain }}
+ {{- $service_name := (get $.Values.apps .).harness.service.name }}
+ - {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
{{- end }}
{{- end }}
{{- with $app_config.harness.dependencies.hard }}
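The alias only differs when an app declares a service name other than its key. For a hypothetical app accounts whose harness.service.name is accounts-api, the networks entry renders as:

    networks:
      ch:
        aliases:
          - accounts-api   # reachable under the Kubernetes-style service name too

while apps whose service name equals their key keep the plain list form `- ch`.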
From 1ac82264e80fa1d19730d85036878ed4c75c6b81 Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 14 Feb 2024 10:34:54 -0600
Subject: [PATCH 11/94] CH-100 Add first support for auto databases
---
.../compose/templates/auto-compose.yaml | 23 ++++++++++++++++++-
1 file changed, 22 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 512ba0db4..80f4845be 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -78,8 +78,29 @@ services:
{{- end }}
{{- end}}
{{- end }}
+ {{- with $app_config.harness.database }}
+ {{- if not .auto }}
+ {{- continue}}
+ {{- end }}
+ # Database for {{ $app_name }}, type {{ .type }} named {{ .name }}
+ {{ .name }}:
+ {{- $db_infos := (get . .type) }}
+ image: {{ $db_infos.image }}
+ expose:
+ {{- range $port := $db_infos.ports }}
+ - {{ $port.port | quote }}
+ {{- end }}
+ {{- with .resources }}
+ resources:
+ limits:
+ cpus: {{ .limits.cpu | default "1000m" }}
+ memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
+ reservations:
+ cpus: {{ .requests.cpu | default "100m" }}
+ memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
+ {{- end }}
+ {{- end}}
{{- end }}
-
traefik:
image: "traefik:v2.10"
container_name: "traefik"
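Assuming an app workflows with an auto database of type postgres named workflows-db, whose postgres section carries image postgres:13 and a single port 5432 (all illustrative values), the generated sibling service looks roughly like:

    # Database for workflows, type postgres named workflows-db
    workflows-db:
      image: postgres:13
      expose:
        - "5432"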
From 56e22b7b883a57cc0eb9c672c5b00f43cc8172a0 Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 14 Feb 2024 10:41:32 -0600
Subject: [PATCH 12/94] CH-100 Add finer-grained port handling
---
.../compose/templates/auto-compose.yaml | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 80f4845be..c432d522a 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -24,6 +24,10 @@ services:
{{- with $app_config.harness.service.port }}
ports:
- "{{ . }}:{{ $app_config.harness.deployment.port }}"
+ {{- end }}
+ {{- with $app_config.harness.deployment.port }}
+ expose:
+ - {{ . | quote }}
{{- end}}
deploy:
mode: "replicated"
@@ -85,6 +89,8 @@ services:
# Database for {{ $app_name }}, type {{ .type }} named {{ .name }}
{{ .name }}:
{{- $db_infos := (get . .type) }}
+ networks:
+ ch:
image: {{ $db_infos.image }}
expose:
{{- range $port := $db_infos.ports }}
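With a service port of 80 and a deployment port of 8080 (illustrative), an app now renders both forms:

    ports:
      - "80:8080"   # published on the host
    expose:
      - "8080"      # reachable only from other containers on the network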
From 336b5585f7fa059b282cede4fcedd6023bcf563a Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 08:11:42 -0600
Subject: [PATCH 13/94] CH-100 Change how the port is exposed to the outside world
---
.../compose/templates/auto-compose.yaml | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index c432d522a..894814f66 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -97,13 +97,14 @@ services:
- {{ $port.port | quote }}
{{- end }}
{{- with .resources }}
- resources:
- limits:
- cpus: {{ .limits.cpu | default "1000m" }}
- memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
- reservations:
- cpus: {{ .requests.cpu | default "100m" }}
- memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
+ deploy:
+ resources:
+ limits:
+ cpus: {{ .limits.cpu | default "1000m" }}
+ memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
+ reservations:
+ cpus: {{ .requests.cpu | default "100m" }}
+ memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
{{- end }}
{{- end}}
{{- end }}
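Moving the block under deploy: places the constraints where the compose schema expects them. With nothing set in the values, the defaults render roughly as:

    deploy:
      resources:
        limits:
          cpus: 1000m
          memory: 2G
        reservations:
          cpus: 100m
          memory: 512M

where trimSuffix "i" converts Kubernetes-style quantities such as 2Gi into the 2G form.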
From a004ffb80583dee5be24181789e8ca13cc8f508a Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 11:02:12 -0600
Subject: [PATCH 14/94] CH-100 Fix issue with env var quoting
---
.../compose/templates/auto-compose.yaml | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 894814f66..138826e3b 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -21,10 +21,12 @@ services:
{{- with $app_config.image }}
image: {{ . }}
{{- end }}
+ {{- if eq $.Values.mainapp $app_name }}
{{- with $app_config.harness.service.port }}
ports:
- "{{ . }}:{{ $app_config.harness.deployment.port }}"
{{- end }}
+ {{- end }}
{{- with $app_config.harness.deployment.port }}
expose:
- {{ . | quote }}
@@ -40,19 +42,20 @@ services:
cpus: {{ $deployment.resources.requests.cpu | default "25m" }}
memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }}
{{- with $deployment.command }}
- entrypoint: {{ cat . $deployment.args }}
+ # entrypoint: {{ cat . $deployment.args }}
{{- end }}
environment:
- - CH_CURRENT_APP_NAME={{ $app_name | quote }}
+ - CH_CURRENT_APP_NAME={{ $app_name }}
+ - CH_VALUES_PATH=/opt/cloudharness/resources/allvalues.yaml
{{- range $.Values.env }}
- - {{ .name }}={{ .value | quote }}
+ - {{ .name }}={{ .value }}
{{- end }}
{{- /*{{- range $.Values.env }}
- - {{ .name }}={{ .value | quote }}
+ - {{ .name }}={{ .value }}
{{- end }} */}}
{{- range $app_config.harness.env }}
- - {{ .name }}={{ .value | quote }}
+ - {{ .name }}={{ .value }}
{{- end }}
{{- with $app_config.harness.dependencies.soft }}
links:
@@ -67,8 +70,9 @@ services:
- {{ . }}
{{- end }}
{{- end }}
- {{- if or $deployment.volume $app_config.harness.resources }}
volumes:
+ - ./compose/values.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
+ {{- if or $deployment.volume $app_config.harness.resources }}
{{- with $deployment.volume }}
- type: volume
source: {{ .name }}
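Dropping `| quote` avoids baking literal quote characters into the values, since compose environment entries are plain NAME=value strings already. For an app workflows (illustrative), the fragment now renders roughly:

    environment:
      - CH_CURRENT_APP_NAME=workflows
      - CH_VALUES_PATH=/opt/cloudharness/resources/allvalues.yaml
    volumes:
      - ./compose/values.yaml:/opt/cloudharness/resources/allvalues.yaml:ro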
From f73108c2fa976d7654c76798cde9f2697e2ac23c Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 11:20:41 -0600
Subject: [PATCH 15/94] CH-100 Add special behavior to produce allvalues.yaml
---
.../compose/templates/auto-compose.yaml | 2 +-
.../ch_cli_tools/dockercompose.py | 19 +++++++++++++++++++
2 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 138826e3b..3896ae138 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -71,7 +71,7 @@ services:
{{- end }}
{{- end }}
volumes:
- - ./compose/values.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
+ - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
{{- if or $deployment.volume $app_config.harness.resources }}
{{- with $deployment.volume }}
- type: volume
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 2c2a2c35c..1c51abd66 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -11,6 +11,7 @@
import tarfile
from docker import from_env as DockerClient
from pathlib import Path
+import copy
from . import HERE, CH_ROOT
@@ -134,6 +135,12 @@ def process_values(self) -> HarnessMainConfig:
merge_to_yaml_file({'metadata': {'namespace': self.namespace},
'name': helm_values['name']}, self.helm_chart_path)
validate_helm_values(merged_values)
+
+ # All values save
+ all_values = self.__get_default_helm_values_with_secrets(merged_values)
+
+ merge_to_yaml_file(all_values, self.dest_deployment_path / 'allvalues.yaml')
+
return HarnessMainConfig.from_dict(merged_values)
def __process_applications(self, helm_values, base_image_name):
@@ -234,6 +241,18 @@ def __get_default_helm_values(self):
return helm_values
+ def __get_default_helm_values_with_secrets(self, helm_values):
+ helm_values = copy.deepcopy(helm_values)
+ # {{- $values_copy := deepCopy .Values }}
+ # {{- range $key, $val := .Values.apps }}
+ # {{- $new_secrets := dict "apps" (dict $key (dict "harness" (dict "secrets"))) }}
+ # {{- $tmp := mergeOverwrite $values_copy $new_secrets }}
+ # {{- end }}
+ # {{ $values_copy | toYaml | indent 4 }}
+ for key, val in helm_values['apps'].items():
+ helm_values['apps'][key]['harness']['secrets'] = {}
+ return helm_values
+
def create_tls_certificate(self, helm_values):
if not self.tls:
helm_values['tls'] = None
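The Python helper mirrors what the commented Helm snippet sketches: deep-copy the merged values and blank each app's secrets before the file is written, so the copy mounted into containers carries no credentials. The resulting allvalues.yaml has roughly this shape (app names illustrative):

    apps:
      accounts:
        harness:
          secrets: {}
      workflows:
        harness:
          secrets: {}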
From 2370b20c646e2cb4c2c87221e746d23ab1ad5ec3 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 12:08:01 -0600
Subject: [PATCH 16/94] CH-100 Add actual docker-compose.yaml generation in
harness-deployment
---
.../compose/templates/auto-compose.yaml | 1 -
.../ch_cli_tools/dockercompose.py | 12 ++++++++++++
2 files changed, 12 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 3896ae138..4999b46f7 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -46,7 +46,6 @@ services:
{{- end }}
environment:
- CH_CURRENT_APP_NAME={{ $app_name }}
- - CH_VALUES_PATH=/opt/cloudharness/resources/allvalues.yaml
{{- range $.Values.env }}
- {{ .name }}={{ .value }}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 1c51abd66..dfe0bf5bc 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -141,8 +141,20 @@ def process_values(self) -> HarnessMainConfig:
merge_to_yaml_file(all_values, self.dest_deployment_path / 'allvalues.yaml')
+ self.generate_docker_compose_yaml()
+
return HarnessMainConfig.from_dict(merged_values)
+ def generate_docker_compose_yaml(self):
+ compose_templates = self.dest_deployment_path
+ dest_compose_yaml = self.dest_deployment_path.parent / "docker-compose.yaml"
+
+ logging.info(f'Generate docker compose configuration in: {dest_compose_yaml}, using templates from {compose_templates}')
+ command = f"helm template {compose_templates} > {dest_compose_yaml}"
+
+ subprocess.call(command, shell=True)
+
+
def __process_applications(self, helm_values, base_image_name):
for root_path in self.root_paths:
app_values = init_app_values(
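Since the compose chart is an ordinary Helm chart, `helm template` can expand it offline with no cluster involved; the command shells out exactly as written above and redirects the rendered text into a docker-compose.yaml next to the deployment folder. A sketch of the expected head of the file (service names illustrative):

    version: '3.7'
    services:
      accounts:
        ...
      workflows:
        ...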
From 5ed630d8dcb9a12dbd7238dad51ad6ed03e3bbcd Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 12:40:31 -0600
Subject: [PATCH 17/94] CH-100 Add first dedicated templates for postgres
---
.../compose/templates/auto-compose.yaml | 7 ++++---
.../compose/templates/auto-database-postgres.yaml | 7 +++++++
2 files changed, 11 insertions(+), 3 deletions(-)
create mode 100644 deployment-configuration/compose/templates/auto-database-postgres.yaml
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 4999b46f7..22f8c5a25 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -86,9 +86,9 @@ services:
{{- end}}
{{- end }}
{{- with $app_config.harness.database }}
- {{- if not .auto }}
- {{- continue}}
- {{- end }}
+ {{- if not .auto }}
+ {{- continue}}
+ {{- end }}
# Database for {{ $app_name }}, type {{ .type }} named {{ .name }}
{{ .name }}:
{{- $db_infos := (get . .type) }}
@@ -109,6 +109,7 @@ services:
cpus: {{ .requests.cpu | default "100m" }}
memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
{{- end }}
+ {{- include "deploy_utils.database.postgres" . }}
{{- end}}
{{- end }}
traefik:
diff --git a/deployment-configuration/compose/templates/auto-database-postgres.yaml b/deployment-configuration/compose/templates/auto-database-postgres.yaml
new file mode 100644
index 000000000..d832193f8
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-database-postgres.yaml
@@ -0,0 +1,7 @@
+{{- define "deploy_utils.database.postgres" }}
+ environment:
+ - POSTGRES_DB={{ .postgres.initialdb | quote }}
+ - POSTGRES_USER={{ .user | quote }}
+ - POSTGRES_PASSWORD={{ .pass | quote }}
+ - PGDATA=/data/db/pgdata
+{{- end }}
\ No newline at end of file
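Given postgres.initialdb cloudharness, user user and pass password (illustrative values), the named template renders roughly:

    environment:
      - POSTGRES_DB="cloudharness"
      - POSTGRES_USER="user"
      - POSTGRES_PASSWORD="password"
      - PGDATA=/data/db/pgdata

Note that `| quote` embeds literal double quotes into the values here; a later patch in the series drops the quoting, as was already done for the app environment.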
From 111a4f3e5be5d7655faa783009106a0dafebfd6f Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 15 Feb 2024 13:09:12 -0600
Subject: [PATCH 18/94] CH-100 Add volumes for db
---
.../compose/templates/auto-compose.yaml | 17 ++++++++++++++++-
1 file changed, 16 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 22f8c5a25..f14c7e116 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -109,7 +109,16 @@ services:
cpus: {{ .requests.cpu | default "100m" }}
memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
{{- end }}
- {{- include "deploy_utils.database.postgres" . }}
+ volumes:
+ - type: volume
+ source: {{ .name }}
+ target: /data/db
+ {{- if eq .type "postgres" }}
+ - type: volume
+ source: dshm
+ target: /dev/shm
+ {{- include "deploy_utils.database.postgres" . }}
+ {{- end }}
{{- end}}
{{- end }}
traefik:
@@ -142,4 +151,10 @@ volumes: # this inclusion needs to be conditional
{{- with $app_config.harness.deployment.volume }}
{{ .name }}:
{{- end }}
+ {{- with $app_config.harness.database }}
+ {{ .name }}:
+ {{- if eq .type "postgres" }}
+ dshm:
+ {{- end }}
+ {{- end }}
{{- end }}
\ No newline at end of file
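For a postgres database named workflows-db (illustrative), the service now mounts a named volume for the data directory plus a shared-memory volume, both also declared at the top level:

    # service-level mounts
    volumes:
      - type: volume
        source: workflows-db
        target: /data/db
      - type: volume
        source: dshm
        target: /dev/shm

    # top-level declarations
    volumes:
      workflows-db:
      dshm:

The dedicated /dev/shm volume sidesteps the small default shared-memory allocation of containers, which PostgreSQL easily outgrows.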
From ce4596ab1b62110c56c0bb1ef339cf9cc7cbdc91 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 16 Feb 2024 06:46:34 -0600
Subject: [PATCH 19/94] CH-100 Add first template for events
---
.../templates-compose/events-deployment.yaml | 13 +++++
.../compose/templates/auto-compose.yaml | 47 +++++++++++--------
2 files changed, 40 insertions(+), 20 deletions(-)
create mode 100644 applications/events/deploy/templates-compose/events-deployment.yaml
diff --git a/applications/events/deploy/templates-compose/events-deployment.yaml b/applications/events/deploy/templates-compose/events-deployment.yaml
new file mode 100644
index 000000000..f16e7e388
--- /dev/null
+++ b/applications/events/deploy/templates-compose/events-deployment.yaml
@@ -0,0 +1,13 @@
+{{- define "events.deployment" }}
+{{- $nfs := .apps.nfsserver}}
+
+{{ $nfs.name }}:
+ image: {{ $nfs.harness.deployment.image }}
+ environment:
+ # NFS useDNS? {{ $nfs.nfs.useDNS }}
+ {{- if $nfs.nfs.useDNS }}
+ - NFS_SERVER={{ printf "nfs-server.%s.svc.cluster.local" .namespace }}
+ {{- end }}
+ - NFS_PATH={{ $nfs.nfs.path }}
+ - PROVISIONER_NAME={{ printf "%s-nfs-provisioner" .namespace }}
+{{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index f14c7e116..b51aa02e4 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -1,8 +1,32 @@
version: '3.7'
services:
+ traefik:
+ image: "traefik:v2.10"
+ container_name: "traefik"
+ networks:
+ - ch
+ command:
+ - "--log.level=INFO"
+ - "--api.insecure=true"
+ - "--providers.docker=true"
+ - "--providers.docker.exposedbydefault=false"
+ - "--entrypoints.web.address=:80"
+ - "--entrypoints.websecure.address=:443"
+ - "--providers.file.directory=/etc/traefik/dynamic_conf"
+ ports:
+ - "80:80"
+ - "443:443"
+ volumes:
+ - "/var/run/docker.sock:/var/run/docker.sock:ro"
+ - "./certs/:/certs/:ro"
+ - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
+
{{- range $app_name, $app_config := .Values.apps }}
{{ $deployment := $app_config.harness.deployment }}
+ {{- if eq $app_name "nfsserver" }}
+ {{- include "events.deployment" $.Values}}
+ {{- end }}
{{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
{{- end}}
@@ -120,28 +144,11 @@ services:
{{- include "deploy_utils.database.postgres" . }}
{{- end }}
{{- end}}
+
{{- end }}
- traefik:
- image: "traefik:v2.10"
- container_name: "traefik"
- networks:
- - ch
- command:
- - "--log.level=INFO"
- - "--api.insecure=true"
- - "--providers.docker=true"
- - "--providers.docker.exposedbydefault=false"
- - "--entrypoints.web.address=:80"
- - "--entrypoints.websecure.address=:443"
- - "--providers.file.directory=/etc/traefik/dynamic_conf"
- ports:
- - "80:80"
- - "443:443"
- volumes:
- - "/var/run/docker.sock:/var/run/docker.sock:ro"
- - "./certs/:/certs/:ro"
- - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
+
+# Network definition
networks:
ch:
name: ch_network
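Helm named templates are global to a chart, so a define shipped in an application's templates-compose directory becomes includable from auto-compose.yaml once the collection step copies it into the compose chart. A minimal sketch of the mechanism (names illustrative):

    {{- define "myapp.deployment" }}
    myapp:
      image: myorg/myapp:latest
    {{- end }}

    {{/* elsewhere in the chart: */}}
    {{- include "myapp.deployment" $.Values }}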
From 3868b56b476c97d0b3f2d84ea44c96f12339341a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 16 Feb 2024 07:04:48 -0600
Subject: [PATCH 20/94] CH-100 Refactor database templates
---
.../compose/templates/auto-compose.yaml | 35 ++-----------------
.../templates/auto-database-postgres.yaml | 10 +++---
.../compose/templates/auto-database.yaml | 32 +++++++++++++++++
3 files changed, 39 insertions(+), 38 deletions(-)
create mode 100644 deployment-configuration/compose/templates/auto-database.yaml
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index b51aa02e4..0ac27f326 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -25,7 +25,7 @@ services:
{{- range $app_name, $app_config := .Values.apps }}
{{ $deployment := $app_config.harness.deployment }}
{{- if eq $app_name "nfsserver" }}
- {{- include "events.deployment" $.Values}}
+ {{- include "events.deployment" $.Values | indent 2 }}
{{- end }}
{{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
@@ -113,41 +113,10 @@ services:
{{- if not .auto }}
{{- continue}}
{{- end }}
- # Database for {{ $app_name }}, type {{ .type }} named {{ .name }}
- {{ .name }}:
- {{- $db_infos := (get . .type) }}
- networks:
- ch:
- image: {{ $db_infos.image }}
- expose:
- {{- range $port := $db_infos.ports }}
- - {{ $port.port | quote }}
- {{- end }}
- {{- with .resources }}
- deploy:
- resources:
- limits:
- cpus: {{ .limits.cpu | default "1000m" }}
- memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
- reservations:
- cpus: {{ .requests.cpu | default "100m" }}
- memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
- {{- end }}
- volumes:
- - type: volume
- source: {{ .name }}
- target: /data/db
- {{- if eq .type "postgres" }}
- - type: volume
- source: dshm
- target: /dev/shm
- {{- include "deploy_utils.database.postgres" . }}
- {{- end }}
+ {{- include "db.deployment" . | indent 2}}
{{- end}}
-
{{- end }}
-
# Network definition
networks:
ch:
diff --git a/deployment-configuration/compose/templates/auto-database-postgres.yaml b/deployment-configuration/compose/templates/auto-database-postgres.yaml
index d832193f8..b6db420c6 100644
--- a/deployment-configuration/compose/templates/auto-database-postgres.yaml
+++ b/deployment-configuration/compose/templates/auto-database-postgres.yaml
@@ -1,7 +1,7 @@
{{- define "deploy_utils.database.postgres" }}
- environment:
- - POSTGRES_DB={{ .postgres.initialdb | quote }}
- - POSTGRES_USER={{ .user | quote }}
- - POSTGRES_PASSWORD={{ .pass | quote }}
- - PGDATA=/data/db/pgdata
+ environment:
+ - POSTGRES_DB={{ .postgres.initialdb }}
+ - POSTGRES_USER={{ .user }}
+ - POSTGRES_PASSWORD={{ .pass }}
+ - PGDATA=/data/db/pgdata
{{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml
new file mode 100644
index 000000000..70bda63a5
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-database.yaml
@@ -0,0 +1,32 @@
+{{- define "db.deployment" }}
+# Database type {{ .type }} named {{ .name }}
+{{ .name }}:
+ {{- $db_infos := (get . .type) }}
+ networks:
+ ch:
+ image: {{ $db_infos.image }}
+ expose:
+ {{- range $port := $db_infos.ports }}
+ - {{ $port.port | quote }}
+ {{- end }}
+ {{- with .resources }}
+ deploy:
+ resources:
+ limits:
+ cpus: {{ .limits.cpu | default "1000m" }}
+ memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
+ reservations:
+ cpus: {{ .requests.cpu | default "100m" }}
+ memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
+ {{- end }}
+ volumes:
+ - type: volume
+ source: {{ .name }}
+ target: /data/db
+ {{- if eq .type "postgres" }}
+ - type: volume
+ source: dshm
+ target: /dev/shm
+ {{- include "deploy_utils.database.postgres" . }}
+ {{- end }}
+{{- end }}
\ No newline at end of file
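Because `include` returns a plain string, piping it through `indent 2` shifts every rendered line two spaces to the right, which is what lets the shared db.deployment block land correctly under services:. Roughly:

    {{- include "db.deployment" . | indent 2 }}

    {{/* renders as, e.g. (values illustrative): */}}
      workflows-db:
        networks:
          ch:
        image: postgres:13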
From a0476d6cd29401a486e96d8654490e784bd0bcac Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 16 Feb 2024 07:13:09 -0600
Subject: [PATCH 21/94] CH-100 Add /etc/hosts generation
---
.../compose/templates/auto-compose.yaml | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 0ac27f326..04db3327e 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -104,11 +104,22 @@ services:
{{- with $app_config.harness.resources }}
{{- range .}}
- type: bind
- source: compose/resources/{{ $app_name }}/{{.src }}
+ source: compose/resources/{{ $app_name }}/{{ .src }}
target: {{ .dst }}
{{- end }}
{{- end}}
{{- end }}
+ {{- if $.Values.local }}
+ {{- $domain := $.Values.domain }}
+ {{- $ip := $.Values.localIp }}
+ extra_hosts:
+ - "{{ $.Values.domain }}:{{ $ip }}"
+ {{- range $app := $.Values.apps }}
+ {{- with $app.harness.subdomain}}
+ - "{{ . }}.{{ $domain }}:{{ $ip }}"
+ {{- end }}
+ {{- end }}
+ {{- end }}
{{- with $app_config.harness.database }}
{{- if not .auto }}
{{- continue}}
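When running locally, each container receives host entries so the subdomain URLs resolve to the machine hosting the stack. With localIp 127.0.0.1, domain cloudharness.local and one app with subdomain accounts (illustrative), this renders:

    extra_hosts:
      - "cloudharness.local:127.0.0.1"
      - "accounts.cloudharness.local:127.0.0.1"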
From bd3e423a08b34ece388d2cf175b9b6b270d4747a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 16 Feb 2024 07:27:20 -0600
Subject: [PATCH 22/94] CH-100 Add container-specific environment variables
---
.../compose/templates/auto-compose.yaml | 11 ++++++++---
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 04db3327e..e05934b93 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -74,9 +74,13 @@ services:
{{- range $.Values.env }}
- {{ .name }}={{ .value }}
{{- end }}
- {{- /*{{- range $.Values.env }}
- - {{ .name }}={{ .value }}
- {{- end }} */}}
+ {{- with $.Values.apps.accounts }}
+ - CH_ACCOUNTS_CLIENT_SECRET={{ .client.secret }}
+ - CH_ACCOUNTS_REALM={{ $.Values.namespace }}
+ - CH_ACCOUNTS_AUTH_DOMAIN={{ printf "%s.%s" .harness.subdomain $.Values.domain }}
+ - CH_ACCOUNTS_CLIENT_ID={{ .client.id }}
+ - DOMAIN={{ $.Values.domain }}
+ {{- end}}
{{- range $app_config.harness.env }}
- {{ .name }}={{ .value }}
{{- end }}
@@ -110,6 +114,7 @@ services:
{{- end}}
{{- end }}
{{- if $.Values.local }}
+ # Extra /etc/hosts list
{{- $domain := $.Values.domain }}
{{- $ip := $.Values.localIp }}
extra_hosts:
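Whenever an accounts app is part of the stack, every container now receives the settings needed to reach the auth server. With illustrative values (namespace cloudharness, client id web-client), the fragment renders roughly:

    - CH_ACCOUNTS_CLIENT_SECRET=<client secret from the values>  # placeholder, not a real secret
    - CH_ACCOUNTS_REALM=cloudharness
    - CH_ACCOUNTS_AUTH_DOMAIN=accounts.cloudharness.local
    - CH_ACCOUNTS_CLIENT_ID=web-client
    - DOMAIN=cloudharness.local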
From 8851c48ef3777aef08144bcda1e309680cc878ca Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 20 Feb 2024 11:06:17 -0600
Subject: [PATCH 23/94] CH-100 Rename some templates
---
.../nfsserver-deployment.yaml} | 5 +-
.../compose/templates/auto-compose.yaml | 24 +++++-----
.../ch_cli_tools/dockercompose.py | 46 ++++++++++++++++++-
3 files changed, 60 insertions(+), 15 deletions(-)
rename applications/{events/deploy/templates-compose/events-deployment.yaml => nfsserver/deploy/templates-compose/nfsserver-deployment.yaml} (74%)
diff --git a/applications/events/deploy/templates-compose/events-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
similarity index 74%
rename from applications/events/deploy/templates-compose/events-deployment.yaml
rename to applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
index f16e7e388..2dd8509b5 100644
--- a/applications/events/deploy/templates-compose/events-deployment.yaml
+++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
@@ -1,7 +1,10 @@
-{{- define "events.deployment" }}
+{{- define "nfsserver.deployment" }}
{{- $nfs := .apps.nfsserver}}
{{ $nfs.name }}:
+ build:
+ context: {{ $nfs.build.context }}
+ dockerfile: {{ $nfs.build.dockerfile }}
image: {{ $nfs.harness.deployment.image }}
environment:
# NFS useDNS? {{ $nfs.nfs.useDNS }}
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index e05934b93..f174d9431 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -25,7 +25,7 @@ services:
{{- range $app_name, $app_config := .Values.apps }}
{{ $deployment := $app_config.harness.deployment }}
{{- if eq $app_name "nfsserver" }}
- {{- include "events.deployment" $.Values | indent 2 }}
+ {{- include "nfsserver.deployment" $.Values | indent 2 }}
{{- end }}
{{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
@@ -114,16 +114,16 @@ services:
{{- end}}
{{- end }}
{{- if $.Values.local }}
- # Extra /etc/hosts list
- {{- $domain := $.Values.domain }}
- {{- $ip := $.Values.localIp }}
- extra_hosts:
- - "{{ $.Values.domain }}:{{ $ip }}"
- {{- range $app := $.Values.apps }}
- {{- with $app.harness.subdomain}}
- - "{{ . }}.{{ $domain }}:{{ $ip }}"
- {{- end }}
- {{- end }}
+ # Extra /etc/hosts list
+ {{- $domain := $.Values.domain }}
+ {{- $ip := $.Values.localIp }}
+ extra_hosts:
+ - "{{ $.Values.domain }}={{ $ip }}"
+ {{- range $app := $.Values.apps }}
+ {{- with $app.harness.subdomain}}
+ - "{{ . }}.{{ $domain }}={{ $ip }}"
+ {{- end }}
+ {{- end }}
{{- end }}
{{- with $app_config.harness.database }}
{{- if not .auto }}
@@ -149,4 +149,4 @@ volumes: # this inclusion needs to be conditional
dshm:
{{- end }}
{{- end }}
-{{- end }}
\ No newline at end of file
+{{- end }}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index dfe0bf5bc..6563eac01 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -29,6 +29,7 @@
KEY_DEPLOYMENT = 'deployment'
KEY_APPS = 'apps'
KEY_TASK_IMAGES = 'task-images'
+# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build"
KEY_TEST_IMAGES = 'test-images'
DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
@@ -180,6 +181,30 @@ def collect_app_values(self, app_base_path, base_image_name=None):
app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+ # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None)
+ # # for dockerfile_path in app_path.rglob('**/Dockerfile'):
+ # # parent_name = dockerfile_path.parent.name.replace("-", "_")
+ # # if parent_name == app_key:
+ # # app_values['build'] = {
+ # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
+ # # 'dockerfile': "Dockerfile",
+ # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # # }
+ # # elif "tasks/" in f"{dockerfile_path}":
+ # # parent_name = parent_name.upper()
+ # # values.setdefault("task-images-build", {})[parent_name] = {
+ # # 'dockerfile': "Dockerfile",
+ # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # # }
+ # # import ipdb; ipdb.set_trace() # fmt: skip
+
+ # if dockerfile_path:
+ # app_values['build'] = {
+ # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
+ # 'dockerfile': "Dockerfile",
+ # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # }
+
values[app_key] = dict_merge(
values[app_key], app_values) if app_key in values else app_values
@@ -201,10 +226,15 @@ def __assign_static_build_dependencies(self, helm_values):
for dep in dependencies:
if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
+ # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = {
+ # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent),
+ # 'dockerfile': 'Dockerfile',
+ # }
for image_name in helm_values[KEY_TASK_IMAGES].keys():
if image_name in self.exclude:
del helm_values[KEY_TASK_IMAGES][image_name]
+ # del helm_values[KEY_TASK_IMAGES_BUILD][image_name]
def __init_base_images(self, base_image_name):
@@ -501,8 +531,20 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
task_path, app_path.parent))
img_name = image_name_from_dockerfile_path(task_name, base_image_name)
- values[KEY_TASK_IMAGES][task_name] = self.image_tag(
- img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
+ # import ipdb; ipdb.set_trace() # fmt: skip
+
+ # values[KEY_TASK_IMAGES][task_name] = self.image_tag(
+ # img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
+ # values.setdefault(KEY_TASK_IMAGES_BUILD, {})[task_name] = {
+ # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent),
+ # 'dockerfile': 'Dockerfile',
+ # }
+
+ values[KEY_TASK_IMAGES][task_name] = {
+ 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()),
+ # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent),
+ # 'dockerfile': 'Dockerfile',
+ }
return values
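The live change at the end turns each task-image value from a plain tag string into a small mapping, leaving room for build metadata next to the image name; in values terms (image name illustrative):

    task-images:
      my-task:
        name: reg.example.org/cloudharness/myapp-tasks-my-task:1.0.0

The large commented blocks record the abandoned alternative of also tracking a per-image build context; the next patch reverts the mapping back to a plain string.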
From ced7cd2b189c2e40ab3f77e95fb3437eb5fa909a Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 20 Feb 2024 11:28:00 -0600
Subject: [PATCH 24/94] CH-100 Add first traefik configuration
---
.../templates-compose/nfsserver-deployment.yaml | 3 ---
.../compose/templates/auto-compose.yaml | 9 +++++++++
.../ch_cli_tools/dockercompose.py | 12 +++++++-----
3 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
index 2dd8509b5..7e9b68194 100644
--- a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
+++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
@@ -2,9 +2,6 @@
{{- $nfs := .apps.nfsserver}}
{{ $nfs.name }}:
- build:
- context: {{ $nfs.build.context }}
- dockerfile: {{ $nfs.build.dockerfile }}
image: {{ $nfs.harness.deployment.image }}
environment:
# NFS useDNS? {{ $nfs.nfs.useDNS }}
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index f174d9431..11996c912 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -125,6 +125,15 @@ services:
{{- end }}
{{- end }}
{{- end }}
+ labels:
+ - "traefik.enable=true"
+ {{- with $app_config.harness.service.port }}
+ - "traefik.http.services.{{ $app_name }}.loadbalancer.server.port={{ . }}"
+ {{- end }}
+ # - "traefik.http.middlewares.redirect-middleware.redirectscheme.scheme=https"
+ # - "traefik.http.routers.{{ .app_name }}.middlewares=redirect-middleware"
+ - "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)"
+ - "traefik.http.routers.{{ $app_name }}.entrypoints=web"
{{- with $app_config.harness.database }}
{{- if not .auto }}
{{- continue}}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 6563eac01..83e04024c 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -540,11 +540,13 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
# 'dockerfile': 'Dockerfile',
# }
- values[KEY_TASK_IMAGES][task_name] = {
- 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()),
- # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent),
- # 'dockerfile': 'Dockerfile',
- }
+ # values[KEY_TASK_IMAGES][task_name] = {
+ # 'name': self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()),
+ # # 'context': os.path.relpath(task_path, self.dest_deployment_path.parent),
+ # # 'dockerfile': 'Dockerfile',
+ # }
+
+ values[KEY_TASK_IMAGES][task_name] = self.image_tag(img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
return values
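
The Traefik labels introduced in this patch are what make each service discoverable by the reverse proxy. As a rough Python sketch of what they expand to (app name, subdomain, domain and port below are illustrative, not taken from the repository):

[source,python]
----
# Builds the label list the template emits for one service; the port label
# is only emitted when harness.service.port is set.
def traefik_labels(app_name, subdomain, domain, port=None):
    labels = ["traefik.enable=true"]
    if port:
        labels.append(f"traefik.http.services.{app_name}.loadbalancer.server.port={port}")
    labels.append(f"traefik.http.routers.{app_name}.rule=Host(`{subdomain}.{domain}`)")
    labels.append(f"traefik.http.routers.{app_name}.entrypoints=web")
    return labels

print(traefik_labels("samples", "samples", "mydeployment.local", 8080))
----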
From f7ebeb03606302e9ffc56bf010ad49ad36cf428b Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 20 Feb 2024 11:40:27 -0600
Subject: [PATCH 25/94] CH-100 Fix bad indentation in docker-compose.yaml
---
deployment-configuration/compose/templates/auto-compose.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 11996c912..b175f2b59 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -125,7 +125,7 @@ services:
{{- end }}
{{- end }}
{{- end }}
- labels:
+ labels:
- "traefik.enable=true"
{{- with $app_config.harness.service.port }}
- "traefik.http.services.{{ $app_name }}.loadbalancer.server.port={{ . }}"
From 8fdcba6e840340b4a347eb6ec00c61c0547ee645 Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 20 Feb 2024 11:41:41 -0600
Subject: [PATCH 26/94] CH-100 Remove exposure of ports
---
.../compose/templates/auto-compose.yaml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index b175f2b59..f25f558e9 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -46,10 +46,10 @@ services:
image: {{ . }}
{{- end }}
{{- if eq $.Values.mainapp $app_name }}
- {{- with $app_config.harness.service.port }}
- ports:
- - "{{ . }}:{{ $app_config.harness.deployment.port }}"
- {{- end }}
+ # {{- with $app_config.harness.service.port }}
+ # ports:
+ # - "{{ . }}:{{ $app_config.harness.deployment.port }}"
+ # {{- end }}
{{- end }}
{{- with $app_config.harness.deployment.port }}
expose:
From 428a6191b0ec0fd5a09a59082b2d376785967328 Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 21 Feb 2024 06:48:39 -0600
Subject: [PATCH 27/94] CH-100 Add post-process mechanism to generate files
---
.../ch_cli_tools/dockercompose.py | 26 +++++++++++++++++++
1 file changed, 26 insertions(+)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 83e04024c..c088dc44f 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -155,6 +155,32 @@ def generate_docker_compose_yaml(self):
subprocess.call(command, shell=True)
+ self.__post_process_multiple_document_docker_compose(dest_compose_yaml)
+
+ def __post_process_multiple_document_docker_compose(self, yaml_document):
+ if not yaml_document.exists():
+ logging.warning("Something went wrong during the docker-compose.yaml generation, cannot post-process it")
+ return
+
+ with open(yaml_document, "r") as f:
+ documents = yaml.safe_load_all(f)
+
+ for document in documents:
+ if "cloudharness-metadata" in document:
+ document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"]
+ logging.info("Post-process docker-compose.yaml, creating %s", document_path)
+ document_path.write_text(document["data"])
+ else:
+ with open(yaml_document, "w") as f:
+ yaml.dump(document, f)
+
+ # cloudharness-metadata:
+ # path: secrets.yaml
+
+ # data: |
+ # sdfmsldkf
+ # sdfmlskdfmslkdfs
+ # sdmlksdf
def __process_applications(self, helm_values, base_image_name):
for root_path in self.root_paths:
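
The post-process mechanism introduced here splits the rendered stream on a `cloudharness-metadata` marker: marked documents are written to the path they declare, while the remaining document is the compose file itself. A minimal standalone sketch of the idea, assuming PyYAML and illustrative paths:

[source,python]
----
import yaml
from pathlib import Path

stream = """\
services: {}
---
cloudharness-metadata:
  path: generated/example.txt
data: |
  illustrative payload
"""

main = None
for doc in yaml.safe_load_all(stream):
    if not doc:
        continue
    if "cloudharness-metadata" in doc:
        out = Path(doc["cloudharness-metadata"]["path"])
        out.parent.mkdir(parents=True, exist_ok=True)
        out.write_text(doc["data"])  # side file declared by the template
    else:
        main = doc  # the compose document, written once at the end

Path("docker-compose.yaml").write_text(yaml.dump(main, default_flow_style=False))
----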
From 9d6c8b2f015c5269cb44dd27c9ae96038a39b46e Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 21 Feb 2024 08:12:00 -0600
Subject: [PATCH 28/94] CH-100 Add new templates
---
.../deploy/templates-compose/deployments.yaml | 97 +++++++++++++++++++
.../compose/templates/allvalues-template.yaml | 20 ++++
2 files changed, 117 insertions(+)
create mode 100644 applications/events/deploy/templates-compose/deployments.yaml
create mode 100644 deployment-configuration/compose/templates/allvalues-template.yaml
diff --git a/applications/events/deploy/templates-compose/deployments.yaml b/applications/events/deploy/templates-compose/deployments.yaml
new file mode 100644
index 000000000..0001fde48
--- /dev/null
+++ b/applications/events/deploy/templates-compose/deployments.yaml
@@ -0,0 +1,97 @@
+{{- define "events.deployment" }}
+events:
+ networks:
+ - ch
+ image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d
+ ports:
+ - "9094:9092"
+ expose:
+ - 5555
+ - 9094
+ - 9092
+ environment:
+ - CLASSPATH=/opt/kafka/libs/extensions/*
+ - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties
+ - JMX_PORT=5555
+ command:
+ - "./bin/kafka-server-start.sh"
+ - "/etc/kafka/server.properties"
+ - "--override"
+ - "default.replication.factor=1"
+ - "--override"
+ - "min.insync.replicas=1"
+ - "--override"
+ - "offsets.topic.replication.factor=1"
+ - "--override"
+ - "offsets.topic.num.partitions=1"
+ depends_on:
+ events-kafka-init:
+ condition: service_completed_successfully
+
+events-kafka-init:
+ networks:
+ - ch
+ image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef
+ command:
+ - "/bin/bash"
+ - "/etc/kafka-configmap/init.sh"
+ environment:
+
+pzoo:
+ networks:
+ - ch
+ expose:
+ - 2181
+ - 2888
+ - 3888
+ image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d
+ command:
+ - "./bin/zookeeper-server-start.sh"
+ - "/etc/kafka/zookeeper.properties"
+ environment:
+ - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties
+ depends_on:
+ events-pzoo-init:
+ condition: service_completed_successfully
+
+events-pzoo-init:
+ networks:
+ - ch
+ expose:
+ image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef
+ command:
+ - "/bin/bash"
+ - "/etc/kafka-configmap/init.sh"
+ environment:
+ - PZOO_REPLICAS=1
+ - ZOO_REPLICAS=0
+
+zoo:
+ networks:
+ - ch
+ expose:
+ - 2181
+ - 2888
+ - 3888
+ image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d
+ command:
+ - "./bin/zookeeper-server-start.sh"
+ - "/etc/kafka/zookeeper.properties"
+ environment:
+ - KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:/etc/kafka/log4j.properties
+ depends_on:
+ events-zoo-init:
+ condition: service_completed_successfully
+
+events-zoo-init:
+ networks:
+ - ch
+ image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef
+ command:
+ - "/bin/bash"
+ - "/etc/kafka-configmap/init.sh"
+ environment:
+ - PZOO_REPLICAS=1
+ - ZOO_REPLICAS=0
+ - ID_OFFSET=2
+{{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/templates/allvalues-template.yaml b/deployment-configuration/compose/templates/allvalues-template.yaml
new file mode 100644
index 000000000..d69538aa0
--- /dev/null
+++ b/deployment-configuration/compose/templates/allvalues-template.yaml
@@ -0,0 +1,20 @@
+{{- /*
+to replace the secrets values we create a dict with the structure:
+  apps:
+    <app name>:
+      harness:
+        secrets:
+
+thus with an empty secrets node
+and then we mergeOverwrite it onto the copy of the .Values we created,
+resulting in a copy of the .Values with all secrets being ""
+*/ -}}
+cloudharness-metadata:
+ path: allvalues2.yaml
+data: |
+{{- $values_copy := deepCopy .Values }}
+{{- range $key, $val := .Values.apps }}
+ {{- $new_secrets := dict "apps" (dict $key (dict "harness" (dict "secrets"))) }}
+ {{- $tmp := mergeOverwrite $values_copy $new_secrets }}
+{{- end }}
+{{ $values_copy | toYaml | indent 4 }}
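
What the `mergeOverwrite` loop achieves can be mirrored in plain Python; a sketch with illustrative values (not the chart code):

[source,python]
----
import copy

values = {"apps": {"demo": {"harness": {"secrets": {"token": "s3cret"}, "port": 80}}}}

# Deep-copy the values, then blank every apps.<name>.harness.secrets node,
# mirroring the (dict "apps" (dict $key (dict "harness" (dict "secrets")))) merge.
blanked = copy.deepcopy(values)
for app in blanked["apps"].values():
    if "harness" in app:
        app["harness"]["secrets"] = ""

print(blanked)  # {'apps': {'demo': {'harness': {'secrets': '', 'port': 80}}}}
----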
From ae7cf1918e0d2d370eabdf2ceb5b7c1db3de43cf Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 21 Feb 2024 10:33:20 -0600
Subject: [PATCH 29/94] CH-100 Add new templates (not modified yet)
---
.../compose/templates/auto-gatekeepers.yaml | 174 ++++++++++++++++++
.../compose/templates/auto-secrets.yaml | 50 +++++
.../ch_cli_tools/dockercompose.py | 19 +-
3 files changed, 233 insertions(+), 10 deletions(-)
create mode 100644 deployment-configuration/compose/templates/auto-gatekeepers.yaml
create mode 100644 deployment-configuration/compose/templates/auto-secrets.yaml
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
new file mode 100644
index 000000000..898995cd6
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -0,0 +1,174 @@
+{{/* Secured Services/Deployments */}}
+{{- define "deploy_utils.securedservice" }}
+{{- $tls := not (not .root.Values.tls) }}
+apiVersion: v1
+kind: ConfigMap
+metadata:
+ name: "{{ .app.harness.service.name }}-gk"
+ labels:
+ app: "{{ .app.harness.service.name }}-gk"
+data:
+ proxy.yml: |-
+ verbose: {{ .root.Values.debug }}
+ discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }}
+ client-id: {{ .root.Values.apps.accounts.webclient.id | quote }}
+ client-secret: {{ .root.Values.apps.accounts.webclient.secret }}
+ secure-cookie: {{ $tls }}
+ forbidden-page: /templates/access-denied.html.tmpl
+ enable-default-deny: {{ eq (.app.harness.secured | toString) "true" }}
+ listen: 0.0.0.0:8080
+ enable-refresh-tokens: true
+ server-write-timeout: {{ .app.harness.proxy.timeout.send | default .root.Values.proxy.timeout.send | default 180 }}s
+ upstream-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s
+ upstream-response-header-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s
+ upstream-expect-continue-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s
+ server-read-timeout: {{ .app.harness.proxy.timeout.read | default .root.Values.proxy.timeout.read | default 180 }}s
+ upstream-keepalive-timeout: {{ .app.harness.proxy.timeout.keepalive | default .root.Values.proxy.timeout.keepalive | default 180 }}s
+ http-only-cookie: false
+ tls-cert:
+ tls-private-key:
+ redirection-url: {{ ternary "https" "http" $tls }}://{{ .app.harness.subdomain }}.{{ .root.Values.domain }}
+ encryption-key: AgXa7xRcoClDEU0ZDSH4X0XhL5Qy2Z2j
+ upstream-url: http://{{ .app.harness.service.name }}.{{ .app.namespace | default .root.Release.Namespace }}:{{ .app.harness.service.port | default 80}}
+ {{ if .app.harness.secured }}
+ {{ with .app.harness.uri_role_mapping }}
+ resources:
+ {{. | toYaml | nindent 4 }}
+ {{- end }}
+ {{- end }}
+ {{ if or .root.Values.local (not $tls) }}
+ skip-openid-provider-tls-verify: true
+ skip-upstream-tls-verify: true
+ {{- end }}
+ cacert.crt: {{ .files.Get "resources/certs/cacert.crt" | quote }}
+ access-denied.html.tmpl: |-
+    <html>
+    <head>
+      <title>403 - Access Forbidden</title>
+    </head>
+    <body>
+      <h1>Oops!</h1>
+      <h2>403 Permission Denied</h2>
+      <p>
+        Sorry, you do not have access to this page, please contact your administrator.
+        If you have been assigned new authorizations, try to refresh the page or to
+        <a href="/oauth/logout">login again</a>.
+      </p>
+    </body>
+    </html>
+---
+apiVersion: v1
+kind: Service
+metadata:
+ name: "{{ .app.harness.service.name }}-gk"
+ labels:
+ app: "{{ .app.harness.service.name }}-gk"
+spec:
+ ports:
+ - name: http
+ port: 8080
+ selector:
+ app: "{{ .app.harness.service.name }}-gk"
+ type: ClusterIP
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: "{{ .app.harness.service.name }}-gk"
+ labels:
+ app: "{{ .app.harness.service.name }}-gk"
+
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ app: "{{ .app.harness.service.name }}-gk"
+ template:
+ metadata:
+ annotations:
+ checksum/config: {{ .app.harness.uri_role_mapping | toString | sha256sum }}
+ labels:
+ app: "{{ .app.harness.service.name }}-gk"
+ spec:
+{{ include "deploy_utils.etcHosts" .root | indent 6 }}
+ containers:
+ - name: {{ .app.harness.service.name | quote }}
+ image: "quay.io/gogatekeeper/gatekeeper:1.3.8"
+ imagePullPolicy: IfNotPresent
+ {{ if .root.Values.local }}
+ securityContext:
+ allowPrivilegeEscalation: false
+ runAsUser: 0
+ {{- end }}
+ env:
+ - name: PROXY_CONFIG_FILE
+ value: /opt/proxy.yml
+ volumeMounts:
+ - name: "{{ .app.harness.service.name }}-gk-proxy-config"
+ mountPath: /opt/proxy.yml
+ subPath: proxy.yml
+ - name: "{{ .app.harness.service.name }}-gk-proxy-config"
+ mountPath: /etc/pki/ca-trust/source/anchors/cacert.crt
+ subPath: cacert.crt
+ - name: "{{ .app.harness.service.name }}-gk-proxy-config"
+ mountPath: /templates/access-denied.html.tmpl
+ subPath: access-denied.html.tmpl
+ ports:
+ - name: http
+ containerPort: 8080
+ - name: https
+ containerPort: 8443
+ resources:
+ requests:
+ memory: "32Mi"
+ cpu: "50m"
+ limits:
+ memory: "64Mi"
+ cpu: "100m"
+ volumes:
+ - name: "{{ .app.harness.service.name }}-gk-proxy-config"
+ configMap:
+ name: "{{ .app.harness.service.name }}-gk"
+---
+{{- end }}
+{{- if .Values.secured_gatekeepers }}
+{{ $files := .Files }}
+{{- range $app := .Values.apps }}
+ {{- if and (hasKey $app "port") ($app.harness.secured) }}
+---
+ {{ include "deploy_utils.securedservice" (dict "root" $ "app" $app "files" $files) }}
+ {{- end }}
+ {{- range $subapp := $app }}
+ {{- if contains "map" (typeOf $subapp) }}
+ {{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }}
+ {{- if $subapp.harness.secured }}
+---
+ {{ include "deploy_utils.securedservice" (dict "root" $ "app" $subapp "files" $files) }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml
new file mode 100644
index 000000000..a0a37a2f8
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-secrets.yaml
@@ -0,0 +1,50 @@
+{{- define "deploy_utils.secret" }}
+{{- if .app.harness.secrets }}
+{{- $secret_name := printf "%s" .app.harness.deployment.name }}
+apiVersion: v1
+kind: Secret
+metadata:
+ name: {{ $secret_name }}
+ namespace: {{ .root.Values.namespace }}
+ labels:
+ app: {{ .app.harness.deployment.name }}
+type: Opaque
+ {{- $secret := (lookup "v1" "Secret" .root.Values.namespace $secret_name) }}
+ {{- if $secret }}
+# secret already exists
+ {{- if not (compact (values .app.harness.secrets)) }}
+# secret values are null, copy from the existing secret
+data:
+ {{- range $k, $v := $secret.data }}
+ {{ $k }}: {{ $v }}
+ {{- end }}
+ {{- else }}
+# there are non default values in values.yaml, use these
+stringData:
+ {{- range $k, $v := .app.harness.secrets }}
+ {{ $k }}: {{ $v | default (randAlphaNum 20) }}
+ {{- end }}
+ {{- end }}
+ {{- else }}
+# secret doesn't exist
+stringData:
+ {{- range $k, $v := .app.harness.secrets }}
+ {{ $k }}: {{ $v | default (randAlphaNum 20) }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+---
+{{- end }}
+---
+{{- range $app := .Values.apps }}
+---
+ {{- include "deploy_utils.secret" (dict "root" $ "app" $app) }}
+ {{- range $subapp := $app }}
+ {{- if contains "map" (typeOf $subapp) }}
+ {{- if hasKey $subapp "harness" }}
+---
+ {{- include "deploy_utils.secret" (dict "root" $ "app" $subapp) }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
\ No newline at end of file
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index c088dc44f..d5d51a6fc 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -166,21 +166,20 @@ def __post_process_multiple_document_docker_compose(self, yaml_document):
documents = yaml.safe_load_all(f)
for document in documents:
+ if not document:
+ continue
if "cloudharness-metadata" in document:
document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"]
logging.info("Post-process docker-compose.yaml, creating %s", document_path)
- document_path.write_text(document["data"])
+ data = document["data"]
+ # if document_path.suffix == ".yaml":
+ # with open(document_path, "w") as f:
+ # yaml.dump(yaml.safe_load(data), f, default_flow_style=True)
+ # else:
+ document_path.write_text(data)
else:
with open(yaml_document, "w") as f:
- yaml.dump(document, f)
-
- # cloudharness-metadata:
- # path: secrets.yaml
-
- # data: |
- # sdfmsldkf
- # sdfmlskdfmslkdfs
- # sdmlksdf
+ yaml.dump(document, f, default_flow_style=False)
def __process_applications(self, helm_values, base_image_name):
for root_path in self.root_paths:
From 7acc7f5eade36af4f859f8b6686444b0d9ed65d6 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 22 Feb 2024 08:21:00 -0600
Subject: [PATCH 30/94] CH-100 Add generation of resources files
---
.../compose/templates/auto-compose.yaml | 2 +-
.../compose/templates/auto-resources.yaml | 18 ++++++++++++++++++
.../compose/templates/auto-secrets.yaml | 3 ++-
.../ch_cli_tools/dockercompose.py | 12 ++++++++++--
4 files changed, 31 insertions(+), 4 deletions(-)
create mode 100644 deployment-configuration/compose/templates/auto-resources.yaml
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index f25f558e9..9a0f4fd97 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -108,7 +108,7 @@ services:
{{- with $app_config.harness.resources }}
{{- range .}}
- type: bind
- source: compose/resources/{{ $app_name }}/{{ .src }}
+ source: compose/resources/generated/{{ $app_name }}/{{ .src }}
target: {{ .dst }}
{{- end }}
{{- end}}
diff --git a/deployment-configuration/compose/templates/auto-resources.yaml b/deployment-configuration/compose/templates/auto-resources.yaml
new file mode 100644
index 000000000..8d1e1e738
--- /dev/null
+++ b/deployment-configuration/compose/templates/auto-resources.yaml
@@ -0,0 +1,18 @@
+{{- define "deploy_utils.resource"}}
+{{ $service_name := .app.harness.deployment.name }}
+---
+# {{ $service_name }}-{{ .resource.name }}
+cloudharness-metadata:
+ path: resources/generated/{{ $service_name }}/{{ base .resource.src }}
+data: |
+{{ tpl (.root.Files.Get (print "resources/" $service_name "/" .resource.src)) .root | trim | indent 2 }}
+{{- end}}
+
+{{- range $app := .Values.apps }}
+ {{- if and (hasKey $app "port") $app.harness.deployment.auto | default false }}
+---
+ {{- range $resource := $app.harness.resources }}
+ {{- include "deploy_utils.resource" (dict "app" $app "resource" $resource "root" $) }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml
index a0a37a2f8..9635d33a9 100644
--- a/deployment-configuration/compose/templates/auto-secrets.yaml
+++ b/deployment-configuration/compose/templates/auto-secrets.yaml
@@ -1,5 +1,6 @@
{{- define "deploy_utils.secret" }}
-{{- if .app.harness.secrets }}
+{{- if and .app.harness.secrets false }} {{/* TODO */}}
+
{{- $secret_name := printf "%s" .app.harness.deployment.name }}
apiVersion: v1
kind: Secret
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index d5d51a6fc..c28eb2b77 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -165,21 +165,29 @@ def __post_process_multiple_document_docker_compose(self, yaml_document):
with open(yaml_document, "r") as f:
documents = yaml.safe_load_all(f)
+ main_document = None
for document in documents:
if not document:
continue
if "cloudharness-metadata" in document:
document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"]
logging.info("Post-process docker-compose.yaml, creating %s", document_path)
+ document_path.parent.mkdir(parents=True, exist_ok=True)
data = document["data"]
# if document_path.suffix == ".yaml":
# with open(document_path, "w") as f:
# yaml.dump(yaml.safe_load(data), f, default_flow_style=True)
# else:
+
document_path.write_text(data)
else:
- with open(yaml_document, "w") as f:
- yaml.dump(document, f, default_flow_style=False)
+ # We need to save the main document later
+ # "safe_load_all" returns a generator over the file,
+ # so if we modify it while looping on "documents"
+ # the output will be affected (probably truncated for some outputs)
+ main_document = document # we need to save the main document later,
+ with open(yaml_document, "w") as f:
+ yaml.dump(main_document, f, default_flow_style=False)
def __process_applications(self, helm_values, base_image_name):
for root_path in self.root_paths:
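
The comment above points at a real pitfall: `safe_load_all` is lazy and yields documents while the file is still being read, so writing back into the same file mid-loop can clobber the stream. A tiny sketch of the safe alternative, materializing all documents before any write:

[source,python]
----
import io
import yaml

stream = io.StringIO("a: 1\n---\nb: 2\n")
docs = list(yaml.safe_load_all(stream))  # fully consumed before any write
print(docs)  # [{'a': 1}, {'b': 2}]
----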
From 571c2ab494bd2207f500ae78a5738f77ec49b719 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 22 Feb 2024 09:12:20 -0600
Subject: [PATCH 31/94] CH-100 Add dependency between service and db
---
deployment-configuration/compose/templates/auto-compose.yaml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 9a0f4fd97..abfdb2c63 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -135,6 +135,8 @@ services:
- "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)"
- "traefik.http.routers.{{ $app_name }}.entrypoints=web"
{{- with $app_config.harness.database }}
+ depends_on:
+ - {{ .name }}
{{- if not .auto }}
{{- continue}}
{{- end }}
From 9c8c19ee7b7b79080e1ceb5c87979c363855f5ba Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 06:00:47 -0600
Subject: [PATCH 32/94] CH-100 Add secret handling
---
.../compose/templates/auto-compose.yaml | 3 ++
.../compose/templates/auto-secrets.yaml | 32 +++++++++----------
deployment/sc.yaml | 7 ----
3 files changed, 18 insertions(+), 24 deletions(-)
delete mode 100644 deployment/sc.yaml
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index abfdb2c63..120b8fa90 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -99,6 +99,9 @@ services:
{{- end }}
volumes:
- ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
+ {{- range $file_name, $_ := $app_config.harness.secrets }}
+ - ./compose/resources/generated/auth/{{ $file_name }}:/opt/cloudharness/resources/auth/{{ $file_name }}
+ {{- end }}
{{- if or $deployment.volume $app_config.harness.resources }}
{{- with $deployment.volume }}
- type: volume
diff --git a/deployment-configuration/compose/templates/auto-secrets.yaml b/deployment-configuration/compose/templates/auto-secrets.yaml
index 9635d33a9..ed9345d2f 100644
--- a/deployment-configuration/compose/templates/auto-secrets.yaml
+++ b/deployment-configuration/compose/templates/auto-secrets.yaml
@@ -1,49 +1,47 @@
{{- define "deploy_utils.secret" }}
-{{- if and .app.harness.secrets false }} {{/* TODO */}}
+{{- if .app.harness.secrets }}
{{- $secret_name := printf "%s" .app.harness.deployment.name }}
-apiVersion: v1
-kind: Secret
-metadata:
- name: {{ $secret_name }}
- namespace: {{ .root.Values.namespace }}
- labels:
- app: {{ .app.harness.deployment.name }}
-type: Opaque
{{- $secret := (lookup "v1" "Secret" .root.Values.namespace $secret_name) }}
{{- if $secret }}
# secret already exists
{{- if not (compact (values .app.harness.secrets)) }}
# secret values are null, copy from the existing secret
-data:
{{- range $k, $v := $secret.data }}
- {{ $k }}: {{ $v }}
+cloudharness-metadata:
+ path: resources/generated/auth/{{ $k }}
+
+data: {{ $v }}
+---
{{- end }}
{{- else }}
# there are non default values in values.yaml, use these
stringData:
{{- range $k, $v := .app.harness.secrets }}
- {{ $k }}: {{ $v | default (randAlphaNum 20) }}
+cloudharness-metadata:
+ path: resources/generated/auth/{{ $k }}
+
+data: {{ $v | default (randAlphaNum 20) }}
+---
{{- end }}
{{- end }}
{{- else }}
# secret doesn't exist
stringData:
{{- range $k, $v := .app.harness.secrets }}
- {{ $k }}: {{ $v | default (randAlphaNum 20) }}
+cloudharness-metadata:
+ path: resources/generated/auth/{{ $k }}
+data: {{ $v | default (randAlphaNum 20) }}
+---
{{- end }}
{{- end }}
{{- end }}
----
{{- end }}
----
{{- range $app := .Values.apps }}
----
{{- include "deploy_utils.secret" (dict "root" $ "app" $app) }}
{{- range $subapp := $app }}
{{- if contains "map" (typeOf $subapp) }}
{{- if hasKey $subapp "harness" }}
----
{{- include "deploy_utils.secret" (dict "root" $ "app" $subapp) }}
{{- end }}
{{- end }}
diff --git a/deployment/sc.yaml b/deployment/sc.yaml
deleted file mode 100644
index 69c99d990..000000000
--- a/deployment/sc.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-apiVersion: storage.k8s.io/v1
-kind: StorageClass
-metadata:
- name: standard
-provisioner: docker.io/hostpath
-reclaimPolicy: Delete
-volumeBindingMode: Immediate
\ No newline at end of file
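
Since the compose flow has no Kubernetes Secret store to look up, the template now materializes each secret as a generated file. A hedged Python sketch of the same idea, with illustrative keys and paths and `token_urlsafe` standing in for Sprig's `randAlphaNum 20`:

[source,python]
----
import secrets
from pathlib import Path

app_secrets = {"api-key": None, "fixed": "from-values.yaml"}

for key, value in app_secrets.items():
    out = Path("resources/generated/auth") / key
    out.parent.mkdir(parents=True, exist_ok=True)
    # unset secrets fall back to a random value, like randAlphaNum 20
    out.write_text(value or secrets.token_urlsafe(20))
----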
From f6d0c6763b2378ff5cfe331d019dfa735bf61f2a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 06:25:39 -0600
Subject: [PATCH 33/94] CH-100 Remove argo from dependencies
---
.../compose/templates/auto-compose.yaml | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 120b8fa90..b28fbc882 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -91,7 +91,10 @@ services:
- {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
{{- end }}
{{- end }}
- {{- with $app_config.harness.dependencies.hard }}
+ {{/* Takes the hard deps, removes argo and adds the db if there is one */}}
+ {{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}}
+ {{/* "compact" in the beginning is to remove empty values */}}
+ {{- with compact (append (without $app_config.harness.dependencies.hard "argo") (dig "database" "name" "" $app_config.harness) ) }}
depends_on:
{{- range . }}
- {{ . }}
@@ -138,8 +141,6 @@ services:
- "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)"
- "traefik.http.routers.{{ $app_name }}.entrypoints=web"
{{- with $app_config.harness.database }}
- depends_on:
- - {{ .name }}
{{- if not .auto }}
{{- continue}}
{{- end }}
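
The `without`/`dig`/`compact` pipeline above reads more easily in plain Python; an illustrative mirror of the computed `depends_on` list:

[source,python]
----
def depends_on(harness):
    # drop "argo" from the hard dependencies ("without")
    hard = [dep for dep in harness.get("dependencies", {}).get("hard", []) if dep != "argo"]
    # look up database.name with a "" default ("dig")
    db = (harness.get("database") or {}).get("name", "")
    # drop empty entries ("compact")
    return [dep for dep in hard + [db] if dep]

print(depends_on({"dependencies": {"hard": ["argo", "accounts"]},
                  "database": {"name": "demo-db"}}))
# ['accounts', 'demo-db']
----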
From 4fd6deeb480add6f99fbc30223d1af8353a9288a Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 06:41:41 -0600
Subject: [PATCH 34/94] CH-100 Change from pyyaml to ruamel
The dependency is already pulled in by a third-party lib
---
.../ch_cli_tools/dockercompose.py | 45 +++++++++----------
1 file changed, 20 insertions(+), 25 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index c28eb2b77..bafe5a003 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -2,6 +2,7 @@
Utilities to create a helm chart from a CloudHarness directory structure
"""
import yaml
+from ruamel.yaml import YAML
import os
import shutil
import logging
@@ -162,32 +163,26 @@ def __post_process_multiple_document_docker_compose(self, yaml_document):
logging.warning("Something went wrong during the docker-compose.yaml generation, cannot post-process it")
return
- with open(yaml_document, "r") as f:
- documents = yaml.safe_load_all(f)
+ yaml_handler = YAML()
+ documents = yaml_handler.load_all(yaml_document)
- main_document = None
- for document in documents:
- if not document:
- continue
- if "cloudharness-metadata" in document:
- document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"]
- logging.info("Post-process docker-compose.yaml, creating %s", document_path)
- document_path.parent.mkdir(parents=True, exist_ok=True)
- data = document["data"]
- # if document_path.suffix == ".yaml":
- # with open(document_path, "w") as f:
- # yaml.dump(yaml.safe_load(data), f, default_flow_style=True)
- # else:
-
- document_path.write_text(data)
- else:
- # We need to save the main document later
- # "safe_load_all" returns a generator over the file,
- # so if we modify it while looping on "documents"
- # the output will be affected (probably truncated for some outputs)
- main_document = document # we need to save the main document later,
- with open(yaml_document, "w") as f:
- yaml.dump(main_document, f, default_flow_style=False)
+ main_document = None
+ for document in documents:
+ if not document:
+ continue
+ if "cloudharness-metadata" in document:
+ document_path = self.dest_deployment_path / document["cloudharness-metadata"]["path"]
+ logging.info("Post-process docker-compose.yaml, creating %s", document_path)
+ document_path.parent.mkdir(parents=True, exist_ok=True)
+ data = document["data"]
+ document_path.write_text(data)
+ else:
+ # We need to save the main document later
+ # "load_all" returns a generator over the file,
+ # so if we modify it while looping on "documents"
+ # the output will be affected (probably truncated for some outputs)
+ main_document = document # we need to save the main document later
+ yaml_handler.dump(main_document, yaml_document)
def __process_applications(self, helm_values, base_image_name):
for root_path in self.root_paths:
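
A minimal round-trip matching the new call pattern; this assumes, as the patch does, that ruamel accepts `pathlib.Path` objects for both `load_all` and `dump` (the path is illustrative):

[source,python]
----
from pathlib import Path
from ruamel.yaml import YAML

path = Path("deployment/docker-compose.yaml")
yaml_handler = YAML()
documents = [doc for doc in yaml_handler.load_all(path) if doc]
yaml_handler.dump(documents[0], path)  # keep the main compose document
----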
From 86c0e4f51c4c4a56a4b59e148f1c1454675286ec Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 06:42:24 -0600
Subject: [PATCH 35/94] CH-100 Remove tmp "events" from deps
---
deployment-configuration/compose/templates/auto-compose.yaml | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index b28fbc882..8fc62e833 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -88,7 +88,11 @@ services:
links:
{{- range . }}
{{- $service_name := (get $.Values.apps .).harness.service.name }}
+ {{- if eq . "events"}}
+# - {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
+ {{- else }}
- {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
+ {{- end }}
{{- end }}
{{- end }}
{{/* Takes the hard deps, removes argo and adds the db if there is one */}}
From a5909cea41d1af911c025c29711adb73fb299ed4 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 06:46:25 -0600
Subject: [PATCH 36/94] CH-100 Add back sc.yaml
---
deployment/sc.yaml | 7 +++++++
1 file changed, 7 insertions(+)
create mode 100644 deployment/sc.yaml
diff --git a/deployment/sc.yaml b/deployment/sc.yaml
new file mode 100644
index 000000000..69c99d990
--- /dev/null
+++ b/deployment/sc.yaml
@@ -0,0 +1,7 @@
+apiVersion: storage.k8s.io/v1
+kind: StorageClass
+metadata:
+ name: standard
+provisioner: docker.io/hostpath
+reclaimPolicy: Delete
+volumeBindingMode: Immediate
\ No newline at end of file
From 4379252dd0bea6e6b4ad4d66f671e075fca598f9 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 23 Feb 2024 07:46:38 -0600
Subject: [PATCH 37/94] CH-100 Add first handling of NFS volume
---
.../nfsserver-deployment.yaml | 18 ++++++++++++------
.../compose/templates/auto-compose.yaml | 7 +++++++
2 files changed, 19 insertions(+), 6 deletions(-)
diff --git a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
index 7e9b68194..50dc08ed9 100644
--- a/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
+++ b/applications/nfsserver/deploy/templates-compose/nfsserver-deployment.yaml
@@ -1,13 +1,19 @@
{{- define "nfsserver.deployment" }}
-{{- $nfs := .apps.nfsserver}}
+{{- with .apps.nfsserver}}
-{{ $nfs.name }}:
- image: {{ $nfs.harness.deployment.image }}
+{{ .name }}:
+ image: {{ .harness.deployment.image }}
environment:
- # NFS useDNS? {{ $nfs.nfs.useDNS }}
- {{- if $nfs.nfs.useDNS }}
+ # NFS useDNS? {{ .nfs.useDNS }}
+ {{- if .nfs.useDNS }}
- NFS_SERVER={{ printf "nfs-server.%s.svc.cluster.local" .namespace }}
{{- end }}
- - NFS_PATH={{ $nfs.nfs.path }}
+ - NFS_PATH={{ .nfs.path }}
- PROVISIONER_NAME={{ printf "%s-nfs-provisioner" .namespace }}
+
+ volumes:
+ - type: volume
+ source: {{ .nfs.volumeName }}
+ target: {{ .nfs.path }}
+{{- end }}
{{- end }}
\ No newline at end of file
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 8fc62e833..a8ac40cf7 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -168,4 +168,11 @@ volumes: # this inclusion needs to be conditional
dshm:
{{- end }}
{{- end }}
+ {{- if eq $app_name "nfsserver" }}
+ {{ $app_config.nfs.volumeName }}:
+ # driver_opts:
+ # type: "nfs"
+ # o: "{{ join "," $app_config.nfs.mountOptions }}"
+ # device: ":{{ $app_config.nfs.path }}"
+ {{- end }}
{{- end }}
From 46ee1408a87baf8f4f387a45ba4173af77b6a0e4 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 26 Feb 2024 11:36:48 -0600
Subject: [PATCH 38/94] CH-100 Add volumes conditionally
---
.../compose/templates/auto-compose.yaml | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index a8ac40cf7..c2aad0147 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -157,7 +157,20 @@ networks:
ch:
name: ch_network
-volumes: # this inclusion needs to be conditional
+{{- range $app_name, $app_config := .Values.apps }}
+ {{- with $app_config.harness.deployment.volume }}
+volumes:
+ {{- break }}
+ {{- end }}
+ {{- with $app_config.harness.database }}
+volumes:
+ {{- break }}
+ {{- end }}
+ {{- if eq $app_name "nfsserver" }}
+volumes:
+ {{- break }}
+ {{- end }}
+{{- end }}
{{- range $app_name, $app_config := .Values.apps }}
{{- with $app_config.harness.deployment.volume }}
{{ .name }}:
From e135c6a6dbef827854d742e188f4866703da2b00 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 26 Feb 2024 12:27:56 -0600
Subject: [PATCH 39/94] CH-100 Add first tutorial about docker compose
---
...clock-application-with-docker-compose.adoc | 328 ++++++++++++++++++
1 file changed, 328 insertions(+)
create mode 100644 docs/tutorials/simple-date-clock-application-with-docker-compose.adoc
diff --git a/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc b/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc
new file mode 100644
index 000000000..70e8f1e22
--- /dev/null
+++ b/docs/tutorials/simple-date-clock-application-with-docker-compose.adoc
@@ -0,0 +1,328 @@
+:repo_url: ../../../../../
+:ch: CloudHarness
+:dc: Docker Compose
+:dc-: docker compose
+:repo_fragment: MetaCell/cloud-harness
+
+
+= A simple date-clock application: a {ch} tutorial targeting {dc}
+
+In this small tutorial, we will see different aspects of developing applications with {ch} by building, from scratch, a small webapp that fetches information from a server on a regular basis.
+This tutorial will show you how to generate the `{dc-}` configuration and how to build and deploy this simple application.
+
+{ch} generates the initial files and folders for your project from templates, each tackling a different aspect of your app according to your requirements, __e.g.__, for a webapp project, it generates the initial ReactJS files for the frontend and the initial Flask files for the backend.
+For the API part, {ch} relies on OpenAPI 3 to deal with the endpoints/model description.
+
+The different aspects that will be covered here are:
+
+* how to bootstrap a new app, build it, and deploy it on {dc};
+* how to modify/update the app, build it, and run it again.
+
+== The tools you need to deploy/build your application
+
+The following tools, besides Python, are not strictly required to work with {ch} in general, but they are needed for this tutorial.
+Before installing everything, please make sure you have the following tools installed on your machine:
+
+* `python`
+* `yarn`
+* `{ch}` -- if not installed, please check other documentation and tutorials
+* `helm` -- to generate the {dc} configuration
+* `skaffold` -- to build the different images that will run on {dc}
+* `{dc-}` -- to actually run the built application
+
+
+== Creating a very simple webapp
+
+Now that we know how to configure/run/deploy apps on our local cluster, we will create a very simple webapp.
+In this first time, we will only generate the project's artifacts using the `harness-application`, then, we will build/run/deploy it.
+In a second time, we will modify the API to add new endpoints and deal with the frontend accordingly.
+
+=== Creating a new webapp and building the frontend
+
+The webapp we will create is deliberately simple: it fetches the current date and time when a button is pressed.
+Nothing fancy, just a way to see how to interact with the generated sources and get everything running on your local cluster.
+
+The first step is to generate the projects files.
+In our case, we want to develop a webapp, meaning that we want a frontend and a backend.
+We use `harness-application` to generate the first files from two specific templates: `webapp` and `flask-server`.
+We first place ourselves in the parent directory of where you cloned the `cloud-harness` repository.
+
+[NOTE]
+We could place ourselves anywhere; we would just have to remember the path to the `cloud-harness` repository.
+
+.Generating the first project's file
+[source,bash]
+----
+harness-application clockdate -t webapp -t flask-server
+----
+
+The name of the application is `clockdate` and we use the `webapp` and `flask-server` templates.
+Various other templates exist for different purposes: DB interaction, backend, frontend, ...
+
+We can now see that a new directory named `clockdate` has been created in the `applications` folder.
+It is organized into several sub-folders, each playing a different role in the app.
+
+.Ensuring that the backend is considered as a webapp
+We will now make a small modification, or rather ensure, that the backend code activates its "webapp" mode.
+Open the generated file `clockdate/backend/clockdate/__main__.py` and check that the following line has the keyword parameter `webapp` set to `True`.
+
+[source,python]
+----
+app = init_flask(title="clockdate", init_app_fn=None, webapp=True)
+----
+
+This option ensures the registration of some specific endpoints by {ch}.
+In this case, it ensures that the `/` endpoint will be mapped to the `index.html` produced for the frontend.
+
+.Building the frontend
+
+In this tutorial, before having {ch} generate the {dc} configuration files, we will build the frontend using `yarn`.
+Enter the `clockdate/frontend` folder and type
+
+[source, bash]
+----
+yarn install
+----
+
+This will generate a `yarn.lock` which is required later for the build of the Docker images.
+
+[NOTE]
+This step could have been done later, but it has to be done *before* the build of the different Docker images using `skaffold`.
+
+
+.Generating the `{dc-}` configuration files for our `clockdate` app
+[source,bash]
+----
+# run in the directory that contains the cloud-harness repository
+harness-deployment cloud-harness . -u -dtls -l -d azathoth.local -e local -n azathoth -i clockdate --docker-compose
+----
+
+The key here is to add the `--docker-compose` option that will trigger the generation of a set of files in the `deployment` folder,
+as well as a slightly modified version of the `skaffold.yaml` file.
+
+As a result, in the `deployment` folder, we should have something that looks like this:
+
+[source]
+----
++- CURRENT_DIRECTORY
+   [...]
+   +- deployment/
+   |  +- compose/            -> the template files and some generated files dedicated to docker compose
+   |  `- docker-compose.yaml -> the main file used by {dc} to deploy everything
+   `- skaffold.yaml          -> used by skaffold to build the Docker images
+----
+
+Now you can build/deploy/run it using `skaffold`.
+
+[source,bash]
+----
+skaffold build
+----
+
+.Deploying your app on {dc}
+
+To deploy the application on {dc}, you only need to move to the directory where the `docker-compose.yaml` file was generated, that is, the `deployment` folder.
+
+[source,bash]
+----
+cd deployment
+docker compose up
+----
+
+This command will download the necessary images and reuse the ones built by `skaffold` to deploy everything.
+
+Now, to make sure you can properly access the app, a small addition to your `/etc/hosts` file is required:
+
+[source]
+----
+127.0.0.1 clockdate.azathoth.local
+----
+
+Now you can open your browser to `http://clockdate.azathoth.local` and see that everything is running properly.
+You can also go to `http://clockdate.azathoth.local/api/ping` and check that you get a response.
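+
+If you prefer a scripted check, here is a small sketch assuming the `requests` package is installed and the `/etc/hosts` entry above is in place:
+
+[source,python]
+----
+import requests
+
+# the host name matches the /etc/hosts entry added above
+response = requests.get("http://clockdate.azathoth.local/api/ping")
+print(response.status_code, response.text)
+----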
+
+
+=== Modifying your webapp, adding behavior
+
+We are now able to generate and run applications, but we have not yet added our own behavior.
+To do so, we need to modify the generated sources.
+If we take a deeper look at the folder generated by `harness-application`, we see three folders that we will modify in normal usage:
+
+.Generated directory organization
+[source]
+----
++- api -> owns the OpenAPI definition of the endpoints/resources handled by the API
++- backend
+ `- clockdate -> the project backend files
+ |- controllers -> the controller definition
+ `- models -> the resources exposed by the API
++- frontend -> the webpage files
+----
+
+First, we will modify the backend to add a new endpoint that returns the current date and time as a string.
+The process is the following:
+
+. we add the new endpoint in the `api` folder, modifying the `openapi.yaml` file,
+. we regenerate the application code using `harness-application`,
+. we code the behavior of the endpoint in the dedicated method generated in the `backend/clockdate/controllers` folder,
+. we build/deploy/run the code to see it running (this step can be replaced by a pure Python run of the backend for a quicker dev loop).
+
+==== Adding the new endpoint to the openapi specification
+
+We will add a new endpoint named `currentdate` that answers with a string on `GET`.
+To do so, we add a new entry in the `paths` section.
+
+.Modifying the `api/openapi.yaml` file
+[source,yaml]
+----
+paths:
+ /currentdate:
+ get:
+ operationId: currentdate
+ responses:
+ "200":
+ content:
+ application/json:
+ schema:
+ type: string
+ description: Current date and time
+ "500":
+ description: System cannot give the current time
+ summary: Gets the current date and time
+ tags: [datetime]
+----
+
+[NOTE]
+The name of the controller in which the endpoint's function will be generated depends on the `tags` value defined in the `api/openapi.yaml` file.
+
+We validate that our OpenAPI specification is correct.
+
+[source]
+----
+$ openapi-spec-validator applications/clockdate/api/openapi.yaml
+OK
+----
+
+Now we regenerate the application code by running `harness-application` another time.
+
+.Regenerating the code of our modified app
+[source,bash]
+----
+harness-application clockdate -t flask-server -t webapp
+----
+
+This will add a new `datetime_controller.py` in the `backend/clockdate/controllers` package.
+
+[IMPORTANT]
+You should be aware that all the controller files (in fact, all the files) in the `backend` directory are overwritten by the generation.
+To prevent files from being overwritten, edit the `.openapi-generator-ignore` file, which acts somewhat like a `.gitignore` file, marking the files/directories that the generation must ignore.
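+
+For instance, a hypothetical entry such as the following would keep our controller from being regenerated (paths are resolved relative to the generator output, so adjust it to your layout):
+
+[source]
+----
+# keep the hand-written controller
+clockdate/controllers/datetime_controller.py
+----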
+
+When we open the generated `datetime_controller.py`, we find the following controller method:
+
+[source,python]
+----
+def currentdate(): # noqa: E501
+ """Gets the current date and time
+
+ # noqa: E501
+
+
+ :rtype: str
+ """
+ return 'do some magic!'
+----
+
+This is the moment to add the behavior we want:
+
+[source,python]
+----
+def currentdate(): # noqa: E501
+ """Gets the current date and time
+
+ # noqa: E501
+
+
+ :rtype: str
+ """
+ from datetime import datetime
+ return f'{datetime.now()}'
+----
+
+We simply import the `datetime` type from the `datetime` module and ask for the current date and time.
+Here, string interpolation is used only to force the result to be formatted as a string.
+It's not mandatory.
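+
+Before rebuilding, we can sanity-check the controller directly; a sketch assuming the backend package imports cleanly when run from the `backend` directory:
+
+[source,python]
+----
+from datetime import datetime
+from clockdate.controllers.datetime_controller import currentdate
+
+value = currentdate()
+datetime.fromisoformat(value)  # raises ValueError if the format is wrong
+print(value)
+----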
+
+Now that our new endpoint is coded, we can build/deploy/run it using `skaffold build` then `{dc-} up`.
+Once the deployment is done, we can navigate to: http://clockdate.azathoth.local/api/currentdate to appreciate the result.
+
+
+=== A quick and dirty frontend to test our endpoint
+
+
+Now that we have the "backend" running, we will modify the frontend to show a label and a button that fetches the date and time from the new endpoint we defined.
+If we look at the generated frontend source code, we see a `src/rest/api.ts` file.
+The generated code targets ReactJS as its framework.
+This module provides clients for the API generated from the `api/openapi.yaml` specification.
+More precisely, it provides one client per `tag` defined in the OpenAPI specification.
+In our case, we defined a tag `datetime`, so we find a `DatetimeApi` class in `api.ts`.
+This is the class we will instantiate and use to call the endpoint we defined in the previous section.
+
+First, we are going to code a new React component that shows the current date and time in a header, with a button to fetch them again from the server.
+
+We call this component `DateTime` and place it in a `DateTime.tsx` file in the `src/components` directory.
+
+.Code of the `frontend/src/component/DateTime.tsx` component
+[source,javascript]
+----
+import React, { useState, useEffect, useCallback } from 'react';
+import { DatetimeApi } from '../rest/api'
+
+const api = new DatetimeApi() <1>
+
+const DateTime = () => {
+ const [datetime, setDatetime] = useState('unavailable');
+ useEffect(() => updateDate(), []);
+
+ const updateDate = useCallback(() => {
+ api.currentdate().then(r => setDatetime(r.data)); <2>
+ }, []);
+
+ return (
+    <div>
+      <h1>{datetime}</h1>
+      <button onClick={updateDate}>Fetch</button>
+    </div>
+ )
+}
+
+export default DateTime;
+----
+
+<1> The `DatetimeApi` class is instantiated; this is the instance we will use every time we need to perform a request against an API endpoint.
+<2> is where the call is actually performed. The `currentdate` method is generated by {ch}.
+
+Now that we have our dedicated component, we will integrate it into the current page.
+To do that, we need to modify the `App.tsx` component.
+This component is located in `frontend/src/App.tsx`.
+We modify the content of this file this way:
+
+.Code of the `frontend/src/App.tsx` component
+[source,javascript]
+----
+import React from 'react';
+import './styles/style.less';
+import DateTime from './components/DateTime';
+
+const Main = () => (
+  <>
+    <h1>Ask for date and time</h1>
+    <DateTime />
+    <p>See api documentation here</p>
+  </>
+);
+
+export default Main;
+----
+
+Once this is done, we can build/deploy/run our webapp again using `skaffold build` then `{dc-} up`.
+That's it!
From 0f701461b25885eec04a25c2d02d8085204967c0 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 29 Feb 2024 12:35:18 -0600
Subject: [PATCH 40/94] CH-100 Add first real config for gatekeeper
---
.../compose/templates/auto-compose.yaml | 18 +++
.../compose/templates/auto-gatekeepers.yaml | 134 ++++++------------
2 files changed, 63 insertions(+), 89 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index c2aad0147..67154da49 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -23,10 +23,28 @@ services:
- "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
{{- range $app_name, $app_config := .Values.apps }}
+ {{- if eq $app_name "argo" -}}
+ {{- continue -}}
+ {{- end -}}
{{ $deployment := $app_config.harness.deployment }}
{{- if eq $app_name "nfsserver" }}
{{- include "nfsserver.deployment" $.Values | indent 2 }}
{{- end }}
+ {{- if $.Values.secured_gatekeepers }}
+ {{ if and (hasKey $app_config "port") $app_config.harness.secured }}
+ # Gatekeeper for {{ $app_config.harness.service.name }}-gk
+{{- include "securedservice.deploy" (dict "root" $ "app" $app_config) | indent 2 }}
+ {{- end }}
+ {{- range $subapp := $app_config }}
+ {{- if contains "map" (typeOf $subapp) }}
+ {{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }}
+ {{- if $subapp.harness.secured }}
+{{ include "securedservice.deploy" (dict "root" $ "app" $subapp) | indent 2 }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
{{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
{{- end}}
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index 898995cd6..f0d68c7f6 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -1,14 +1,35 @@
{{/* Secured Services/Deployments */}}
-{{- define "deploy_utils.securedservice" }}
+{{- define "securedservice.deploy" }}
{{- $tls := not (not .root.Values.tls) }}
-apiVersion: v1
-kind: ConfigMap
-metadata:
- name: "{{ .app.harness.service.name }}-gk"
- labels:
- app: "{{ .app.harness.service.name }}-gk"
-data:
- proxy.yml: |-
+{{ .app.harness.service.name }}-gk:
+ image: quay.io/gogatekeeper/gatekeeper:1.3.8
+ expose:
+ - '8080'
+ - '8443'
+ deploy:
+ mode: replicated
+ replicas: 1
+ resources:
+ limits:
+ cpus: 100m
+ memory: 64M
+ reservations:
+ cpus: 50m
+ memory: 32M
+ environment:
+ - PROXY_CONFIG_FILE=/opt/proxy.yml
+ volumes:
+ - compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml
+ - compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
+ - compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl
+{{- end }}
+
+{{- define "securedservice.deploy.resources" }}
+{{- $tls := not (not .root.Values.tls) }}
+cloudharness-metadata:
+ path: resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml
+
+data: |-
verbose: {{ .root.Values.debug }}
discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }}
client-id: {{ .root.Values.apps.accounts.webclient.id | quote }}
@@ -40,8 +61,17 @@ data:
skip-openid-provider-tls-verify: true
skip-upstream-tls-verify: true
{{- end }}
- cacert.crt: {{ .files.Get "resources/certs/cacert.crt" | quote }}
- access-denied.html.tmpl: |-
+---
+cloudharness-metadata:
+ path: resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt
+
+data: |-
+{{ .files.Get "resources/certs/cacert.crt" | indent 2 }}
+---
+cloudharness-metadata:
+ path: resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl
+
+data: |-
@@ -79,93 +109,19 @@ data:
---
-apiVersion: v1
-kind: Service
-metadata:
- name: "{{ .app.harness.service.name }}-gk"
- labels:
- app: "{{ .app.harness.service.name }}-gk"
-spec:
- ports:
- - name: http
- port: 8080
- selector:
- app: "{{ .app.harness.service.name }}-gk"
- type: ClusterIP
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: "{{ .app.harness.service.name }}-gk"
- labels:
- app: "{{ .app.harness.service.name }}-gk"
-
-spec:
- replicas: 1
- selector:
- matchLabels:
- app: "{{ .app.harness.service.name }}-gk"
- template:
- metadata:
- annotations:
- checksum/config: {{ .app.harness.uri_role_mapping | toString | sha256sum }}
- labels:
- app: "{{ .app.harness.service.name }}-gk"
- spec:
-{{ include "deploy_utils.etcHosts" .root | indent 6 }}
- containers:
- - name: {{ .app.harness.service.name | quote }}
- image: "quay.io/gogatekeeper/gatekeeper:1.3.8"
- imagePullPolicy: IfNotPresent
- {{ if .root.Values.local }}
- securityContext:
- allowPrivilegeEscalation: false
- runAsUser: 0
- {{- end }}
- env:
- - name: PROXY_CONFIG_FILE
- value: /opt/proxy.yml
- volumeMounts:
- - name: "{{ .app.harness.service.name }}-gk-proxy-config"
- mountPath: /opt/proxy.yml
- subPath: proxy.yml
- - name: "{{ .app.harness.service.name }}-gk-proxy-config"
- mountPath: /etc/pki/ca-trust/source/anchors/cacert.crt
- subPath: cacert.crt
- - name: "{{ .app.harness.service.name }}-gk-proxy-config"
- mountPath: /templates/access-denied.html.tmpl
- subPath: access-denied.html.tmpl
- ports:
- - name: http
- containerPort: 8080
- - name: https
- containerPort: 8443
- resources:
- requests:
- memory: "32Mi"
- cpu: "50m"
- limits:
- memory: "64Mi"
- cpu: "100m"
- volumes:
- - name: "{{ .app.harness.service.name }}-gk-proxy-config"
- configMap:
- name: "{{ .app.harness.service.name }}-gk"
----
{{- end }}
+
{{- if .Values.secured_gatekeepers }}
{{ $files := .Files }}
{{- range $app := .Values.apps }}
{{- if and (hasKey $app "port") ($app.harness.secured) }}
----
- {{ include "deploy_utils.securedservice" (dict "root" $ "app" $app "files" $files) }}
- {{- end }}
+ {{ include "securedservice.deploy.resources" (dict "root" $ "app" $app "files" $files) }}
+ {{- end }}
{{- range $subapp := $app }}
{{- if contains "map" (typeOf $subapp) }}
{{- if and (hasKey $subapp "harness.port") (hasKey $subapp "harness.secured") }}
{{- if $subapp.harness.secured }}
----
- {{ include "deploy_utils.securedservice" (dict "root" $ "app" $subapp "files" $files) }}
+ {{ include "securedservice.deploy.resources" (dict "root" $ "app" $subapp "files" $files) }}
{{- end }}
{{- end }}
{{- end }}
From 6bbef64e633cecebc14d0c85cfc18393f843122d Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 29 Feb 2024 12:55:39 -0600
Subject: [PATCH 41/94] CH-100 Add reverse proxy config for gatekeeper
---
.../compose/templates/auto-compose.yaml | 25 ++++++++++++-------
.../compose/templates/auto-gatekeepers.yaml | 5 ++++
2 files changed, 21 insertions(+), 9 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 67154da49..9379c57df 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -30,8 +30,9 @@ services:
{{- if eq $app_name "nfsserver" }}
{{- include "nfsserver.deployment" $.Values | indent 2 }}
{{- end }}
- {{- if $.Values.secured_gatekeepers }}
- {{ if and (hasKey $app_config "port") $app_config.harness.secured }}
+ {{- $isSecured := (and $.Values.secured_gatekeepers ( and (hasKey $app_config "port") $app_config.harness.secured )) -}}
+ {{ if $isSecured }}
+
# Gatekeeper for {{ $app_config.harness.service.name }}-gk
{{- include "securedservice.deploy" (dict "root" $ "app" $app_config) | indent 2 }}
{{- end }}
@@ -44,7 +45,6 @@ services:
{{- end }}
{{- end }}
{{- end }}
- {{- end }}
{{- if or (not $deployment.auto) (not $app_config.harness.service.auto) }}
{{- continue }}
{{- end}}
@@ -104,19 +104,24 @@ services:
{{- end }}
{{- with $app_config.harness.dependencies.soft }}
links:
- {{- range . }}
- {{- $service_name := (get $.Values.apps .).harness.service.name }}
- {{- if eq . "events"}}
+ {{- range . -}}
+ {{- $service_name := (get $.Values.apps .).harness.service.name -}}
+ {{- if eq . "events" }}
# - {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
{{- else }}
- {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
{{- end }}
- {{- end }}
- {{- end }}
+ {{- end -}}
+ {{- end -}}
{{/* Takes the hard deps, removes argo and adds the db if there is one */}}
{{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}}
{{/* "compact" in the beginning is to remove empty values */}}
- {{- with compact (append (without $app_config.harness.dependencies.hard "argo") (dig "database" "name" "" $app_config.harness) ) }}
+ {{- with compact
+ (append
+ (append
+ (without $app_config.harness.dependencies.hard "argo")
+ (dig "database" "name" "" $app_config.harness))
+ (ternary (printf "%s-gk" $app_config.harness.service.name) "" $isSecured)) -}}
depends_on:
{{- range . }}
- {{ . }}
@@ -153,6 +158,7 @@ services:
{{- end }}
{{- end }}
{{- end }}
+ {{- if not $isSecured }}
labels:
- "traefik.enable=true"
{{- with $app_config.harness.service.port }}
@@ -162,6 +168,7 @@ services:
# - "traefik.http.routers.{{ .app_name }}.middlewares=redirect-middleware"
- "traefik.http.routers.{{ $app_name }}.rule=Host(`{{ $app_config.harness.subdomain }}.{{ $.Values.domain }}`)"
- "traefik.http.routers.{{ $app_name }}.entrypoints=web"
+ {{- end }}
{{- with $app_config.harness.database }}
{{- if not .auto }}
{{- continue}}
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index f0d68c7f6..d27a6f501 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -22,6 +22,11 @@
- compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml
- compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
- compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl
+ labels:
+ - "traefik.enable=true"
+ - "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}"
+ - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)"
+ - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web"
{{- end }}
{{- define "securedservice.deploy.resources" }}
From 3b437cf018a01085e13d462f168e7796568376d7 Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 29 Feb 2024 13:27:30 -0600
Subject: [PATCH 42/94] CH-100 Fix path to generated resources
---
.../compose/templates/auto-compose.yaml | 2 +-
.../compose/templates/auto-gatekeepers.yaml | 12 +++++++++---
2 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 9379c57df..ba7dd79ee 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -141,7 +141,7 @@ services:
{{- with $app_config.harness.resources }}
{{- range .}}
- type: bind
- source: compose/resources/generated/{{ $app_name }}/{{ .src }}
+ source: ./compose/resources/generated/{{ $app_name }}/{{ .src }}
target: {{ .dst }}
{{- end }}
{{- end}}
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index d27a6f501..fc27efd0e 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -19,14 +19,20 @@
environment:
- PROXY_CONFIG_FILE=/opt/proxy.yml
volumes:
- - compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml
- - compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
- - compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl
+ - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml
+ - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
+ - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl
labels:
- "traefik.enable=true"
- "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}"
- "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)"
- "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web"
+ depends_on:
+ - accounts
+ links:
+ - accounts:accounts.{{ .root.Values.domain }}
+ extra_hosts:
+ - "accounts.{{ .root.Values.domain }}=127.0.0.11"
{{- end }}
{{- define "securedservice.deploy.resources" }}
From 1135b992db0b533997b8866ee0fa5becff7543b5 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 1 Mar 2024 07:25:07 -0600
Subject: [PATCH 43/94] CH-100 Fix bad GK configuration
---
.../compose/templates/auto-compose.yaml | 13 ++++++++++---
.../compose/templates/auto-gatekeepers.yaml | 10 ++++++----
2 files changed, 16 insertions(+), 7 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index ba7dd79ee..7ef773861 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -86,6 +86,13 @@ services:
{{- with $deployment.command }}
# entrypoint: {{ cat . $deployment.args }}
{{- end }}
+ {{- if eq $app_name "accounts" }}
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/azathoth/account"]
+ interval: 1s
+ timeout: 3s
+ retries: 30
+ {{- end }}
environment:
- CH_CURRENT_APP_NAME={{ $app_name }}
@@ -117,11 +124,9 @@ services:
{{/* To be sure to add the db properly, we "dig" the "harness" config for "database.name" and return "" if one of the keys doesn't exist */}}
{{/* "compact" in the beginning is to remove empty values */}}
{{- with compact
- (append
(append
(without $app_config.harness.dependencies.hard "argo")
- (dig "database" "name" "" $app_config.harness))
- (ternary (printf "%s-gk" $app_config.harness.service.name) "" $isSecured)) -}}
+ (dig "database" "name" "" $app_config.harness)) -}}
depends_on:
{{- range . }}
- {{ . }}
@@ -146,6 +151,7 @@ services:
{{- end }}
{{- end}}
{{- end }}
+ {{/*
{{- if $.Values.local }}
# Extra /etc/hosts list
{{- $domain := $.Values.domain }}
@@ -158,6 +164,7 @@ services:
{{- end }}
{{- end }}
{{- end }}
+ */}}
{{- if not $isSecured }}
labels:
- "traefik.enable=true"
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index fc27efd0e..a91bd96af 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -2,6 +2,9 @@
{{- define "securedservice.deploy" }}
{{- $tls := not (not .root.Values.tls) }}
{{ .app.harness.service.name }}-gk:
+ networks:
+ - ch
+ restart: always
image: quay.io/gogatekeeper/gatekeeper:1.3.8
expose:
- '8080'
@@ -28,11 +31,10 @@
- "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)"
- "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web"
depends_on:
- - accounts
+ accounts:
+ condition: service_healthy
links:
- accounts:accounts.{{ .root.Values.domain }}
- extra_hosts:
- - "accounts.{{ .root.Values.domain }}=127.0.0.11"
{{- end }}
{{- define "securedservice.deploy.resources" }}
@@ -42,7 +44,7 @@ cloudharness-metadata:
data: |-
verbose: {{ .root.Values.debug }}
- discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }}
+ discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}:8080/auth/realms/{{ .root.Values.namespace }}
client-id: {{ .root.Values.apps.accounts.webclient.id | quote }}
client-secret: {{ .root.Values.apps.accounts.webclient.secret }}
secure-cookie: {{ $tls }}
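
Gatekeeper fetches the OIDC discovery document at startup, so it must not start before Keycloak serves the realm; the healthcheck combined with the `service_healthy` condition encodes that ordering. The resulting contract, sketched for a hypothetical `ch` realm and `samples` app:

```yaml
services:
  accounts:
    healthcheck:
      # Succeeds only once the realm endpoint answers:
      test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/ch/account"]
      interval: 1s
      timeout: 3s
      retries: 30
  samples-gk:
    depends_on:
      accounts:
        condition: service_healthy   # start only after the realm is reachable
```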
From b23f03b87319d4dce2c1b1288834ad00b95b4755 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 1 Mar 2024 08:04:40 -0600
Subject: [PATCH 44/94] CH-100 Adapt gk configuration
---
.../compose/templates/auto-compose.yaml | 10 +++++-----
.../compose/templates/auto-gatekeepers.yaml | 17 ++++++++++-------
2 files changed, 15 insertions(+), 12 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 7ef773861..ff6145cbc 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -64,14 +64,14 @@ services:
image: {{ . }}
{{- end }}
{{- if eq $.Values.mainapp $app_name }}
- # {{- with $app_config.harness.service.port }}
- # ports:
- # - "{{ . }}:{{ $app_config.harness.deployment.port }}"
- # {{- end }}
+ {{- with $app_config.harness.service.port }}
+ ports:
+ - "{{ . }}:{{ $app_config.harness.deployment.port }}"
+ {{- end }}
{{- end }}
{{- with $app_config.harness.deployment.port }}
expose:
- - {{ . | quote }}
+ - {{ . }}
{{- end}}
deploy:
mode: "replicated"
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index a91bd96af..b9e6f8cb4 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -1,7 +1,8 @@
{{/* Secured Services/Deployments */}}
{{- define "securedservice.deploy" }}
{{- $tls := not (not .root.Values.tls) }}
-{{ .app.harness.service.name }}-gk:
+{{- $gk_name := printf "%s-gk" .app.harness.service.name }}
+{{ $gk_name }}:
networks:
- ch
restart: always
@@ -22,19 +23,21 @@
environment:
- PROXY_CONFIG_FILE=/opt/proxy.yml
volumes:
- - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/proxy.yml:/opt/proxy.yml
- - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
- - ./compose/resources/generated/{{ .app.harness.service.name }}-gk/access-denied.html.tmpl:/templates/access-denied.html.tmpl
+ - ./compose/resources/generated/{{ $gk_name }}/proxy.yml:/opt/proxy.yml
+ - ./compose/resources/generated/{{ $gk_name }}/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
+ - ./compose/resources/generated/{{ $gk_name }}/access-denied.html.tmpl:/templates/access-denied.html.tmpl
labels:
- "traefik.enable=true"
- - "traefik.http.services.{{ .app.harness.service.name }}-gk.loadbalancer.server.port={{ .app.harness.service.port }}"
- - "traefik.http.routers.{{ .app.harness.service.name }}-gk.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)"
- - "traefik.http.routers.{{ .app.harness.service.name }}-gk.entrypoints=web"
+ - "traefik.http.services.{{ $gk_name }}.loadbalancer.server.port={{ .app.harness.service.port }}"
+ - "traefik.http.routers.gatekeeper.middlewares=redirect-middleware"
+ - "traefik.http.routers.{{ $gk_name }}.rule=Host(`{{ .app.harness.subdomain }}.{{ .root.Values.domain }}`)"
+ - "traefik.http.routers.{{ $gk_name }}.entrypoints=web"
depends_on:
accounts:
condition: service_healthy
links:
- accounts:accounts.{{ .root.Values.domain }}
+ - {{ .app.harness.service.name }}:{{ .app.harness.service.name }}.default
{{- end }}
{{- define "securedservice.deploy.resources" }}
From 361ca4633a302a8b3d27e10db862cd1bba370eeb Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 1 Mar 2024 13:02:16 -0600
Subject: [PATCH 45/94] CH-100 Fix gk configuration
---
applications/samples/frontend/webpack.config.js | 2 +-
deployment-configuration/compose/templates/auto-compose.yaml | 2 +-
.../compose/templates/auto-gatekeepers.yaml | 4 ++--
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/applications/samples/frontend/webpack.config.js b/applications/samples/frontend/webpack.config.js
index ad5ee5566..69f80cb99 100644
--- a/applications/samples/frontend/webpack.config.js
+++ b/applications/samples/frontend/webpack.config.js
@@ -29,7 +29,7 @@ module.exports = function webpacking(envVariables) {
const output = {
path: path.resolve(__dirname, "dist"),
- filename: "[name].[contenthash].js",
+ filename: "js/[name].[contenthash].js",
publicPath: "/"
};
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index ff6145cbc..156547a4a 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -23,7 +23,7 @@ services:
- "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
{{- range $app_name, $app_config := .Values.apps }}
- {{- if eq $app_name "argo" -}}
+ {{- if has $app_name (list "argo" "events") -}}
{{- continue -}}
{{- end -}}
{{ $deployment := $app_config.harness.deployment }}
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index b9e6f8cb4..4094a9250 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -36,7 +36,7 @@
accounts:
condition: service_healthy
links:
- - accounts:accounts.{{ .root.Values.domain }}
+ - traefik:accounts.{{ .root.Values.domain }}
- {{ .app.harness.service.name }}:{{ .app.harness.service.name }}.default
{{- end }}
@@ -47,7 +47,7 @@ cloudharness-metadata:
data: |-
verbose: {{ .root.Values.debug }}
- discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}:8080/auth/realms/{{ .root.Values.namespace }}
+ discovery-url: {{ ternary "https" "http" $tls}}://{{ .root.Values.apps.accounts.harness.subdomain }}.{{ .root.Values.domain }}/auth/realms/{{ .root.Values.namespace }}
client-id: {{ .root.Values.apps.accounts.webclient.id | quote }}
client-secret: {{ .root.Values.apps.accounts.webclient.secret }}
secure-cookie: {{ $tls }}
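
Aliasing the public accounts hostname to the traefik container lets the gatekeeper resolve the discovery URL through the same entrypoint a browser uses, so the issuer advertised in the discovery document presumably matches the issuer of the tokens it later validates. Sketched for a hypothetical `samples` app on `ch.local`:

```yaml
samples-gk:
  links:
    # Route OIDC discovery through traefik under the public hostname:
    - traefik:accounts.ch.local
    # Reach the protected upstream service under a stable alias:
    - samples:samples.default
```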
From 74d074eec720731bd2c1b4d4601914ef07e3c7f8 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 1 Mar 2024 13:15:04 -0600
Subject: [PATCH 46/94] CH-100 Comment unused "ports"
---
deployment-configuration/compose/templates/auto-compose.yaml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 156547a4a..52f5a157e 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -63,12 +63,13 @@ services:
{{- with $app_config.image }}
image: {{ . }}
{{- end }}
+ {{/*
{{- if eq $.Values.mainapp $app_name }}
{{- with $app_config.harness.service.port }}
ports:
- "{{ . }}:{{ $app_config.harness.deployment.port }}"
{{- end }}
- {{- end }}
+ {{- end }}*/}}
{{- with $app_config.harness.deployment.port }}
expose:
- {{ . }}
From fec25bee546836c823020e25aa45fd62c69a7b88 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 15 Mar 2024 06:05:26 -0600
Subject: [PATCH 47/94] CH-100 Fix issue with service names in dependencies
---
.../compose/templates/auto-compose.yaml | 29 +++++++++++++------
1 file changed, 20 insertions(+), 9 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 52f5a157e..8f1543bdc 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -23,7 +23,7 @@ services:
- "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
{{- range $app_name, $app_config := .Values.apps }}
- {{- if has $app_name (list "argo" "events") -}}
+ {{- if has $app_name (list "argo" "events" "nfsserver") -}}
{{- continue -}}
{{- end -}}
{{ $deployment := $app_config.harness.deployment }}
@@ -113,12 +113,17 @@ services:
{{- with $app_config.harness.dependencies.soft }}
links:
{{- range . -}}
- {{- $service_name := (get $.Values.apps .).harness.service.name -}}
- {{- if eq . "events" }}
-# - {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
+ {{- $service := .}}
+ {{- range $name, $conf := $.Values.apps }}
+ {{- if eq $conf.harness.name $service }}
+ {{- if has . (list "events" "nfsserver") }}
+# - {{ $name }}:{{ $service }}.{{ $.Values.domain }}
{{- else }}
- - {{ . }}:{{ $service_name }}.{{ $.Values.domain }}
- {{- end }}
+ - {{ $name }}:{{ $service }}.{{ $.Values.domain }}
+ {{- end }}
+ {{- break -}}
+ {{- end -}}
+ {{- end -}}
{{- end -}}
{{- end -}}
{{/* Takes the hard deps, removes argo and adds the db if there is one */}}
@@ -129,9 +134,15 @@ services:
(without $app_config.harness.dependencies.hard "argo")
(dig "database" "name" "" $app_config.harness)) -}}
depends_on:
- {{- range . }}
- - {{ . }}
- {{- end }}
+ {{- range . -}}
+ {{- $service := .}}
+ {{- range $name, $conf := $.Values.apps -}}
+ {{- if eq $conf.harness.name $service }}
+ - {{ $name }}
+ {{- break -}}
+ {{- end -}}
+ {{- end -}}
+ {{- end -}}
{{- end }}
volumes:
- ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
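
Dependency lists refer to an application's logical `harness.name`, while compose services are keyed by the (possibly different) app key in the values file; the inner `range` therefore scans all apps for the entry whose `harness.name` matches before emitting a link. A hypothetical values excerpt showing why the lookup is needed:

```yaml
apps:
  my_app:               # the compose service is generated under this key
    harness:
      name: myapp       # the logical name other apps reference
  consumer:
    harness:
      name: consumer
      dependencies:
        soft:
          - myapp       # matched against harness.name, resolved to "my_app"
```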
From 0db12d801a4b729d68c3126aaddd251f0dfbe721 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 15 Mar 2024 06:19:44 -0600
Subject: [PATCH 48/94] CH-100 Fix issue with db-volumes
---
.../compose/templates/auto-compose.yaml | 8 ++++++--
.../compose/templates/auto-database.yaml | 2 +-
2 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 8f1543bdc..07d120ddb 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -138,11 +138,15 @@ services:
{{- $service := .}}
{{- range $name, $conf := $.Values.apps -}}
{{- if eq $conf.harness.name $service }}
+ {{- if has $name (list "events" "nfsserver") }}
+# - {{ $name }}
+ {{- else }}
- {{ $name }}
+ {{- end }}
{{- break -}}
{{- end -}}
{{- end -}}
- {{- end -}}
+ {{- end }}
{{- end }}
volumes:
- ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
@@ -222,7 +226,7 @@ volumes:
{{- with $app_config.harness.database }}
{{ .name }}:
{{- if eq .type "postgres" }}
- dshm:
+ dshm-{{ $app_name }}:
{{- end }}
{{- end }}
{{- if eq $app_name "nfsserver" }}
diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml
index 70bda63a5..569bb2209 100644
--- a/deployment-configuration/compose/templates/auto-database.yaml
+++ b/deployment-configuration/compose/templates/auto-database.yaml
@@ -25,7 +25,7 @@
target: /data/db
{{- if eq .type "postgres" }}
- type: volume
- source: dshm
+ source: dshm-{{ .name }}
target: /dev/shm
{{- include "deploy_utils.database.postgres" . }}
{{- end }}
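
A single shared `dshm` volume would be mounted into every postgres container at once; keying the volume by database name gives each instance its own `/dev/shm`. For two hypothetical databases the generated `volumes` section would read:

```yaml
volumes:
  keycloak-postgres:        # data volume for one database
  dshm-keycloak-postgres:   # its dedicated shared-memory volume
  samples-postgres:
  dshm-samples-postgres:
```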
From f8d4c84c909440358a6e345e69402dd0ddcb3036 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 15 Mar 2024 07:14:17 -0600
Subject: [PATCH 49/94] CH-100 Fix health check for account services
---
deployment-configuration/compose/templates/auto-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 07d120ddb..e932362dd 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -89,7 +89,7 @@ services:
{{- end }}
{{- if eq $app_name "accounts" }}
healthcheck:
- test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/azathoth/account"]
+ test: ["CMD", "curl", "-f", "http://127.0.0.1:8080/auth/realms/{{ $.Values.namespace }}/account"]
interval: 1s
timeout: 3s
retries: 30
@@ -226,7 +226,7 @@ volumes:
{{- with $app_config.harness.database }}
{{ .name }}:
{{- if eq .type "postgres" }}
- dshm-{{ $app_name }}:
+ dshm-{{ .name }}:
{{- end }}
{{- end }}
{{- if eq $app_name "nfsserver" }}
From 41211804e81089366b1efdbc7b46e82ac9006d5d Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 15 Mar 2024 09:51:44 -0600
Subject: [PATCH 50/94] CH-100 Fix issue with dependencies
---
.../compose/templates/auto-compose.yaml | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index e932362dd..1302040e8 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -110,13 +110,13 @@ services:
{{- range $app_config.harness.env }}
- {{ .name }}={{ .value }}
{{- end }}
- {{- with $app_config.harness.dependencies.soft }}
+ {{- with (concat (without $app_config.harness.dependencies.hard "argo") $app_config.harness.dependencies.soft) }}
links:
{{- range . -}}
{{- $service := .}}
{{- range $name, $conf := $.Values.apps }}
{{- if eq $conf.harness.name $service }}
- {{- if has . (list "events" "nfsserver") }}
+ {{- if has $name (list "events" "nfsserver") }}
# - {{ $name }}:{{ $service }}.{{ $.Values.domain }}
{{- else }}
- {{ $name }}:{{ $service }}.{{ $.Values.domain }}
@@ -131,9 +131,11 @@ services:
{{/* "compact" in the beginning is to remove empty values */}}
{{- with compact
(append
- (without $app_config.harness.dependencies.hard "argo")
+ (without $app_config.harness.dependencies.hard "argo" )
(dig "database" "name" "" $app_config.harness)) -}}
+ {{- with without $app_config.harness.dependencies.hard "argo" "events" }}
depends_on:
+ {{- end }}
{{- range . -}}
{{- $service := .}}
{{- range $name, $conf := $.Values.apps -}}
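
The extra `with` guard ensures `depends_on:` is only emitted when at least one dependency survives the filtering: a key left with no entries would render as a null value, which compose validation presumably rejects. The intended output shapes are:

```yaml
# Every hard dependency filtered out ("argo", "events"): the key is omitted.
# At least one surviving dependency: the list renders normally, e.g.
depends_on:
  - accounts
```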
From 5bf5532d88e651ac443b5082be7dafd4ec25a932 Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 26 Mar 2024 12:50:24 -0600
Subject: [PATCH 51/94] CH-100 Change tagPolicy for docker-compose target
---
tools/deployment-cli-tools/ch_cli_tools/skaffold.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index 55efc587e..7859d043b 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -199,6 +199,11 @@ def identify_unicorn_based_main(candidates):
'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']]
}
}
+ skaffold_conf['build']['tagPolicy'] = {
+ 'envTemplate': {
+ 'template': "TAG"
+ }
+ }
skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
merge_to_yaml_file(skaffold_conf, os.path.join(
@@ -212,7 +217,7 @@ def git_clone_hook(conf: GitDependencyConfig, context_path: str):
join(os.path.dirname(os.path.dirname(HERE)), 'clone.sh'),
conf.branch_tag,
conf.url,
- join(context_path, "dependencies", conf.path or os.path.basename(conf.url).split('.')[0])
+ join(context_path, "dependencies", conf.path or os.path.basename(conf.url).split('.')[0])
]
}
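
Skaffold's default tagger derives tags from the git commit; for the compose target the tag is instead taken from an environment-driven template so the images Skaffold builds carry the same tag the generated compose file references. The fragment merged into `skaffold.yaml` is:

```yaml
build:
  tagPolicy:
    envTemplate:
      template: "TAG"   # replaces the default gitCommit tagger
```

A build would then presumably be driven with the tag exported in the environment, e.g. `TAG=latest skaffold build`.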
From 319352b3c8762a536568b4b916f86b2f442bf912 Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 26 Mar 2024 13:37:08 -0600
Subject: [PATCH 52/94] CH-100 Add first information for the docker compose
target
---
README.md | 28 ++++++++++++-------
docs/build-deploy/README.md | 54 +++++++++++++++++++++++++++++++------
2 files changed, 65 insertions(+), 17 deletions(-)
diff --git a/README.md b/README.md
index 459b29294..46c1cdf24 100644
--- a/README.md
+++ b/README.md
@@ -2,12 +2,12 @@
-CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes.
+CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes and Docker Compose.
Can scaffold and maintain your cloud solution on top of Cloudharness without writing
Kubernetes templates, with in place common utilities and applications already configured for you.
What building your cloud solution with CloudHarness gives to you:
-- Common framework and utilities to develop and deploy micro-service application
+- Common framework and utilities to develop and deploy micro-service application
- Helm chart automatic generation
- deployments
- services
@@ -17,6 +17,12 @@ What building your cloud solution with CloudHarness gives to you:
- access gatekeepers configuration
- secrets
- templated config maps from files
+ - Docker compose configuration generation
+ - services
+ - traefik configuration
+ - databases (postgresql)
+ - access gatekeepers configuration
+ - secrets
* Automatic build and push of images
* REST-API scaffolding building based on OpenApi
* Continuous deployment script generation
@@ -46,14 +52,14 @@ In particular, these questions may rise:
- How to manage databases without being locked to a specific vendor solution?
- How to perform database backups?
- How to manage secret data?
- - What about having a precounfigured account management application?
- - Sooner rather than later I'll need an orchestration queue. Why not have that just ready to use?
+ - What about having a preconfigured account management application?
+ - Sooner rather than later I'll need an orchestration queue. Why not have that just ready to use?
# Command line tools
CloudHarness provides the following command line tools to help application scaffolding and deployment.
-* `harness-deployment` - generate the helm chart to deploy on Kubernetes.
+* `harness-deployment` - generate the helm chart to deploy on Kubernetes.
* `harness-application` - create a new CloudHarness REST application.
* `harness-generate` - generates server and client code for all CloudHarness REST applications.
* `harness-test` - run end to end tests
@@ -67,13 +73,13 @@ Cloudharness can be used on all major operative systems.
- Linux: supported and tested
- MacOS: supported and tested
- Windows/WSL2: supported and tested
-- Windows native: mostly working, unsupported
+- Windows native: mostly working, unsupported
### Python
Python 3.9 must be installed.
It is recommended to setup a virtual environment.
-With conda:
+With conda:
```bash
conda create --name ch python=3.9
conda activate ch
@@ -94,6 +100,10 @@ conda activate ch
[Skaffold](https://skaffold.dev/docs/install/) is the way to go to build and debug your application in your local development environment.
+### Docker compose
+
+[Docker Compose](https://docs.docker.com/compose/) is required when targeting Docker Compose instead of Kubernetes.
+
### Node environment
A node environment with npm is required for developing web applications and to run end to end tests.
@@ -139,7 +149,7 @@ or simply copy the *blueprint* folder.
The script `harness-deployment` scans your applications and configurations to create the build and deploy artifacts.
Created artifacts include:
- - Helm chart
+ - Helm chart (or docker compose configuration file)
- Skaffold build and run configuration
- Visual Studio Code debug and run configuration
- Codefresh pipeline yaml specification (optional)
@@ -153,7 +163,7 @@ infrastructure
cloud-harness
```
-run
+run
```
harness-deployment cloud-harness . [PARAMS]
diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md
index 35e5fbdc9..03046971b 100644
--- a/docs/build-deploy/README.md
+++ b/docs/build-deploy/README.md
@@ -17,7 +17,7 @@ infrastructure
cloud-harness
```
-run
+run
```
harness-deployment cloud-harness . [PARAMS]
@@ -34,7 +34,7 @@ Deployment definition:
- `--env`, `-e`: sets a custom environment (default: none)
- `--namespace`, `-n`: set the kubernetes namespace (default: ch)
- `--tag`, `-t`: define build tag (default: latest)
-- `--registry`, `-r`: set the Docker registry where images are pushed on build
+- `--registry`, `-r`: set the Docker registry where images are pushed on build
- `--include`, `-i`: set application(s) to include (with their dependencies). If not set, every application will be included
- `--exclude`, `-ex`: explicitly exclude applications or task images
@@ -46,6 +46,7 @@ Development parameters:
Optional settings
- `--output`, `-o`: specify helm chart base path (default `./deployment)
+- `--docker-compose`: targets Docker Compose instead of Kubernetes (see details below)
Build and deploy (deprecated, use Skaffold instead)
- `--build`, `-b`: builds and pushes Docker images in the specified registry (if any)
@@ -70,7 +71,7 @@ harness-deployment cloud-harness . -d mydomain.dev.org -n mynamespace -e dev -r
**Note: Docker registry**
-By default `skaffold` builds the images in the local Docker registry. In order to make the deploy work, we need to specify a
+By default `skaffold` builds the images in the local Docker registry. In order to make the deploy work, we need to specify a
registry that is visible from inside the cluster. The parameter `--registry` allows to specify a registry in which images are pushed after the build.
Any public registry will work. The suggested way to go is to install a registry on localhost:5000 inside
the kube cluster and push on that registry, also forwarded to localhost.
@@ -90,21 +91,21 @@ for instance with a Google Cloud cluster or a local Kubernetes like Minikube or
1. Create the namespace `kubectl create ns ch`
1. Build images and Install or upgrade the helm chart with `skaffold deploy`
-To build and reploy
+To build and redeploy
## Continuous deployment with Codefresh
See [here](./codefresh.md).
## Relevant files and directory structure
-Deployment files are automatically generated with the script
+Deployment files are automatically generated with the script
`harness-deployment`.
all the resources intended to install and deploy the platform on Kubernetes.
- `codefresh`: codefresh build related files (automatically generated)
- `deployment/helm`: the helm chart
-What this script does is to go through all the defined applications and use templates to define all the required
+What this script does is to go through all the defined applications and use templates to define all the required
definitions and variables.
General templates are defined inside `deployment-configuration`.
@@ -116,7 +117,7 @@ Applications can override templates values by defining a file `values.yaml` in t
The following deployment files are generated by `harness-deployment`:
- Helm chart configuration for custom deployment: **./helm/values.yaml**
-- Codefresh build and deploment definition: **./codefresh/codefresh.yaml**
+- Codefresh build and deployment definition: **./codefresh/codefresh.yaml**
The script `harness-deployment` also generates a build script to be used by codefresh.
@@ -131,6 +132,43 @@ Things to notice:
- A Helm chart was created under `deployment/helm` path to handle deployments.
- To populate the generated file `deployment/helm/values.yaml` is used.
+## Docker compose target
+
+The Docker compose target, still in alpha stage, allows you to generate the adequate configuration to run all your services on docker compose.
+
+Not all features of the Kubernetes target are supported by this target.
+Currently, the Docker compose target supports:
+
+- generation of services and links/bindings between them
+- generation of volumes for the services
+- traefik configuration
+- databases (postgresql)
+- access gatekeepers configuration
+- secrets
+- dedicated Skaffold configuration
+
+The following deployment files are generated by `harness-deployment ... --docker-compose`:
+
+- Docker compose configuration: **deployment/docker-compose.yaml**
+- Configuration files for the services that need to be mounted in each docker container: **deployment/resources/**
+- Skaffold configuration file for building the services images: **skaffold.yaml**
+
+The process to build and run the services in Docker compose is the following:
+
+1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`)
+2. build the images for all the services using Skaffold
+3. run Docker compose
+
+Translated to the command line:
+
+```bash
+harness-deployment ... --docker-compose # replace ... with your options
+skaffold build
+cd deployment
+docker compose up # or "docker-compose up" depending on your installation
+```
+
+
## Manual configurations
- [Configure user accounts](../accounts.md)
@@ -141,5 +179,5 @@ In order to access the applications from your browser, set up your hosts file as
Example: after running `harness-deployment -d mydomain.local -i samples`, set
```
-127.0.0.1 samples.mydomain.local workflows.mydomain.local events.mydomain.local argo.mydomain.local
+127.0.0.1 samples.mydomain.local workflows.mydomain.local events.mydomain.local argo.mydomain.local
```
From 7ec48e15f2fdf5b3af7949c0c522087de88b20d9 Mon Sep 17 00:00:00 2001
From: aranega
Date: Wed, 27 Mar 2024 11:05:47 -0600
Subject: [PATCH 53/94] CH-100 Update documentation
---
docs/build-deploy/README.md | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md
index 03046971b..c3b9b72fc 100644
--- a/docs/build-deploy/README.md
+++ b/docs/build-deploy/README.md
@@ -46,7 +46,7 @@ Development parameters:
Optional settings
- `--output`, `-o`: specify helm chart base path (default `./deployment)
-- `--docker-compose`: targets Docker Compose instead of Kubernetes (see details below)
+- `--docker-compose`: targets Docker Compose instead of Kubernetes (see [details below](#docker-compose-target))
Build and deploy (deprecated, use Skaffold instead)
- `--build`, `-b`: builds and pushes Docker images in the specified registry (if any)
@@ -143,7 +143,7 @@ Currently, the Docker compose target supports:
- generation of volumes for the services
- traefik configuration
 - databases (postgresql)
-- access gatekeepers configuration
+- access gatekeepers configuration (Keycloak)
- secrets
- dedicated Skaffold configuration
@@ -168,6 +168,18 @@ cd deployment
docker compose up # or "docker-compose up" depending on your installation
```
+### Unsupported features
+
+There are still some features that are not supported by the Docker compose target.
+Some are planned, others will not be, or not in a form that is compatible with the original k8s target.
+
+Here is a list of the unsupported features at the moment:
+
+- certificates
+- Argo (will not be supported as Argo is a dedicated solution for k8s)
+- events through Kafka
+- NFS server
+- DB backups
## Manual configurations
From 7edfcc467e3ac61bd5f542b8544359d713d6e12b Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Wed, 27 Mar 2024 19:35:32 +0100
Subject: [PATCH 54/94] #CH-100 improve documentation
---
.gitignore | 1 +
README.md | 20 ++++----
blueprint/.gitignore | 2 +-
docs/README.md | 1 +
docs/build-deploy/README.md | 48 ++----------------
docs/build-deploy/docker-compose.md | 78 +++++++++++++++++++++++++++++
docs/dev.md | 40 ++++++++++++++-
7 files changed, 133 insertions(+), 57 deletions(-)
create mode 100644 docs/build-deploy/docker-compose.md
diff --git a/.gitignore b/.gitignore
index 90f643c08..c6add695e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,7 @@ node_modules
.coverage
*.DS_Store
deployment/helm
+deployment/compose
*.egg-info
*.idea
/build
diff --git a/README.md b/README.md
index 46c1cdf24..cee34fbd6 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
-CloudHarness is a base infrastructure facilitator for microservice based applications deployed on Kubernetes and Docker Compose.
+CloudHarness is a base infrastructure facilitator for microservice based applications deployed primarily on Kubernetes.
Can scaffold and maintain your cloud solution on top of Cloudharness without writing
Kubernetes templates, with in place common utilities and applications already configured for you.
@@ -22,7 +22,7 @@ What building your cloud solution with CloudHarness gives to you:
- traefik configuration
 - databases (postgresql)
- access gatekeepers configuration
- - secrets
+ - secrets and configmaps
* Automatic build and push of images
* REST-API scaffolding building based on OpenApi
* Continuous deployment script generation
@@ -63,6 +63,7 @@ CloudHarness provides the following command line tools to help application scaff
* `harness-application` - create a new CloudHarness REST application.
* `harness-generate` - generates server and client code for all CloudHarness REST applications.
* `harness-test` - run end to end tests
+
# Get started
## Prerequisites
@@ -137,13 +138,14 @@ To (re)generate the code for your applications, run `harness-generate` from the
The script will look for all openapi applications, and regenerate the Flask server code and documentation.
Note: the script will eventually override any manually modified file. To avoid that, define a file openapi-generator-ignore.
-# Extend CloudHarness to build your solution
-CloudHarness is born to be extended. In order to extend CloudHarness you just need to mirror the folder structure:
-* **applications**: place here your custom applications, or override default ones
-* **deployment-configuration**: override the helm chart default values and templates
-* **infrastructure**: define base images to use in your application
+# Extend CloudHarness to build your project
+
+CloudHarness is born to be extended.
+
+The quickest way to start is to install Cloud Harness, copy the *blueprint* folder and build from that with the cli tools, such as
+`harness-application`, `harness-generate`, `harness-deployment`.
-or simply copy the *blueprint* folder.
+See the [developers documentation](docs/dev.md#start-your-project) for more information.
# Build and deploy
@@ -154,7 +156,7 @@ Created artifacts include:
- Visual Studio Code debug and run configuration
- Codefresh pipeline yaml specification (optional)
-With your solution folder structure looking like
+With your project folder structure looking like
```
applications
diff --git a/blueprint/.gitignore b/blueprint/.gitignore
index 0ea6b2248..54e3e15f9 100644
--- a/blueprint/.gitignore
+++ b/blueprint/.gitignore
@@ -2,7 +2,6 @@
.idea
*.iml
node_modules
-mnp-custom
.openapi-generator
*.pyc
.vscode
@@ -12,6 +11,7 @@ cloud-harness
build
skaffold.yaml
/deployment.yaml
+/deployment/compose
/.run/
*.egg-info
.overrides
diff --git a/docs/README.md b/docs/README.md
index b6a8087f8..df93d9353 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -2,6 +2,7 @@
- [Create and run a deployment](./build-deploy/README.md)
- [Create a codefresh continuous deployment](./build-deploy/codefresh.md)
- [Configure the Helm chart](./build-deploy/helm-configuration.md)
+ - [Docker Compose target generation](./build-deploy/docker-compose.md)
- [Set up environments](./build-deploy/environments.md)
- [Work with local deployments](./build-deploy/local-deploy/README.md)
- [Debug your applications](./build-deploy/local-deploy/debug.md)
diff --git a/docs/build-deploy/README.md b/docs/build-deploy/README.md
index c3b9b72fc..4b1f4b995 100644
--- a/docs/build-deploy/README.md
+++ b/docs/build-deploy/README.md
@@ -134,52 +134,10 @@ Things to notice:
## Docker compose target
-The Docker compose target, still in alpha stage, allows you to generate the adequate configuration to run all your services on docker compose.
+Docker compose is partially supported as a deployment target, as an alternative when Kubernetes
+is not an available option or for local development.
-Not all features of the Kubernetes target are supported by this target.
-Currently, the Docker compose target supports:
-
-- generation of services and links/bindings between them
-- generation of volumes for the services
-- traefik configuration
-- databases (postgresql)
-- access gatekeepers configuration (Keycloak)
-- secrets
-- dedicated Skaffold configuration
-
-The following deployment files are generated by `harness-deployment ... --docker-compose`:
-
-- Docker compose configuration: **deployment/docker-compose.yaml**
-- Configuration files for the services that need to be mounted in each docker container: **deployment/resources/**
-- Skaffold configuration file for building the services images: **skaffold.yaml**
-
-The process to build and run the services in Docker compose is the following:
-
-1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`)
-2. build the images for all the services using Skaffold
-3. run Docker compose
-
-Translated to the command line:
-
-```bash
-harness-deployment ... --docker-compose # replace ... with your options
-skaffold build
-cd deployment
-docker compose up # or "docker-compose up" depending on your installation
-```
-
-### Unsupported features
-
-There are still some features that are not supported by the Docker compose target.
-Some are planned, others will not be, or not in a form that is compatible with the original k8s target.
-
-Here is a list of the unsupported features at the moment:
-
-- certificates
-- Argo (will not be supported as Argo is a dedicated solution for k8s)
-- events through Kafka
-- NFS server
-- DB backups
+See [here](./docker-compose.md) for more information.
## Manual configurations
diff --git a/docs/build-deploy/docker-compose.md b/docs/build-deploy/docker-compose.md
new file mode 100644
index 000000000..361ae1730
--- /dev/null
+++ b/docs/build-deploy/docker-compose.md
@@ -0,0 +1,78 @@
+# Docker compose target generation
+
+The Docker compose target, still in alpha stage, allows you to generate the base
+configuration to run your services on docker compose.
+
+This feature is intended for development and other limited purposes, so not all features of
+the Kubernetes target are supported.
+
+## How to use
+
+The process to build and run the services in Docker compose is the following:
+
+1. generate the Skaffold and Docker compose configuration (option `--docker-compose` for `harness-deployment`)
+2. build the images for all the services using Skaffold
+3. run Docker compose
+
+Translated to the command line:
+
+```bash
+harness-deployment ... --docker-compose # replace ... by your options
+skaffold build
+cd deployment
+docker compose up # or "docker-compose up" depending on your installation
+```
+
+
+## Supported features
+In general, the docker compose target covers the services that are automatically
+handled by Cloud Harness, hence:
+
+```yaml
+harness:
+ deployment:
+ auto: true
+ service:
+ auto: true
+```
+Other custom Kubernetes templates are not included in the deployment.
+
+Currently, the Docker compose target supports:
+
+- generation of "auto" deployment/services (service and deployment bing to the same artifact in Docker)
+ - environment variables
+ - links/bindings between services
+ - readiness/liveness probes as healthchecks
+ - resource requests and limits
+ - replicas
+- generation of "auto" volumes and mounting on the services
+- resources (handled as configmaps in Kubernetes, handled as file mounts here)
+- reverse proxy (traefik configuration)
+- "auto" databases (postgresql)
+- secured: access gatekeepers configuration (Keycloak)
+- secrets (no encryption)
+
+The following deployment files are generated by `harness-deployment ... --docker-compose`:
+
+- Docker compose configuration: **deployment/docker-compose.yaml**
+- Configuration files for the services that need to be mounted in each docker container: **deployment/compose/resources/**
+- Skaffold configuration file for building the service images: **skaffold.yaml**
+
+## Unsupported features
+
+There are still some features that are not supported by the Docker compose target.
+Some are planned; others will not be supported, or not in a form compatible with the original k8s target.
+
+Here is a list of the currently unsupported features that are on the roadmap:
+
+- TLS certificates
+- application proxy (use_services) specification
+- definition of custom compose templates
+- Events (through Kafka)
+- JupyterHub
+
+These features are not currently on the roadmap for Docker compose:
+- Kubernetes-specific features that have no Docker equivalent
+- NFS server
+- Workflows and tasks (will not be supported as Argo is a dedicated solution for k8s)
+- DB backups
\ No newline at end of file
diff --git a/docs/dev.md b/docs/dev.md
index 113e0c863..8871806e5 100644
--- a/docs/dev.md
+++ b/docs/dev.md
@@ -4,6 +4,42 @@ This documentation is meant to be read by developers that needs to make modifica
The goal of this doc is to show how CloudHarness is internally built, the different parts of the code/files that are relative to specific features, and to provide a map to be able to modify or implement new features quickly.
CloudHarness is a project that allows you to: quickly generate the code of your webapp, considering that it runs in the cloud with a micro-service architecture, and to easily connect all those micro-services together to finally build the final app.
+
+## Prerequisites and installation
+
+This information is covered in the [main readme](../README.md#prerequisites)
+
+## Start your project
+
+A Cloud Harness project can range from a simple service deployed on Kubernetes, taking advantage of the
+Helm Chart and CI/CD generation, to something more structured using a mix of custom applications and
+applications built into Cloud Harness.
+
+The quickest way to start your project is to copy the **blueprint** directory from cloudharness, commit it
+to your repository, and start building from there.
+
+The blueprint essentially mirrors the folder structure that Cloud Harness recognizes:
+* **applications**: place here your custom applications, or override default ones
+* **deployment-configuration**: override the helm chart default values and templates
+* **infrastructure**: define base images to use in your application
+
+An initial workflow to start the first project with Cloud Harness can look like this:
+
+1. Copy blueprint to a *my-project* folder
+2. Commit and push to a (git or any other) repository
+3. Clone cloud-harness inside it. Cloud harness can be placed anywhere and shared across different projects, but it's easier to follow our tutorials with this structure.
+4. Use `harness-application myapp` to create one service/application from one of the available templates.
+5. Play with the `applications/myapp/deploy/values.yaml` file to configure your deployment and add a database, a volume, or other applications as dependencies
+6. Use `harness-deployment cloud-harness . -i myapp` to start generating a deployment including your application and its dependencies
+7. Run locally with `skaffold dev`
+
+The above workflow based on an application template is a great place to get started, but anything can be deployed with Cloud Harness,
+including custom templates and even helm charts.
+
+The above workflow and more are covered in our [tutorials](./tutorials/).
+
+
+## Built-in applications and features
Currently, the tools that CloudHarness can consider to build the final app are the following:
* [OpenAPI](https://www.openapis.org/) for generating the model and API of your application (based on an OpenAPI specification),
@@ -14,8 +50,8 @@ Currently, the tools that CloudHarness can consider to build the final app are t
* [JupyterHub](https://jupyter.org/hub) to provide jupyter notebooks access to a group of users,
* [Volume Manager](../applications/volumemanager/) to deal with external file system,
* [NFS Server](../applications/nfsserver/) to provide storage of file on an external NFS file system,
-* [Kubernete](https://kubernetes.io/) is used to manage the auto-scaling, deployements, ... of micro-services on a cluster,
-* [Code Fresh](https://codefresh.io/) for the remote build of the application, and it is configured to initiate a deployment on a remote Kubernete cluster,
+* [Kubernetes](https://kubernetes.io/) is used to manage the auto-scaling, deployments, ... of micro-services on a cluster,
+* [Codefresh](https://codefresh.io/) for the remote build of the application, and it is configured to initiate a deployment on a remote Kubernetes cluster,
* [Helm Chart](https://helm.sh/docs/topics/charts/) for the packaging of Kubernete resources to simplify the deployment of the application,
* [Skaffold](https://skaffold.dev/) to help deploying the packaged application in a Kubernete cluster.
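
Step 5 of the workflow above edits `applications/myapp/deploy/values.yaml`; a minimal sketch of such a file, staying within the "auto" scope that both the Helm and compose targets handle (all values hypothetical):

```yaml
# applications/myapp/deploy/values.yaml
harness:
  subdomain: myapp      # exposed as myapp.<domain> through the reverse proxy
  secured: false        # set to true to front the app with a gatekeeper
  service:
    auto: true
    port: 8080
  deployment:
    auto: true
    port: 8080
  database:
    auto: true
    type: postgres
  dependencies:
    soft:
      - accounts
```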
From e5d56b7fb6941020ddedb50341a908f78253b404 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 07:28:50 -0600
Subject: [PATCH 55/94] CH-100 First refactoring
---
.../compose/templates/auto-compose.yaml | 14 -
.../ch_cli_tools/codefresh.py | 16 +-
.../ch_cli_tools/configurationgenerator.py | 654 ++++++++++
.../ch_cli_tools/dockercompose.py | 1138 ++++++++---------
.../deployment-cli-tools/ch_cli_tools/helm.py | 1089 ++++++++--------
.../tests/test_codefresh.py | 7 +-
tools/deployment-cli-tools/tests/test_helm.py | 25 +-
7 files changed, 1782 insertions(+), 1161 deletions(-)
create mode 100644 tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 1302040e8..ca024eddf 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -27,9 +27,6 @@ services:
{{- continue -}}
{{- end -}}
{{ $deployment := $app_config.harness.deployment }}
- {{- if eq $app_name "nfsserver" }}
- {{- include "nfsserver.deployment" $.Values | indent 2 }}
- {{- end }}
{{- $isSecured := (and $.Values.secured_gatekeepers ( and (hasKey $app_config "port") $app_config.harness.secured )) -}}
{{ if $isSecured }}
@@ -213,10 +210,6 @@ volumes:
{{- break }}
{{- end }}
{{- with $app_config.harness.database }}
-volumes:
- {{- break }}
- {{- end }}
- {{- if eq $app_name "nfsserver" }}
volumes:
{{- break }}
{{- end }}
@@ -231,11 +224,4 @@ volumes:
dshm-{{ .name }}:
{{- end }}
{{- end }}
- {{- if eq $app_name "nfsserver" }}
- {{ $app_config.nfs.volumeName }}:
- # driver_opts:
- # type: "nfs"
- # o: "{{ join "," $app_config.nfs.mountOptions }}"
- # device: ":{{ $app_config.nfs.path }}"
- {{- end }}
{{- end }}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
index c4b7dd264..bb4b84342 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
@@ -11,7 +11,7 @@
from cloudharness_utils.testing.util import get_app_environment
from .models import HarnessMainConfig, ApplicationTestConfig, ApplicationHarnessConfig
from cloudharness_utils.constants import *
-from .helm import KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, generate_tag_from_content
+from .configurationgenerator import KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES
from .utils import check_docker_manifest_exists, find_dockerfiles_paths, get_app_relative_to_base_path, guess_build_dependencies_from_dockerfile, \
get_image_name, get_template, dict_merge, app_name_from_path, clean_path
from cloudharness_utils.testing.api import get_api_filename, get_schemathesis_command, get_urls_from_api_file
@@ -74,7 +74,7 @@ def check_image_exists(name, image):
else:
env[app_specific_tag_variable(name) + "_NEW"] = 1
-
+
for app in helm_values.apps.values():
if app.harness and app.harness.deployment.image:
@@ -128,7 +128,7 @@ def create_codefresh_deployment_scripts(root_paths, envs=(), include=(), exclude
for root_path in root_paths:
for e in envs:
-
+
template_name = f"codefresh-template-{e}.yaml"
template_path = join(
root_path, DEPLOYMENT_CONFIGURATION_PATH, template_name)
@@ -245,7 +245,7 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in
clean_path(dockerfile_relative_to_root), app_name),
environment=e2e_test_environment(app_config)
)
-
+
def add_unit_test_step(app_config: ApplicationHarnessConfig):
# Create a run step for each application with tests/unit.yaml file using the corresponding image built at the previous step
@@ -280,7 +280,7 @@ def add_unit_test_step(app_config: ApplicationHarnessConfig):
codefresh_steps_from_base_path(join(
root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False)
steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
-
+
if not codefresh:
logging.warning(
"No template file found. Codefresh script not created.")
@@ -420,7 +420,7 @@ def codefresh_app_build_spec(app_name, app_context_path, dockerfile_path="Docker
title=title,
working_directory='./' + app_context_path,
dockerfile=dockerfile_path)
-
+
tag = app_specific_tag_variable(app_name)
build["tag"] = "${{%s}}" % tag
@@ -450,7 +450,7 @@ def add_arg_dependencies(dependencies):
helm_values.apps[values_key].harness.dependencies.build)
except (KeyError, AttributeError):
add_arg_dependencies(helm_values['task-images'])
-
+
when_condition = existing_build_when_condition(tag)
build["when"] = when_condition
return build
@@ -471,5 +471,5 @@ def existing_build_when_condition(tag):
}
}
}
-
+
return when_condition
diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
new file mode 100644
index 000000000..9a445456f
--- /dev/null
+++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
@@ -0,0 +1,654 @@
+"""
+Utilities to create a deployment configuration (Helm chart or Docker Compose) from a CloudHarness directory structure
+"""
+import yaml
+from ruamel.yaml import YAML
+import os
+import shutil
+import logging
+from hashlib import sha1
+import subprocess
+from functools import cache
+import tarfile
+from docker import from_env as DockerClient
+from pathlib import Path
+import copy
+
+
+from . import HERE, CH_ROOT
+from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
+ DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE
+from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
+ get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
+ find_dockerfiles_paths, find_file_paths
+
+from .models import HarnessMainConfig
+
+
+KEY_HARNESS = 'harness'
+KEY_SERVICE = 'service'
+KEY_DATABASE = 'database'
+KEY_DEPLOYMENT = 'deployment'
+KEY_APPS = 'apps'
+KEY_TASK_IMAGES = 'task-images'
+# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build"
+KEY_TEST_IMAGES = 'test-images'
+
+DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
+
+
+class ConfigurationGenerator(object):
+ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ namespace=None, templates_path=HELM_PATH):
+ assert domain, 'A domain must be specified'
+ self.root_paths = [Path(r) for r in root_paths]
+ self.tag = tag
+ if registry and not registry.endswith('/'):
+ self.registry = f'{registry}/'
+ else:
+ self.registry = registry
+ self.local = local
+ self.domain = domain
+ self.exclude = exclude
+ self.secured = secured
+ self.output_path = Path(output_path)
+ self.include = include
+ self.registry_secret = registry_secret
+ self.tls = tls
+ self.env = env
+ self.namespace = namespace
+
+ self.templates_path = templates_path
+ self.dest_deployment_path = self.output_path / templates_path
+ self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml'
+ self.__init_deployment()
+
+ self.static_images = set()
+ self.base_images = {}
+ self.all_images = {}
+
+ def __init_deployment(self):
+ """
+ Create the base helm chart
+ """
+ if self.dest_deployment_path.exists():
+ shutil.rmtree(self.dest_deployment_path)
+ # Initialize with default
+ copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
+
+ # Override for every cloudharness scaffolding
+ for root_path in self.root_paths:
+ copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
+ base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
+ collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
+ dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
+
+ def _adjust_missing_values(self, helm_values):
+ if 'name' not in helm_values:
+ with open(self.helm_chart_path) as f:
+ chart_idx_content = yaml.safe_load(f)
+ helm_values['name'] = chart_idx_content['name'].lower()
+
+ def _process_applications(self, helm_values, base_image_name):
+ for root_path in self.root_paths:
+ app_values = init_app_values(
+ root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
+ helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ app_values)
+
+ app_base_path = root_path / APPS_PATH
+ app_values = self.collect_app_values(
+ app_base_path, base_image_name=base_image_name)
+ helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ app_values)
+
+ def collect_app_values(self, app_base_path, base_image_name=None):
+ values = {}
+
+        for app_path in app_base_path.glob("*/"):  # iterate over the direct sub-directories
+ app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
+
+ if app_name in self.exclude:
+ continue
+ app_key = app_name.replace('-', '_')
+
+ app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+
+ values[app_key] = dict_merge(
+ values[app_key], app_values) if app_key in values else app_values
+
+ return values
+
+ def _init_static_images(self, base_image_name):
+ for static_img_dockerfile in self.static_images:
+ img_name = image_name_from_dockerfile_path(os.path.basename(
+ static_img_dockerfile), base_name=base_image_name)
+ self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=static_img_dockerfile)
+
+ def _assign_static_build_dependencies(self, helm_values):
+ for static_img_dockerfile in self.static_images:
+ key = os.path.basename(static_img_dockerfile)
+ if key in helm_values[KEY_TASK_IMAGES]:
+ dependencies = guess_build_dependencies_from_dockerfile(
+ f"{static_img_dockerfile}")
+ for dep in dependencies:
+ if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
+ helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
+ # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = {
+ # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent),
+ # 'dockerfile': 'Dockerfile',
+ # }
+
+ for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
+ if image_name in self.exclude:
+ del helm_values[KEY_TASK_IMAGES][image_name]
+ # del helm_values[KEY_TASK_IMAGES_BUILD][image_name]
+
+ def _init_base_images(self, base_image_name):
+
+ for root_path in self.root_paths:
+ for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
+ img_name = image_name_from_dockerfile_path(
+ os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=root_path)
+
+ self.static_images.update(find_dockerfiles_paths(
+ os.path.join(root_path, STATIC_IMAGES_PATH)))
+ return self.base_images
+
+ def _init_test_images(self, base_image_name):
+ test_images = {}
+ for root_path in self.root_paths:
+ for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
+ img_name = image_name_from_dockerfile_path(
+ os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ img_name, build_context_path=base_img_dockerfile)
+
+ return test_images
+
+ def __find_static_dockerfile_paths(self, root_path):
+ return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
+
+ def _merge_base_helm_values(self, helm_values):
+ # Override for every cloudharness scaffolding
+ for root_path in self.root_paths:
+ helm_values = dict_merge(
+ helm_values,
+ collect_helm_values(root_path, env=self.env)
+ )
+
+ return helm_values
+
+ def _get_default_helm_values(self):
+ ch_root_path = Path(CH_ROOT)
+ values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml'
+ helm_values = get_template(values_yaml_path)
+ helm_values = dict_merge(helm_values,
+ collect_helm_values(ch_root_path, env=self.env))
+
+ return helm_values
+
+ def create_tls_certificate(self, helm_values):
+ if not self.tls:
+ helm_values['tls'] = None
+ return
+ if not self.local:
+ return
+ helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
+
+ bootstrap_file = 'bootstrap.sh'
+ certs_parent_folder_path = self.output_path / 'helm' / 'resources'
+ certs_folder_path = certs_parent_folder_path / 'certs'
+
+ if certs_folder_path.exists():
+ # don't overwrite the certificate if it exists
+ return
+
+ try:
+ client = DockerClient()
+ client.ping()
+        except Exception:
+            raise ConnectionRefusedError(
+                '\n\nIs docker running? Run "eval $(minikube docker-env)" if you are using minikube...')
+
+ # Create CA and sign cert for domain
+        container = client.containers.run(image='frapsoft/openssl',
+                                          command='sleep 60',
+                                          entrypoint="",
+                                          detach=True,
+                                          environment=[f"DOMAIN={self.domain}"],
+                                          )
+
+ container.exec_run('mkdir -p /mnt/vol1')
+ container.exec_run('mkdir -p /mnt/certs')
+
+ # copy bootstrap file
+ cur_dir = os.getcwd()
+ os.chdir(Path(HERE) / 'scripts')
+ tar = tarfile.open(bootstrap_file + '.tar', mode='w')
+ try:
+ tar.add(bootstrap_file)
+ finally:
+ tar.close()
+        with open(bootstrap_file + '.tar', 'rb') as tar_file:
+            data = tar_file.read()
+ container.put_archive('/mnt/vol1', data)
+ os.chdir(cur_dir)
+ container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
+
+ # exec bootstrap file
+ container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
+
+ # retrieve the certs from the container
+ bits, stat = container.get_archive('/mnt/certs')
+ if not certs_folder_path.exists():
+ certs_folder_path.mkdir(parents=True)
+ certs_tar = certs_parent_folder_path / 'certs.tar'
+ with open(certs_tar, 'wb') as f:
+ for chunk in bits:
+ f.write(chunk)
+        with tarfile.open(certs_tar) as cf:
+            cf.extractall(path=certs_parent_folder_path)
+
+ logs = container.logs()
+ logging.info(f'openssl container logs: {logs}')
+
+ # stop the container
+ container.kill()
+
+ logging.info("Created certificates for local deployment")
+
+ def _clear_unused_db_configuration(self, harness_config):
+ database_config = harness_config[KEY_DATABASE]
+ database_type = database_config.get('type', None)
+ if database_type is None:
+ del harness_config[KEY_DATABASE]
+ return
+ db_specific_keys = [k for k, v in database_config.items()
+ if isinstance(v, dict) and 'image' in v and 'ports' in v]
+ for db in db_specific_keys:
+ if database_type != db:
+ del database_config[db]
+
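+    # Illustrative example (hypothetical values): with harness['database'] set to
+    #   {'type': 'postgres',
+    #    'postgres': {'image': '...', 'ports': [...]},
+    #    'mongo': {'image': '...', 'ports': [...]},
+    #    'name': 'myapp-db'}
+    # only the 'mongo' entry is removed: dict values that look like engine
+    # configurations (having both 'image' and 'ports') and do not match the
+    # selected 'type' are dropped, while keys such as 'name' are preserved.
+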
+ def image_tag(self, image_name, build_context_path=None, dependencies=()):
+ tag = self.tag
+ if tag is None and not self.local:
+ logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
+ ignore_path = os.path.join(build_context_path, '.dockerignore')
+ ignore = set(DEFAULT_IGNORE)
+ if os.path.exists(ignore_path):
+ with open(ignore_path) as f:
+ ignore = ignore.union({line.strip() for line in f})
+ logging.info(f"Ignoring {ignore}")
+ tag = generate_tag_from_content(build_context_path, ignore)
+ logging.info(f"Content hash: {tag}")
+ dependencies = dependencies or guess_build_dependencies_from_dockerfile(f"{build_context_path}")
+ tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
+ logging.info(f"Generated tag: {tag}")
+ app_name = image_name.split("/")[-1] # the image name can have a prefix
+ self.all_images[app_name] = tag
+ return self.registry + image_name + (f':{tag}' if tag else '')
+
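+# Usage sketch (illustrative only): concrete generators subclass ConfigurationGenerator,
+# implement create_app_values_spec() and a process_values() driver, and are used as:
+#
+#   generator = SomeConcreteGenerator(['.'], domain='mydomain.local',
+#                                     output_path='./deployment')
+#   values = generator.process_values()
+#
+# 'SomeConcreteGenerator' is a placeholder name, not part of this module.
+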
+
+def get_included_with_dependencies(values, include):
+ app_values = values['apps'].values()
+ directly_included = [app for app in app_values if any(
+ inc == app[KEY_HARNESS]['name'] for inc in include)]
+
+ dependent = set(include)
+ for app in directly_included:
+        if app[KEY_HARNESS]['dependencies'].get('hard', None):
+            dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
+        if app[KEY_HARNESS]['dependencies'].get('soft', None):
+            dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
+ if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
+ dependent.add('accounts')
+ if len(dependent) == len(include):
+ return dependent
+ return get_included_with_dependencies(values, dependent)
+
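+# Example (illustrative): include=['samples'], where app 'samples' declares a hard
+# dependency on 'events', expands to {'samples', 'events'}; 'accounts' is also added
+# when secured gatekeepers are enabled and the app is secured. The function recurses
+# until the dependency set stops growing.
+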
+
+def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
+ pass
+
+
+def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
+ if not base_helm_chart.exists():
+ return
+ if dest_helm_chart_path.exists():
+ logging.info("Merging/overriding all files in directory %s",
+ dest_helm_chart_path)
+ merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
+ else:
+ logging.info("Copying base deployment chart from %s to %s",
+ base_helm_chart, dest_helm_chart_path)
+ shutil.copytree(base_helm_chart, dest_helm_chart_path)
+
+
+def collect_helm_values(deployment_root, env=()):
+ """
+ Creates helm values from a cloudharness deployment scaffolding
+ """
+ values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml'
+
+ values = get_template(values_template_path)
+
+ for e in env:
+ specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
+ f'values-template-{e}.yaml')
+ if os.path.exists(specific_template_path):
+ logging.info(
+ "Specific environment values template found: " + specific_template_path)
+ with open(specific_template_path) as f:
+ values_env_specific = yaml.safe_load(f)
+ values = dict_merge(values, values_env_specific)
+ return values
+
+
+def init_app_values(deployment_root, exclude, values=None):
+ values = values if values is not None else {}
+ app_base_path = os.path.join(deployment_root, APPS_PATH)
+ overridden_template_path = os.path.join(
+ deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+ default_values_path = os.path.join(
+ CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+
+ for app_path in get_sub_paths(app_base_path):
+
+ app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+
+ if app_name in exclude:
+ continue
+ app_key = app_name.replace('-', '_')
+ if app_key not in values:
+ default_values = get_template(default_values_path)
+ values[app_key] = default_values
+ overridden_defaults = get_template(overridden_template_path)
+ values[app_key] = dict_merge(values[app_key], overridden_defaults)
+
+ return values
+
+
+def values_from_legacy(values):
+ if KEY_HARNESS not in values:
+ values[KEY_HARNESS] = {}
+ harness = values[KEY_HARNESS]
+ if KEY_SERVICE not in harness:
+ harness[KEY_SERVICE] = {}
+ if KEY_DEPLOYMENT not in harness:
+ harness[KEY_DEPLOYMENT] = {}
+ if KEY_DATABASE not in harness:
+ harness[KEY_DATABASE] = {}
+
+ if 'subdomain' in values:
+ harness['subdomain'] = values['subdomain']
+ if 'autodeploy' in values:
+ harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
+ if 'autoservice' in values:
+ harness[KEY_SERVICE]['auto'] = values['autoservice']
+ if 'secureme' in values:
+ harness['secured'] = values['secureme']
+ if 'resources' in values:
+ harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
+ if 'replicas' in values:
+ harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
+ if 'image' in values:
+ harness[KEY_DEPLOYMENT]['image'] = values['image']
+ if 'port' in values:
+ harness[KEY_DEPLOYMENT]['port'] = values['port']
+ harness[KEY_SERVICE]['port'] = values['port']
+
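+# Example (illustrative): a legacy flat app config such as
+#   {'subdomain': 'www', 'port': 8080, 'autodeploy': True}
+# gains a nested harness section:
+#   {'harness': {'subdomain': 'www', 'service': {'port': 8080},
+#                'deployment': {'auto': True, 'port': 8080}, 'database': {}}}
+# while the original flat keys are left in place.
+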
+
+def values_set_legacy(values):
+ harness = values[KEY_HARNESS]
+ if 'image' in harness[KEY_DEPLOYMENT]:
+ values['image'] = harness[KEY_DEPLOYMENT]['image']
+
+ values['name'] = harness['name']
+ if harness[KEY_DEPLOYMENT].get('port', None):
+ values['port'] = harness[KEY_DEPLOYMENT]['port']
+ if 'resources' in harness[KEY_DEPLOYMENT]:
+ values['resources'] = harness[KEY_DEPLOYMENT]['resources']
+
+
+def generate_tag_from_content(content_path, ignore=()):
+ from dirhash import dirhash
+ return dirhash(content_path, 'sha1', ignore=ignore)
+
+
+def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
+ if isinstance(values, dict):
+ newenvs = list(envs)
+ for key, value in values.items():
+ v = extract_env_variables_from_values(
+ value, envs, f"{prefix}_{key}".replace('-', '_').upper())
+ if key in ('name', 'port', 'subdomain'):
+ newenvs.extend(v)
+ return newenvs
+ else:
+ return [env_variable(prefix, values)]
+
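+# Example (illustrative): extract_env_variables_from_values(
+#     {'name': 'myapp', 'deployment': {'port': 8080}}, prefix='CH_MYAPP')
+# returns [env_variable('CH_MYAPP_NAME', 'myapp')]: only values reached through the
+# 'name', 'port' and 'subdomain' keys are kept, so the nested 'deployment' dict is
+# visited but its result is discarded.
+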
+
+def create_env_variables(values):
+ for app_name, value in values[KEY_APPS].items():
+ if KEY_HARNESS in value:
+ values['env'].extend(extract_env_variables_from_values(
+ value[KEY_HARNESS], prefix='CH_' + app_name))
+ values['env'].append(env_variable('CH_DOMAIN', values['domain']))
+ values['env'].append(env_variable(
+ 'CH_IMAGE_REGISTRY', values['registry']['name']))
+ values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
+
+
+def hosts_info(values):
+ domain = values['domain']
+ namespace = values['namespace']
+    subdomains = [app[KEY_HARNESS]['subdomain']
+                  for app in values[KEY_APPS].values()
+                  if KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']]
+    subdomains += [alias
+                   for app in values[KEY_APPS].values()
+                   if KEY_HARNESS in app and app[KEY_HARNESS]['aliases']
+                   for alias in app[KEY_HARNESS]['aliases']]
+ try:
+ ip = get_cluster_ip()
+    except Exception:
+ logging.warning('Cannot get cluster ip')
+ return
+    logging.info(
+        f"\nTo test locally, update your hosts file\n{ip}\t{domain} {' '.join(sd + '.' + domain for sd in subdomains)}")
+
+ deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
+ for app in values[KEY_APPS].values() if KEY_HARNESS in app)
+
+ logging.info(
+ "\nTo run locally some apps, also those references may be needed")
+ for appname in values[KEY_APPS]:
+ app = values[KEY_APPS][appname]['harness']
+ if 'deployment' not in app:
+ continue
+ print(
+ "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
+ app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
+
+ print(
+ f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
+
+
+class ValuesValidationException(Exception):
+ pass
+
+
+def validate_helm_values(values):
+ validate_dependencies(values)
+
+
+def validate_dependencies(values):
+ all_apps = {a for a in values["apps"]}
+ for app in all_apps:
+ app_values = values["apps"][app]
+ if 'dependencies' in app_values[KEY_HARNESS]:
+ soft_dependencies = {
+ d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
+ not_found = {d for d in soft_dependencies if d not in all_apps}
+ if not_found:
+ logging.warning(
+ f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
+ hard_dependencies = {
+ d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
+ not_found = {d for d in hard_dependencies if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
+
+ build_dependencies = {
+ d for d in app_values[KEY_HARNESS]['dependencies']['build']}
+
+ not_found = {
+ d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
+ not_found = {d for d in not_found if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
+
+ if 'use_services' in app_values[KEY_HARNESS]:
+ service_dependencies = {d['name'].replace(
+ "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
+
+ not_found = {d for d in service_dependencies if d not in all_apps}
+ if not_found:
+ raise ValuesValidationException(
+ f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
+
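+# For example (illustrative): if app 'samples' declares dependencies.hard=['accounts']
+# but no 'accounts' app is configured, a ValuesValidationException is raised; a
+# missing soft dependency only logs a warning.
+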
+
+def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path=HELM_PATH, exclude=(), include=None):
+ """
+ Searches recursively for helm templates inside the applications and collects the templates in the destination
+
+ :param search_root:
+ :param dest_helm_chart_path: collected helm templates destination folder
+ :param exclude:
+ :return:
+ """
+ app_base_path = search_root / APPS_PATH
+
+    for app_path in app_base_path.glob("*/"):  # iterate over the direct sub-directories
+ app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path))
+ if app_name in exclude or (include and not any(inc in app_name for inc in include)):
+ continue
+ if templates_path == HELM_PATH:
+ template_dir = app_path / 'deploy' / 'templates'
+ else:
+ template_dir = app_path / 'deploy' / f'templates-{templates_path}'
+ if template_dir.exists():
+ dest_dir = dest_helm_chart_path / 'templates' / app_name
+
+ logging.info(
+ "Collecting templates for application %s to %s", app_name, dest_dir)
+ if dest_dir.exists():
+ logging.warning(
+ "Merging/overriding all files in directory %s", dest_dir)
+ merge_configuration_directories(f"{template_dir}", f"{dest_dir}")
+ else:
+ shutil.copytree(template_dir, dest_dir)
+ resources_dir = app_path / 'deploy' / 'resources'
+ if resources_dir.exists():
+ dest_dir = dest_helm_chart_path / 'resources' / app_name
+
+ logging.info(
+ "Collecting resources for application %s to %s", app_name, dest_dir)
+
+ merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
+
+ if templates_path == HELM_PATH:
+ subchart_dir = app_path / 'deploy/charts'
+ if subchart_dir.exists():
+ dest_dir = dest_helm_chart_path / 'charts' / app_name
+
+ logging.info(
+ "Collecting templates for application %s to %s", app_name, dest_dir)
+ if dest_dir.exists():
+ logging.warning(
+ "Merging/overriding all files in directory %s", dest_dir)
+ merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
+ else:
+ shutil.copytree(subchart_dir, dest_dir)
+
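+# Layout sketch (illustrative): for an application 'myapp' with templates_path
+# 'compose', files under applications/myapp/deploy/templates-compose/ are collected
+# into <dest>/templates/myapp/ and deploy/resources/ into <dest>/resources/myapp/;
+# the helm flavour reads deploy/templates/ and additionally collects deploy/charts/.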
+
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index bafe5a003..0e75ed7ea 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -24,89 +24,81 @@
from .models import HarnessMainConfig
-KEY_HARNESS = 'harness'
-KEY_SERVICE = 'service'
-KEY_DATABASE = 'database'
-KEY_DEPLOYMENT = 'deployment'
-KEY_APPS = 'apps'
-KEY_TASK_IMAGES = 'task-images'
-# KEY_TASK_IMAGES_BUILD = f"{KEY_TASK_IMAGES}-build"
-KEY_TEST_IMAGES = 'test-images'
+from .configurationgenerator import (
+    ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE,
+    KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy,
+    values_set_legacy, get_included_with_dependencies, create_env_variables,
+    collect_apps_helm_templates,
+)
-DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
env = [env]
- return CloudHarnessHelm(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured,
+ return CloudHarnessDockerCompose(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured,
output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env,
namespace=namespace, templates_path=COMPOSE).process_values()
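+# Typical use (sketch; arguments are illustrative):
+#   create_docker_compose_configuration(['.'], domain='mydomain.local',
+#                                       output_path='./deployment')
+# renders the docker compose flavour of the deployment templates under the
+# configured COMPOSE folder inside './deployment'.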
-class CloudHarnessHelm:
- def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
- output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- namespace=None, templates_path=HELM_PATH):
- assert domain, 'A domain must be specified'
- self.root_paths = [Path(r) for r in root_paths]
- self.tag = tag
- if registry and not registry.endswith('/'):
- self.registry = f'{registry}/'
- else:
- self.registry = registry
- self.local = local
- self.domain = domain
- self.exclude = exclude
- self.secured = secured
- self.output_path = Path(output_path)
- self.include = include
- self.registry_secret = registry_secret
- self.tls = tls
- self.env = env
- self.namespace = namespace
-
- self.templates_path = templates_path
- self.dest_deployment_path = self.output_path / templates_path
- self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml'
- self.__init_deployment()
-
- self.static_images = set()
- self.base_images = {}
- self.all_images = {}
-
- def __init_deployment(self):
- """
- Create the base helm chart
- """
- if self.dest_deployment_path.exists():
- shutil.rmtree(self.dest_deployment_path)
- # Initialize with default
- copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
-
- # Override for every cloudharness scaffolding
- for root_path in self.root_paths:
- copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
- base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
- collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
- dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
-
- def __adjust_missing_values(self, helm_values):
- if 'name' not in helm_values:
- with open(self.helm_chart_path) as f:
- chart_idx_content = yaml.safe_load(f)
- helm_values['name'] = chart_idx_content['name'].lower()
+class CloudHarnessDockerCompose(ConfigurationGenerator):
+ # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ # namespace=None, templates_path=HELM_PATH):
+ # assert domain, 'A domain must be specified'
+ # self.root_paths = [Path(r) for r in root_paths]
+ # self.tag = tag
+ # if registry and not registry.endswith('/'):
+ # self.registry = f'{registry}/'
+ # else:
+ # self.registry = registry
+ # self.local = local
+ # self.domain = domain
+ # self.exclude = exclude
+ # self.secured = secured
+ # self.output_path = Path(output_path)
+ # self.include = include
+ # self.registry_secret = registry_secret
+ # self.tls = tls
+ # self.env = env
+ # self.namespace = namespace
+
+ # self.templates_path = templates_path
+ # self.dest_deployment_path = self.output_path / templates_path
+ # self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml'
+ # self.__init_deployment()
+
+ # self.static_images = set()
+ # self.base_images = {}
+ # self.all_images = {}
+
+ # def __init_deployment(self):
+ # """
+ # Create the base helm chart
+ # """
+ # if self.dest_deployment_path.exists():
+ # shutil.rmtree(self.dest_deployment_path)
+ # # Initialize with default
+ # copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
+
+ # # Override for every cloudharness scaffolding
+ # for root_path in self.root_paths:
+ # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
+ # base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
+ # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
+ # dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
+
+ # def __adjust_missing_values(self, helm_values):
+ # if 'name' not in helm_values:
+ # with open(self.helm_chart_path) as f:
+ # chart_idx_content = yaml.safe_load(f)
+ # helm_values['name'] = chart_idx_content['name'].lower()
def process_values(self) -> HarnessMainConfig:
"""
Creates values file for the helm chart
"""
- helm_values = self.__get_default_helm_values()
+ helm_values = self._get_default_helm_values()
- self.__adjust_missing_values(helm_values)
+ self._adjust_missing_values(helm_values)
- helm_values = self.__merge_base_helm_values(helm_values)
+ helm_values = self._merge_base_helm_values(helm_values)
helm_values[KEY_APPS] = {}
@@ -114,18 +106,18 @@ def process_values(self) -> HarnessMainConfig:
helm_values[KEY_TASK_IMAGES] = {}
- self.__init_base_images(base_image_name)
- self.__init_static_images(base_image_name)
- helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name)
+ self._init_base_images(base_image_name)
+ self._init_static_images(base_image_name)
+ helm_values[KEY_TEST_IMAGES] = self._init_test_images(base_image_name)
- self.__process_applications(helm_values, base_image_name)
+ self._process_applications(helm_values, base_image_name)
# self.create_tls_certificate(helm_values)
values, include = self.__finish_helm_values(values=helm_values)
# Adjust dependencies from static (common) images
- self.__assign_static_build_dependencies(helm_values)
+ self._assign_static_build_dependencies(helm_values)
for root_path in self.root_paths:
collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
@@ -184,132 +176,132 @@ def __post_process_multiple_document_docker_compose(self, yaml_document):
main_document = document # we need to save the main document later
yaml_handler.dump(main_document, yaml_document)
- def __process_applications(self, helm_values, base_image_name):
- for root_path in self.root_paths:
- app_values = init_app_values(
- root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
- helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- app_values)
-
- app_base_path = root_path / APPS_PATH
- app_values = self.collect_app_values(
- app_base_path, base_image_name=base_image_name)
- helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- app_values)
-
- def collect_app_values(self, app_base_path, base_image_name=None):
- values = {}
-
- for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
- app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
-
- if app_name in self.exclude:
- continue
- app_key = app_name.replace('-', '_')
-
- app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
-
- # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None)
- # # for dockerfile_path in app_path.rglob('**/Dockerfile'):
- # # parent_name = dockerfile_path.parent.name.replace("-", "_")
- # # if parent_name == app_key:
- # # app_values['build'] = {
- # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
- # # 'dockerfile': "Dockerfile",
- # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # # }
- # # elif "tasks/" in f"{dockerfile_path}":
- # # parent_name = parent_name.upper()
- # # values.setdefault("task-images-build", {})[parent_name] = {
- # # 'dockerfile': "Dockerfile",
- # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # # }
- # # import ipdb; ipdb.set_trace() # fmt: skip
-
- # if dockerfile_path:
- # app_values['build'] = {
- # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
- # 'dockerfile': "Dockerfile",
- # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # }
-
- values[app_key] = dict_merge(
- values[app_key], app_values) if app_key in values else app_values
-
- return values
-
- def __init_static_images(self, base_image_name):
- for static_img_dockerfile in self.static_images:
- img_name = image_name_from_dockerfile_path(os.path.basename(
- static_img_dockerfile), base_name=base_image_name)
- self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=static_img_dockerfile)
-
- def __assign_static_build_dependencies(self, helm_values):
- for static_img_dockerfile in self.static_images:
- key = os.path.basename(static_img_dockerfile)
- if key in helm_values[KEY_TASK_IMAGES]:
- dependencies = guess_build_dependencies_from_dockerfile(
- static_img_dockerfile)
- for dep in dependencies:
- if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
- helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
- # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = {
- # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent),
- # 'dockerfile': 'Dockerfile',
- # }
-
- for image_name in helm_values[KEY_TASK_IMAGES].keys():
- if image_name in self.exclude:
- del helm_values[KEY_TASK_IMAGES][image_name]
- # del helm_values[KEY_TASK_IMAGES_BUILD][image_name]
-
- def __init_base_images(self, base_image_name):
-
- for root_path in self.root_paths:
- for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
- img_name = image_name_from_dockerfile_path(
- os.path.basename(base_img_dockerfile), base_name=base_image_name)
- self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=root_path)
-
- self.static_images.update(find_dockerfiles_paths(
- os.path.join(root_path, STATIC_IMAGES_PATH)))
- return self.base_images
-
- def __init_test_images(self, base_image_name):
- test_images = {}
- for root_path in self.root_paths:
- for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
- img_name = image_name_from_dockerfile_path(
- os.path.basename(base_img_dockerfile), base_name=base_image_name)
- test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=base_img_dockerfile)
-
- return test_images
-
-
- def __find_static_dockerfile_paths(self, root_path):
- return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
-
- def __merge_base_helm_values(self, helm_values):
- # Override for every cloudharness scaffolding
- for root_path in self.root_paths:
- helm_values = dict_merge(
- helm_values,
- collect_helm_values(root_path, env=self.env)
- )
-
- return helm_values
-
- def __get_default_helm_values(self):
- ch_root_path = Path(CH_ROOT)
- values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml'
- helm_values = get_template(values_yaml_path)
- helm_values = dict_merge(helm_values,
- collect_helm_values(ch_root_path, env=self.env))
-
- return helm_values
+ # def __process_applications(self, helm_values, base_image_name):
+ # for root_path in self.root_paths:
+ # app_values = init_app_values(
+ # root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
+ # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ # app_values)
+
+ # app_base_path = root_path / APPS_PATH
+ # app_values = self.collect_app_values(
+ # app_base_path, base_image_name=base_image_name)
+ # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ # app_values)
+
+ # def collect_app_values(self, app_base_path, base_image_name=None):
+ # values = {}
+
+ # for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
+ # app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
+
+ # if app_name in self.exclude:
+ # continue
+ # app_key = app_name.replace('-', '_')
+
+ # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+
+ # # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None)
+ # # # for dockerfile_path in app_path.rglob('**/Dockerfile'):
+ # # # parent_name = dockerfile_path.parent.name.replace("-", "_")
+ # # # if parent_name == app_key:
+ # # # app_values['build'] = {
+ # # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
+ # # # 'dockerfile': "Dockerfile",
+ # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # # # }
+ # # # elif "tasks/" in f"{dockerfile_path}":
+ # # # parent_name = parent_name.upper()
+ # # # values.setdefault("task-images-build", {})[parent_name] = {
+ # # # 'dockerfile': "Dockerfile",
+ # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # # # }
+ # # # import ipdb; ipdb.set_trace() # fmt: skip
+
+ # # if dockerfile_path:
+ # # app_values['build'] = {
+ # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
+ # # 'dockerfile': "Dockerfile",
+ # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
+ # # }
+
+ # values[app_key] = dict_merge(
+ # values[app_key], app_values) if app_key in values else app_values
+
+ # return values
+
+ # def __init_static_images(self, base_image_name):
+ # for static_img_dockerfile in self.static_images:
+ # img_name = image_name_from_dockerfile_path(os.path.basename(
+ # static_img_dockerfile), base_name=base_image_name)
+ # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=static_img_dockerfile)
+
+ # def __assign_static_build_dependencies(self, helm_values):
+ # for static_img_dockerfile in self.static_images:
+ # key = os.path.basename(static_img_dockerfile)
+ # if key in helm_values[KEY_TASK_IMAGES]:
+ # dependencies = guess_build_dependencies_from_dockerfile(
+ # static_img_dockerfile)
+ # for dep in dependencies:
+ # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
+ # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
+ # # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = {
+ # # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent),
+ # # 'dockerfile': 'Dockerfile',
+ # # }
+
+ # for image_name in helm_values[KEY_TASK_IMAGES].keys():
+ # if image_name in self.exclude:
+ # del helm_values[KEY_TASK_IMAGES][image_name]
+ # # del helm_values[KEY_TASK_IMAGES_BUILD][image_name]
+
+ # def __init_base_images(self, base_image_name):
+
+ # for root_path in self.root_paths:
+ # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
+ # img_name = image_name_from_dockerfile_path(
+ # os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=root_path)
+
+ # self.static_images.update(find_dockerfiles_paths(
+ # os.path.join(root_path, STATIC_IMAGES_PATH)))
+ # return self.base_images
+
+ # def __init_test_images(self, base_image_name):
+ # test_images = {}
+ # for root_path in self.root_paths:
+ # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
+ # img_name = image_name_from_dockerfile_path(
+ # os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=base_img_dockerfile)
+
+ # return test_images
+
+
+ # def __find_static_dockerfile_paths(self, root_path):
+ # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
+
+ # def __merge_base_helm_values(self, helm_values):
+ # # Override for every cloudharness scaffolding
+ # for root_path in self.root_paths:
+ # helm_values = dict_merge(
+ # helm_values,
+ # collect_helm_values(root_path, env=self.env)
+ # )
+
+ # return helm_values
+
+ # def __get_default_helm_values(self):
+ # ch_root_path = Path(CH_ROOT)
+ # values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml'
+ # helm_values = get_template(values_yaml_path)
+ # helm_values = dict_merge(helm_values,
+ # collect_helm_values(ch_root_path, env=self.env))
+
+ # return helm_values
def __get_default_helm_values_with_secrets(self, helm_values):
helm_values = copy.deepcopy(helm_values)
@@ -323,76 +315,76 @@ def __get_default_helm_values_with_secrets(self, helm_values):
helm_values['apps'][key]['harness']['secrets'] = {}
return helm_values
- def create_tls_certificate(self, helm_values):
- if not self.tls:
- helm_values['tls'] = None
- return
- if not self.local:
- return
- helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
-
- bootstrap_file = 'bootstrap.sh'
- certs_parent_folder_path = self.output_path / 'helm' / 'resources'
- certs_folder_path = certs_parent_folder_path / 'certs'
-
- # if os.path.exists(os.path.join(certs_folder_path)):
- if certs_folder_path.exists():
- # don't overwrite the certificate if it exists
- return
-
- try:
- client = DockerClient()
- client.ping()
- except:
- raise ConnectionRefusedError(
- '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
-
- # Create CA and sign cert for domain
- container = client.containers.run(image='frapsoft/openssl',
- command=f'sleep 60',
- entrypoint="",
- detach=True,
- environment=[
- f"DOMAIN={self.domain}"],
- )
-
- container.exec_run('mkdir -p /mnt/vol1')
- container.exec_run('mkdir -p /mnt/certs')
-
- # copy bootstrap file
- cur_dir = os.getcwd()
- os.chdir(Path(HERE) / 'scripts')
- tar = tarfile.open(bootstrap_file + '.tar', mode='w')
- try:
- tar.add(bootstrap_file)
- finally:
- tar.close()
- data = open(bootstrap_file + '.tar', 'rb').read()
- container.put_archive('/mnt/vol1', data)
- os.chdir(cur_dir)
- container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
-
- # exec bootstrap file
- container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
-
- # retrieve the certs from the container
- bits, stat = container.get_archive('/mnt/certs')
- if not certs_folder_path.exists():
- certs_folder_path.mkdir(parents=True)
- certs_tar = certs_parent_folder_path / 'certs.tar'
- with open(certs_tar, 'wb') as f:
- for chunk in bits:
- f.write(chunk)
- cf = tarfile.open(certs_tar)
- cf.extractall(path=certs_parent_folder_path)
-
- logs = container.logs()
- logging.info(f'openssl container logs: {logs}')
-
- # stop the container
- container.kill()
-
- logging.info("Created certificates for local deployment")
+ # def create_tls_certificate(self, helm_values):
+ # if not self.tls:
+ # helm_values['tls'] = None
+ # return
+ # if not self.local:
+ # return
+ # helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
+
+ # bootstrap_file = 'bootstrap.sh'
+ # certs_parent_folder_path = self.output_path / 'helm' / 'resources'
+ # certs_folder_path = certs_parent_folder_path / 'certs'
+
+ # # if os.path.exists(os.path.join(certs_folder_path)):
+ # if certs_folder_path.exists():
+ # # don't overwrite the certificate if it exists
+ # return
+
+ # try:
+ # client = DockerClient()
+ # client.ping()
+ # except:
+ # raise ConnectionRefusedError(
+ # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
+
+ # # Create CA and sign cert for domain
+ # container = client.containers.run(image='frapsoft/openssl',
+ # command=f'sleep 60',
+ # entrypoint="",
+ # detach=True,
+ # environment=[
+ # f"DOMAIN={self.domain}"],
+ # )
+
+ # container.exec_run('mkdir -p /mnt/vol1')
+ # container.exec_run('mkdir -p /mnt/certs')
+
+ # # copy bootstrap file
+ # cur_dir = os.getcwd()
+ # os.chdir(Path(HERE) / 'scripts')
+ # tar = tarfile.open(bootstrap_file + '.tar', mode='w')
+ # try:
+ # tar.add(bootstrap_file)
+ # finally:
+ # tar.close()
+ # data = open(bootstrap_file + '.tar', 'rb').read()
+ # container.put_archive('/mnt/vol1', data)
+ # os.chdir(cur_dir)
+ # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
+
+ # # exec bootstrap file
+ # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
+
+ # # retrieve the certs from the container
+ # bits, stat = container.get_archive('/mnt/certs')
+ # if not certs_folder_path.exists():
+ # certs_folder_path.mkdir(parents=True)
+ # certs_tar = certs_parent_folder_path / 'certs.tar'
+ # with open(certs_tar, 'wb') as f:
+ # for chunk in bits:
+ # f.write(chunk)
+ # cf = tarfile.open(certs_tar)
+ # cf.extractall(path=certs_parent_folder_path)
+
+ # logs = container.logs()
+ # logging.info(f'openssl container logs: {logs}')
+
+ # # stop the container
+ # container.kill()
+
+ # logging.info("Created certificates for local deployment")
def __finish_helm_values(self, values):
"""
@@ -444,7 +436,7 @@ def __finish_helm_values(self, values):
if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None):
harness[KEY_DATABASE]['name'] = app_name.strip() + '-db'
- self.__clear_unused_db_configuration(harness)
+ self._clear_unused_db_configuration(harness)
values_set_legacy(v)
if self.include:
@@ -464,36 +456,36 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- def __clear_unused_db_configuration(self, harness_config):
- database_config = harness_config[KEY_DATABASE]
- database_type = database_config.get('type', None)
- if database_type is None:
- del harness_config[KEY_DATABASE]
- return
- db_specific_keys = [k for k, v in database_config.items()
- if isinstance(v, dict) and 'image' in v and 'ports' in v]
- for db in db_specific_keys:
- if database_type != db:
- del database_config[db]
-
- def image_tag(self, image_name, build_context_path=None, dependencies=()):
- tag = self.tag
- if tag is None and not self.local:
- logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- ignore_path = os.path.join(build_context_path, '.dockerignore')
- ignore = set(DEFAULT_IGNORE)
- if os.path.exists(ignore_path):
- with open(ignore_path) as f:
- ignore = ignore.union({line.strip() for line in f})
- logging.info(f"Ignoring {ignore}")
- tag = generate_tag_from_content(build_context_path, ignore)
- logging.info(f"Content hash: {tag}")
- dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
- logging.info(f"Generated tag: {tag}")
- app_name = image_name.split("/")[-1] # the image name can have a prefix
- self.all_images[app_name] = tag
- return self.registry + image_name + (f':{tag}' if tag else '')
+ # def __clear_unused_db_configuration(self, harness_config):
+ # database_config = harness_config[KEY_DATABASE]
+ # database_type = database_config.get('type', None)
+ # if database_type is None:
+ # del harness_config[KEY_DATABASE]
+ # return
+ # db_specific_keys = [k for k, v in database_config.items()
+ # if isinstance(v, dict) and 'image' in v and 'ports' in v]
+ # for db in db_specific_keys:
+ # if database_type != db:
+ # del database_config[db]
+
+ # def image_tag(self, image_name, build_context_path=None, dependencies=()):
+ # tag = self.tag
+ # if tag is None and not self.local:
+ # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
+ # ignore_path = os.path.join(build_context_path, '.dockerignore')
+ # ignore = set(DEFAULT_IGNORE)
+ # if os.path.exists(ignore_path):
+ # with open(ignore_path) as f:
+ # ignore = ignore.union({line.strip() for line in f})
+ # logging.info(f"Ignoring {ignore}")
+ # tag = generate_tag_from_content(build_context_path, ignore)
+ # logging.info(f"Content hash: {tag}")
+ # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
+ # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
+ # logging.info(f"Generated tag: {tag}")
+ # app_name = image_name.split("/")[-1] # the image name can have a prefix
+ # self.all_images[app_name] = tag
+ # return self.registry + image_name + (f':{tag}' if tag else '')
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
@@ -559,8 +551,6 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
task_path, app_path.parent))
img_name = image_name_from_dockerfile_path(task_name, base_image_name)
- # import ipdb; ipdb.set_trace() # fmt: skip
-
# values[KEY_TASK_IMAGES][task_name] = self.image_tag(
# img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
# values.setdefault(KEY_TASK_IMAGES_BUILD, {})[task_name] = {
@@ -591,281 +581,281 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path):
helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'
-def get_included_with_dependencies(values, include):
- app_values = values['apps'].values()
- directly_included = [app for app in app_values if any(
- inc == app[KEY_HARNESS]['name'] for inc in include)]
-
- dependent = set(include)
- for app in directly_included:
- if app['harness']['dependencies'].get('hard', None):
- dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
- if app['harness']['dependencies'].get('soft', None):
- dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
- if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
- dependent.add('accounts')
- if len(dependent) == len(include):
- return dependent
- return get_included_with_dependencies(values, dependent)
-
-
-def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
- pass
-
-
-def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None):
- """
- Searches recursively for helm templates inside the applications and collects the templates in the destination
-
- :param search_root:
- :param dest_helm_chart_path: collected helm templates destination folder
- :param exclude:
- :return:
- """
- app_base_path = search_root / APPS_PATH
-
- for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
- app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path))
- if app_name in exclude or (include and not any(inc in app_name for inc in include)):
- continue
- template_dir = app_path / 'deploy' / f'templates-{templates_path}'
- if template_dir.exists():
- dest_dir = dest_helm_chart_path / 'templates' / app_name
-
- logging.info(
- "Collecting templates for application %s to %s", app_name, dest_dir)
- if dest_dir.exists():
- logging.warning(
- "Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(f"{template_dir}", f"{dest_dir}")
- else:
- shutil.copytree(template_dir, dest_dir)
- resources_dir = app_path / 'deploy' / 'resources'
- if resources_dir.exists():
- dest_dir = dest_helm_chart_path / 'resources' / app_name
-
- logging.info(
- "Collecting resources for application %s to %s", app_name, dest_dir)
-
- merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
-
- # subchart_dir = app_path / 'deploy/charts'
- # if subchart_dir.exists():
- # dest_dir = dest_helm_chart_path / 'charts' / app_name
-
- # logging.info(
- # "Collecting templates for application %s to %s", app_name, dest_dir)
- # if dest_dir.exists():
- # logging.warning(
- # "Merging/overriding all files in directory %s", dest_dir)
- # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
- # else:
- # shutil.copytree(subchart_dir, dest_dir)
-
-
-def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
- if not base_helm_chart.exists():
- return
- if dest_helm_chart_path.exists():
- logging.info("Merging/overriding all files in directory %s",
- dest_helm_chart_path)
- merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
- else:
- logging.info("Copying base deployment chart from %s to %s",
- base_helm_chart, dest_helm_chart_path)
- shutil.copytree(base_helm_chart, dest_helm_chart_path)
-
-
-def collect_helm_values(deployment_root, env=()):
- """
- Creates helm values from a cloudharness deployment scaffolding
- """
- values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml'
-
- values = get_template(values_template_path)
-
- for e in env:
- specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
- f'values-template-{e}.yaml')
- if os.path.exists(specific_template_path):
- logging.info(
- "Specific environment values template found: " + specific_template_path)
- with open(specific_template_path) as f:
- values_env_specific = yaml.safe_load(f)
- values = dict_merge(values, values_env_specific)
- return values
-
-
-def init_app_values(deployment_root, exclude, values=None):
- values = values if values is not None else {}
- app_base_path = os.path.join(deployment_root, APPS_PATH)
- overridden_template_path = os.path.join(
- deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
- default_values_path = os.path.join(
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-
- for app_path in get_sub_paths(app_base_path):
-
- app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
-
- if app_name in exclude:
- continue
- app_key = app_name.replace('-', '_')
- if app_key not in values:
- default_values = get_template(default_values_path)
- values[app_key] = default_values
- overridden_defaults = get_template(overridden_template_path)
- values[app_key] = dict_merge(values[app_key], overridden_defaults)
-
- return values
-
-
-def values_from_legacy(values):
- if KEY_HARNESS not in values:
- values[KEY_HARNESS] = {}
- harness = values[KEY_HARNESS]
- if KEY_SERVICE not in harness:
- harness[KEY_SERVICE] = {}
- if KEY_DEPLOYMENT not in harness:
- harness[KEY_DEPLOYMENT] = {}
- if KEY_DATABASE not in harness:
- harness[KEY_DATABASE] = {}
-
- if 'subdomain' in values:
- harness['subdomain'] = values['subdomain']
- if 'autodeploy' in values:
- harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
- if 'autoservice' in values:
- harness[KEY_SERVICE]['auto'] = values['autoservice']
- if 'secureme' in values:
- harness['secured'] = values['secureme']
- if 'resources' in values:
- harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
- if 'replicas' in values:
- harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
- if 'image' in values:
- harness[KEY_DEPLOYMENT]['image'] = values['image']
- if 'port' in values:
- harness[KEY_DEPLOYMENT]['port'] = values['port']
- harness[KEY_SERVICE]['port'] = values['port']
-
-
-def values_set_legacy(values):
- harness = values[KEY_HARNESS]
- if 'image' in harness[KEY_DEPLOYMENT]:
- values['image'] = harness[KEY_DEPLOYMENT]['image']
-
- values['name'] = harness['name']
- if harness[KEY_DEPLOYMENT].get('port', None):
- values['port'] = harness[KEY_DEPLOYMENT]['port']
- if 'resources' in harness[KEY_DEPLOYMENT]:
- values['resources'] = harness[KEY_DEPLOYMENT]['resources']
-
-
-def generate_tag_from_content(content_path, ignore=()):
- from dirhash import dirhash
- return dirhash(content_path, 'sha1', ignore=ignore)
-
-
-def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
- if isinstance(values, dict):
- newenvs = list(envs)
- for key, value in values.items():
- v = extract_env_variables_from_values(
- value, envs, f"{prefix}_{key}".replace('-', '_').upper())
- if key in ('name', 'port', 'subdomain'):
- newenvs.extend(v)
- return newenvs
- else:
- return [env_variable(prefix, values)]
-
-
-def create_env_variables(values):
- for app_name, value in values[KEY_APPS].items():
- if KEY_HARNESS in value:
- values['env'].extend(extract_env_variables_from_values(
- value[KEY_HARNESS], prefix='CH_' + app_name))
- values['env'].append(env_variable('CH_DOMAIN', values['domain']))
- values['env'].append(env_variable(
- 'CH_IMAGE_REGISTRY', values['registry']['name']))
- values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
-
-
-def hosts_info(values):
- domain = values['domain']
- namespace = values['namespace']
- subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
- KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
- KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
- try:
- ip = get_cluster_ip()
- except:
- logging.warning('Cannot get cluster ip')
- return
- logging.info(
- "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
-
- deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
- for app in values[KEY_APPS].values() if KEY_HARNESS in app)
-
- logging.info(
- "\nTo run locally some apps, also those references may be needed")
- for appname in values[KEY_APPS]:
- app = values[KEY_APPS][appname]['harness']
- if 'deployment' not in app:
- continue
- print(
- "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
- app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
-
- print(
- f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
-
-
-class ValuesValidationException(Exception):
- pass
-
-
-def validate_helm_values(values):
- validate_dependencies(values)
-
-
-def validate_dependencies(values):
- all_apps = {a for a in values["apps"]}
- for app in all_apps:
- app_values = values["apps"][app]
- if 'dependencies' in app_values[KEY_HARNESS]:
- soft_dependencies = {
- d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
- not_found = {d for d in soft_dependencies if d not in all_apps}
- if not_found:
- logging.warning(
- f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
- hard_dependencies = {
- d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
- not_found = {d for d in hard_dependencies if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
-
- build_dependencies = {
- d for d in app_values[KEY_HARNESS]['dependencies']['build']}
-
- not_found = {
- d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
- not_found = {d for d in not_found if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
-
- if 'use_services' in app_values[KEY_HARNESS]:
- service_dependencies = {d['name'].replace(
- "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
-
- not_found = {d for d in service_dependencies if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
+# def get_included_with_dependencies(values, include):
+# app_values = values['apps'].values()
+# directly_included = [app for app in app_values if any(
+# inc == app[KEY_HARNESS]['name'] for inc in include)]
+
+# dependent = set(include)
+# for app in directly_included:
+# if app['harness']['dependencies'].get('hard', None):
+# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
+# if app['harness']['dependencies'].get('soft', None):
+# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
+# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
+# dependent.add('accounts')
+# if len(dependent) == len(include):
+# return dependent
+# return get_included_with_dependencies(values, dependent)
+
+
+# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
+# pass
+
+
+# def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None):
+# """
+# Searches recursively for helm templates inside the applications and collects the templates in the destination
+
+# :param search_root:
+# :param dest_helm_chart_path: collected helm templates destination folder
+# :param exclude:
+# :return:
+# """
+# app_base_path = search_root / APPS_PATH
+
+# for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
+# app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path))
+# if app_name in exclude or (include and not any(inc in app_name for inc in include)):
+# continue
+# template_dir = app_path / 'deploy' / f'templates-{templates_path}'
+# if template_dir.exists():
+# dest_dir = dest_helm_chart_path / 'templates' / app_name
+
+# logging.info(
+# "Collecting templates for application %s to %s", app_name, dest_dir)
+# if dest_dir.exists():
+# logging.warning(
+# "Merging/overriding all files in directory %s", dest_dir)
+# merge_configuration_directories(f"{template_dir}", f"{dest_dir}")
+# else:
+# shutil.copytree(template_dir, dest_dir)
+# resources_dir = app_path / 'deploy' / 'resources'
+# if resources_dir.exists():
+# dest_dir = dest_helm_chart_path / 'resources' / app_name
+
+# logging.info(
+# "Collecting resources for application %s to %s", app_name, dest_dir)
+
+# merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
+
+# # subchart_dir = app_path / 'deploy/charts'
+# # if subchart_dir.exists():
+# # dest_dir = dest_helm_chart_path / 'charts' / app_name
+
+# # logging.info(
+# # "Collecting templates for application %s to %s", app_name, dest_dir)
+# # if dest_dir.exists():
+# # logging.warning(
+# # "Merging/overriding all files in directory %s", dest_dir)
+# # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
+# # else:
+# # shutil.copytree(subchart_dir, dest_dir)
+
+
+# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
+# if not base_helm_chart.exists():
+# return
+# if dest_helm_chart_path.exists():
+# logging.info("Merging/overriding all files in directory %s",
+# dest_helm_chart_path)
+# merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
+# else:
+# logging.info("Copying base deployment chart from %s to %s",
+# base_helm_chart, dest_helm_chart_path)
+# shutil.copytree(base_helm_chart, dest_helm_chart_path)
+
+
+# def collect_helm_values(deployment_root, env=()):
+# """
+# Creates helm values from a cloudharness deployment scaffolding
+# """
+# values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml'
+
+# values = get_template(values_template_path)
+
+# for e in env:
+# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
+# f'values-template-{e}.yaml')
+# if os.path.exists(specific_template_path):
+# logging.info(
+# "Specific environment values template found: " + specific_template_path)
+# with open(specific_template_path) as f:
+# values_env_specific = yaml.safe_load(f)
+# values = dict_merge(values, values_env_specific)
+# return values
+
+
+# def init_app_values(deployment_root, exclude, values=None):
+# values = values if values is not None else {}
+# app_base_path = os.path.join(deployment_root, APPS_PATH)
+# overridden_template_path = os.path.join(
+# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+# default_values_path = os.path.join(
+# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+
+# for app_path in get_sub_paths(app_base_path):
+
+# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+
+# if app_name in exclude:
+# continue
+# app_key = app_name.replace('-', '_')
+# if app_key not in values:
+# default_values = get_template(default_values_path)
+# values[app_key] = default_values
+# overridden_defaults = get_template(overridden_template_path)
+# values[app_key] = dict_merge(values[app_key], overridden_defaults)
+
+# return values
+
+
+# def values_from_legacy(values):
+# if KEY_HARNESS not in values:
+# values[KEY_HARNESS] = {}
+# harness = values[KEY_HARNESS]
+# if KEY_SERVICE not in harness:
+# harness[KEY_SERVICE] = {}
+# if KEY_DEPLOYMENT not in harness:
+# harness[KEY_DEPLOYMENT] = {}
+# if KEY_DATABASE not in harness:
+# harness[KEY_DATABASE] = {}
+
+# if 'subdomain' in values:
+# harness['subdomain'] = values['subdomain']
+# if 'autodeploy' in values:
+# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
+# if 'autoservice' in values:
+# harness[KEY_SERVICE]['auto'] = values['autoservice']
+# if 'secureme' in values:
+# harness['secured'] = values['secureme']
+# if 'resources' in values:
+# harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
+# if 'replicas' in values:
+# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
+# if 'image' in values:
+# harness[KEY_DEPLOYMENT]['image'] = values['image']
+# if 'port' in values:
+# harness[KEY_DEPLOYMENT]['port'] = values['port']
+# harness[KEY_SERVICE]['port'] = values['port']
+
+
+# def values_set_legacy(values):
+# harness = values[KEY_HARNESS]
+# if 'image' in harness[KEY_DEPLOYMENT]:
+# values['image'] = harness[KEY_DEPLOYMENT]['image']
+
+# values['name'] = harness['name']
+# if harness[KEY_DEPLOYMENT].get('port', None):
+# values['port'] = harness[KEY_DEPLOYMENT]['port']
+# if 'resources' in harness[KEY_DEPLOYMENT]:
+# values['resources'] = harness[KEY_DEPLOYMENT]['resources']
+
+
+# def generate_tag_from_content(content_path, ignore=()):
+# from dirhash import dirhash
+# return dirhash(content_path, 'sha1', ignore=ignore)
+
+
+# def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
+# if isinstance(values, dict):
+# newenvs = list(envs)
+# for key, value in values.items():
+# v = extract_env_variables_from_values(
+# value, envs, f"{prefix}_{key}".replace('-', '_').upper())
+# if key in ('name', 'port', 'subdomain'):
+# newenvs.extend(v)
+# return newenvs
+# else:
+# return [env_variable(prefix, values)]
+
+
+# def create_env_variables(values):
+# for app_name, value in values[KEY_APPS].items():
+# if KEY_HARNESS in value:
+# values['env'].extend(extract_env_variables_from_values(
+# value[KEY_HARNESS], prefix='CH_' + app_name))
+# values['env'].append(env_variable('CH_DOMAIN', values['domain']))
+# values['env'].append(env_variable(
+# 'CH_IMAGE_REGISTRY', values['registry']['name']))
+# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
+
+
+# def hosts_info(values):
+# domain = values['domain']
+# namespace = values['namespace']
+# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
+# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
+# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
+# try:
+# ip = get_cluster_ip()
+# except:
+# logging.warning('Cannot get cluster ip')
+# return
+# logging.info(
+# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
+
+# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
+# for app in values[KEY_APPS].values() if KEY_HARNESS in app)
+
+# logging.info(
+# "\nTo run locally some apps, also those references may be needed")
+# for appname in values[KEY_APPS]:
+# app = values[KEY_APPS][appname]['harness']
+# if 'deployment' not in app:
+# continue
+# print(
+# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
+# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
+
+# print(
+# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
+
+
+# class ValuesValidationException(Exception):
+# pass
+
+
+# def validate_helm_values(values):
+# validate_dependencies(values)
+
+
+# def validate_dependencies(values):
+# all_apps = {a for a in values["apps"]}
+# for app in all_apps:
+# app_values = values["apps"][app]
+# if 'dependencies' in app_values[KEY_HARNESS]:
+# soft_dependencies = {
+# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
+# not_found = {d for d in soft_dependencies if d not in all_apps}
+# if not_found:
+# logging.warning(
+# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
+# hard_dependencies = {
+# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
+# not_found = {d for d in hard_dependencies if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
+
+# build_dependencies = {
+# d for d in app_values[KEY_HARNESS]['dependencies']['build']}
+
+# not_found = {
+# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
+# not_found = {d for d in not_found if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
+
+# if 'use_services' in app_values[KEY_HARNESS]:
+# service_dependencies = {d['name'].replace(
+# "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
+
+# not_found = {d for d in service_dependencies if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
def identify_unicorn_based_main(candidates, app_path):
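Aside on the helper being retired above: get_included_with_dependencies computes the transitive closure of the app dependency graph, recursing until the include set stops growing. A minimal, self-contained sketch of that fixed-point idea (hypothetical names, not the CLI code itself):

    def dependency_closure(apps, include):
        # apps maps app name -> {'hard': [...], 'soft': [...]}
        closure = set(include)
        while True:
            expanded = set(closure)
            for name in closure:
                deps = apps.get(name, {})
                expanded.update(deps.get('hard', ()))
                expanded.update(deps.get('soft', ()))
            if expanded == closure:  # fixed point: nothing new was pulled in
                return closure
            closure = expanded

    assert dependency_closure(
        {'web': {'hard': ['db'], 'soft': ['cache']}, 'db': {}, 'cache': {}},
        {'web'},
    ) == {'web', 'db', 'cache'}
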
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index f5eb560fa..bd49f8eee 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -21,15 +21,8 @@
from .models import HarnessMainConfig
-KEY_HARNESS = 'harness'
-KEY_SERVICE = 'service'
-KEY_DATABASE = 'database'
-KEY_DEPLOYMENT = 'deployment'
-KEY_APPS = 'apps'
-KEY_TASK_IMAGES = 'task-images'
-KEY_TEST_IMAGES = 'test-images'
+from .configurationgenerator import ConfigurationGenerator, validate_helm_values, \
+    KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, \
+    KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, \
+    get_included_with_dependencies, create_env_variables, collect_apps_helm_templates
-DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
def deploy(namespace, output_path='./deployment'):
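The import hunk above collapses the module-level constants into the new shared module. A plausible skeleton of ch_cli_tools/configurationgenerator.py after this refactor — the constant values are exactly the deleted lines; the class body is elided and hypothetical:

    KEY_HARNESS = 'harness'
    KEY_SERVICE = 'service'
    KEY_DATABASE = 'database'
    KEY_DEPLOYMENT = 'deployment'
    KEY_APPS = 'apps'
    KEY_TASK_IMAGES = 'task-images'
    KEY_TEST_IMAGES = 'test-images'


    class ConfigurationGenerator:
        """Logic shared by the Helm and docker-compose generators:
        deployment scaffolding, image collection and tagging, values merging."""
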
@@ -51,70 +44,70 @@ def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain=
namespace=namespace).process_values()
-class CloudHarnessHelm:
- def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
- output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- namespace=None):
- assert domain, 'A domain must be specified'
- self.root_paths = root_paths
- self.tag = tag
- if registry and registry[-1] != '/':
- self.registry = registry + '/'
- else:
- self.registry = registry
- self.local = local
- self.domain = domain
- self.exclude = exclude
- self.secured = secured
- self.output_path = output_path
- self.include = include
- self.registry_secret = registry_secret
- self.tls = tls
- self.env = env
- self.namespace = namespace
-
- self.dest_deployment_path = os.path.join(
- self.output_path, HELM_CHART_PATH)
- self.helm_chart_path = os.path.join(
- self.dest_deployment_path, 'Chart.yaml')
- self.__init_deployment()
-
- self.static_images = set()
- self.base_images = {}
- self.all_images = {}
-
- def __init_deployment(self):
- """
- Create the base helm chart
- """
- if os.path.exists(self.dest_deployment_path):
- shutil.rmtree(self.dest_deployment_path)
- # Initialize with default
- copy_merge_base_deployment(self.dest_deployment_path, os.path.join(
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
-
- # Override for every cloudharness scaffolding
- for root_path in self.root_paths:
- copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
- base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
- collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
- dest_helm_chart_path=self.dest_deployment_path)
-
- def __adjust_missing_values(self, helm_values):
- if 'name' not in helm_values:
- with open(self.helm_chart_path) as f:
- chart_idx_content = yaml.safe_load(f)
- helm_values['name'] = chart_idx_content['name'].lower()
+class CloudHarnessHelm(ConfigurationGenerator):
+ # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ # namespace=None):
+ # assert domain, 'A domain must be specified'
+ # self.root_paths = root_paths
+ # self.tag = tag
+ # if registry and registry[-1] != '/':
+ # self.registry = registry + '/'
+ # else:
+ # self.registry = registry
+ # self.local = local
+ # self.domain = domain
+ # self.exclude = exclude
+ # self.secured = secured
+ # self.output_path = output_path
+ # self.include = include
+ # self.registry_secret = registry_secret
+ # self.tls = tls
+ # self.env = env
+ # self.namespace = namespace
+
+ # self.dest_deployment_path = os.path.join(
+ # self.output_path, HELM_CHART_PATH)
+ # self.helm_chart_path = os.path.join(
+ # self.dest_deployment_path, 'Chart.yaml')
+ # self.__init_deployment()
+
+ # self.static_images = set()
+ # self.base_images = {}
+ # self.all_images = {}
+
+ # def __init_deployment(self):
+ # """
+ # Create the base helm chart
+ # """
+ # if os.path.exists(self.dest_deployment_path):
+ # shutil.rmtree(self.dest_deployment_path)
+ # # Initialize with default
+ # copy_merge_base_deployment(self.dest_deployment_path, os.path.join(
+ # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
+
+ # # Override for every cloudharness scaffolding
+ # for root_path in self.root_paths:
+ # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
+ # base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
+ # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
+ # dest_helm_chart_path=self.dest_deployment_path)
+
+ # def __adjust_missing_values(self, helm_values):
+ # if 'name' not in helm_values:
+ # with open(self.helm_chart_path) as f:
+ # chart_idx_content = yaml.safe_load(f)
+ # helm_values['name'] = chart_idx_content['name'].lower()
def process_values(self) -> HarnessMainConfig:
"""
Creates values file for the helm chart
"""
- helm_values = self.__get_default_helm_values()
+ helm_values = self._get_default_helm_values()
- self.__adjust_missing_values(helm_values)
+ self._adjust_missing_values(helm_values)
- helm_values = self.__merge_base_helm_values(helm_values)
+ helm_values = self._merge_base_helm_values(helm_values)
helm_values[KEY_APPS] = {}
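The renames in this hunk — two leading underscores down to one — are what make the shared ConfigurationGenerator base class workable: Python name-mangles __name attributes per class, so class-private helpers cannot be inherited or overridden across the hierarchy. A compact illustration of the difference:

    class Base:
        def run(self):
            return self.__helper()  # compiled as self._Base__helper()

        def __helper(self):
            return 'base'

    class Child(Base):
        def __helper(self):  # mangled to _Child__helper: run() never sees it
            return 'child'

    class Base2:
        def run(self):
            return self._helper()  # plain attribute lookup: overrides apply

        def _helper(self):
            return 'base'

    class Child2(Base2):
        def _helper(self):
            return 'child'

    assert Child().run() == 'base'    # mangling bypasses the override
    assert Child2().run() == 'child'  # single underscore allows it
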
@@ -122,18 +115,18 @@ def process_values(self) -> HarnessMainConfig:
helm_values[KEY_TASK_IMAGES] = {}
- self.__init_base_images(base_image_name)
- self.__init_static_images(base_image_name)
- helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name)
+ self._init_base_images(base_image_name)
+ self._init_static_images(base_image_name)
+ helm_values[KEY_TEST_IMAGES] = self._init_test_images(base_image_name)
- self.__process_applications(helm_values, base_image_name)
+ self._process_applications(helm_values, base_image_name)
self.create_tls_certificate(helm_values)
values, include = self.__finish_helm_values(values=helm_values)
# Adjust dependencies from static (common) images
- self.__assign_static_build_dependencies(helm_values)
+ self._assign_static_build_dependencies(helm_values)
for root_path in self.root_paths:
collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
@@ -148,174 +141,174 @@ def process_values(self) -> HarnessMainConfig:
validate_helm_values(merged_values)
return HarnessMainConfig.from_dict(merged_values)
- def __process_applications(self, helm_values, base_image_name):
- for root_path in self.root_paths:
- app_values = init_app_values(
- root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
- helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- app_values)
-
- app_base_path = os.path.join(root_path, APPS_PATH)
- app_values = self.collect_app_values(
- app_base_path, base_image_name=base_image_name)
- helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- app_values)
-
- def collect_app_values(self, app_base_path, base_image_name=None):
- values = {}
-
- for app_path in get_sub_paths(app_base_path):
- app_name = app_name_from_path(
- os.path.relpath(app_path, app_base_path))
-
- if app_name in self.exclude:
- continue
- app_key = app_name.replace('-', '_')
-
- app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
-
- values[app_key] = dict_merge(
- values[app_key], app_values) if app_key in values else app_values
-
- return values
-
- def __init_static_images(self, base_image_name):
- for static_img_dockerfile in self.static_images:
- img_name = image_name_from_dockerfile_path(os.path.basename(
- static_img_dockerfile), base_name=base_image_name)
- self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=static_img_dockerfile)
-
- def __assign_static_build_dependencies(self, helm_values):
- for static_img_dockerfile in self.static_images:
- key = os.path.basename(static_img_dockerfile)
- if key in helm_values[KEY_TASK_IMAGES]:
- dependencies = guess_build_dependencies_from_dockerfile(
- static_img_dockerfile)
- for dep in dependencies:
- if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
- helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
-
- for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
- if image_name in self.exclude:
- del helm_values[KEY_TASK_IMAGES][image_name]
-
- def __init_base_images(self, base_image_name):
-
- for root_path in self.root_paths:
- for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
- img_name = image_name_from_dockerfile_path(
- os.path.basename(base_img_dockerfile), base_name=base_image_name)
- self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=root_path)
-
- self.static_images.update(find_dockerfiles_paths(
- os.path.join(root_path, STATIC_IMAGES_PATH)))
- return self.base_images
-
- def __init_test_images(self, base_image_name):
- test_images = {}
- for root_path in self.root_paths:
- for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
- img_name = image_name_from_dockerfile_path(
- os.path.basename(base_img_dockerfile), base_name=base_image_name)
- test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=base_img_dockerfile)
-
- return test_images
-
-
- def __find_static_dockerfile_paths(self, root_path):
- return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
-
- def __merge_base_helm_values(self, helm_values):
- # Override for every cloudharness scaffolding
- for root_path in self.root_paths:
- helm_values = dict_merge(
- helm_values,
- collect_helm_values(root_path, env=self.env)
- )
-
- return helm_values
-
- def __get_default_helm_values(self):
- helm_values = get_template(os.path.join(
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml'))
- helm_values = dict_merge(helm_values,
- collect_helm_values(CH_ROOT, env=self.env))
-
- return helm_values
-
- def create_tls_certificate(self, helm_values):
- if not self.tls:
- helm_values['tls'] = None
- return
- if not self.local:
- return
- helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
-
- bootstrap_file = 'bootstrap.sh'
- certs_parent_folder_path = os.path.join(
- self.output_path, 'helm', 'resources')
- certs_folder_path = os.path.join(certs_parent_folder_path, 'certs')
-
- if os.path.exists(os.path.join(certs_folder_path)):
- # don't overwrite the certificate if it exists
- return
-
- try:
- client = DockerClient()
- client.ping()
- except:
- raise ConnectionRefusedError(
- '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
-
- # Create CA and sign cert for domain
- container = client.containers.run(image='frapsoft/openssl',
- command=f'sleep 60',
- entrypoint="",
- detach=True,
- environment=[
- f"DOMAIN={self.domain}"],
- )
-
- container.exec_run('mkdir -p /mnt/vol1')
- container.exec_run('mkdir -p /mnt/certs')
-
- # copy bootstrap file
- cur_dir = os.getcwd()
- os.chdir(os.path.join(HERE, 'scripts'))
- tar = tarfile.open(bootstrap_file + '.tar', mode='w')
- try:
- tar.add(bootstrap_file)
- finally:
- tar.close()
- data = open(bootstrap_file + '.tar', 'rb').read()
- container.put_archive('/mnt/vol1', data)
- os.chdir(cur_dir)
- container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
-
- # exec bootstrap file
- container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
-
- # retrieve the certs from the container
- bits, stat = container.get_archive('/mnt/certs')
- if not os.path.exists(certs_folder_path):
- os.makedirs(certs_folder_path)
- f = open(f'{certs_parent_folder_path}/certs.tar', 'wb')
- for chunk in bits:
- f.write(chunk)
- f.close()
- cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar')
- cf.extractall(path=certs_parent_folder_path)
-
- logs = container.logs()
- logging.info(f'openssl container logs: {logs}')
-
- # stop the container
- container.kill()
-
- logging.info("Created certificates for local deployment")
+ # def __process_applications(self, helm_values, base_image_name):
+ # for root_path in self.root_paths:
+ # app_values = init_app_values(
+ # root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
+ # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ # app_values)
+
+ # app_base_path = os.path.join(root_path, APPS_PATH)
+ # app_values = self.collect_app_values(
+ # app_base_path, base_image_name=base_image_name)
+ # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
+ # app_values)
+
+ # def collect_app_values(self, app_base_path, base_image_name=None):
+ # values = {}
+
+ # for app_path in get_sub_paths(app_base_path):
+ # app_name = app_name_from_path(
+ # os.path.relpath(app_path, app_base_path))
+
+ # if app_name in self.exclude:
+ # continue
+ # app_key = app_name.replace('-', '_')
+
+ # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+
+ # values[app_key] = dict_merge(
+ # values[app_key], app_values) if app_key in values else app_values
+
+ # return values
+
+ # def __init_static_images(self, base_image_name):
+ # for static_img_dockerfile in self.static_images:
+ # img_name = image_name_from_dockerfile_path(os.path.basename(
+ # static_img_dockerfile), base_name=base_image_name)
+ # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=static_img_dockerfile)
+
+ # def __assign_static_build_dependencies(self, helm_values):
+ # for static_img_dockerfile in self.static_images:
+ # key = os.path.basename(static_img_dockerfile)
+ # if key in helm_values[KEY_TASK_IMAGES]:
+ # dependencies = guess_build_dependencies_from_dockerfile(
+ # static_img_dockerfile)
+ # for dep in dependencies:
+ # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
+ # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
+
+ # for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
+ # if image_name in self.exclude:
+ # del helm_values[KEY_TASK_IMAGES][image_name]
+
+ # def __init_base_images(self, base_image_name):
+
+ # for root_path in self.root_paths:
+ # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
+ # img_name = image_name_from_dockerfile_path(
+ # os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=root_path)
+
+ # self.static_images.update(find_dockerfiles_paths(
+ # os.path.join(root_path, STATIC_IMAGES_PATH)))
+ # return self.base_images
+
+ # def __init_test_images(self, base_image_name):
+ # test_images = {}
+ # for root_path in self.root_paths:
+ # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
+ # img_name = image_name_from_dockerfile_path(
+ # os.path.basename(base_img_dockerfile), base_name=base_image_name)
+ # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
+ # img_name, build_context_path=base_img_dockerfile)
+
+ # return test_images
+
+
+ # def __find_static_dockerfile_paths(self, root_path):
+ # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
+
+ # def __merge_base_helm_values(self, helm_values):
+ # # Override for every cloudharness scaffolding
+ # for root_path in self.root_paths:
+ # helm_values = dict_merge(
+ # helm_values,
+ # collect_helm_values(root_path, env=self.env)
+ # )
+
+ # return helm_values
+
+ # def __get_default_helm_values(self):
+ # helm_values = get_template(os.path.join(
+ # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml'))
+ # helm_values = dict_merge(helm_values,
+ # collect_helm_values(CH_ROOT, env=self.env))
+
+ # return helm_values
+
+ # def create_tls_certificate(self, helm_values):
+ # if not self.tls:
+ # helm_values['tls'] = None
+ # return
+ # if not self.local:
+ # return
+ # helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
+
+ # bootstrap_file = 'bootstrap.sh'
+ # certs_parent_folder_path = os.path.join(
+ # self.output_path, 'helm', 'resources')
+ # certs_folder_path = os.path.join(certs_parent_folder_path, 'certs')
+
+ # if os.path.exists(os.path.join(certs_folder_path)):
+ # # don't overwrite the certificate if it exists
+ # return
+
+ # try:
+ # client = DockerClient()
+ # client.ping()
+ # except:
+ # raise ConnectionRefusedError(
+ # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
+
+ # # Create CA and sign cert for domain
+ # container = client.containers.run(image='frapsoft/openssl',
+ # command=f'sleep 60',
+ # entrypoint="",
+ # detach=True,
+ # environment=[
+ # f"DOMAIN={self.domain}"],
+ # )
+
+ # container.exec_run('mkdir -p /mnt/vol1')
+ # container.exec_run('mkdir -p /mnt/certs')
+
+ # # copy bootstrap file
+ # cur_dir = os.getcwd()
+ # os.chdir(os.path.join(HERE, 'scripts'))
+ # tar = tarfile.open(bootstrap_file + '.tar', mode='w')
+ # try:
+ # tar.add(bootstrap_file)
+ # finally:
+ # tar.close()
+ # data = open(bootstrap_file + '.tar', 'rb').read()
+ # container.put_archive('/mnt/vol1', data)
+ # os.chdir(cur_dir)
+ # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
+
+ # # exec bootstrap file
+ # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
+
+ # # retrieve the certs from the container
+ # bits, stat = container.get_archive('/mnt/certs')
+ # if not os.path.exists(certs_folder_path):
+ # os.makedirs(certs_folder_path)
+ # f = open(f'{certs_parent_folder_path}/certs.tar', 'wb')
+ # for chunk in bits:
+ # f.write(chunk)
+ # f.close()
+ # cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar')
+ # cf.extractall(path=certs_parent_folder_path)
+
+ # logs = container.logs()
+ # logging.info(f'openssl container logs: {logs}')
+
+ # # stop the container
+ # container.kill()
+
+ # logging.info("Created certificates for local deployment")
def __finish_helm_values(self, values):
"""
@@ -370,7 +363,7 @@ def __finish_helm_values(self, values):
if harness[KEY_DATABASE] and not harness[KEY_DATABASE].get('name', None):
harness[KEY_DATABASE]['name'] = app_name.strip() + '-db'
- self.__clear_unused_db_configuration(harness)
+ self._clear_unused_db_configuration(harness)
values_set_legacy(v)
if self.include:
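For context, _clear_unused_db_configuration (its previous body is kept, commented out, further down) prunes engine-specific settings that do not match the configured database type. A hedged, standalone sketch of the same behavior:

    def clear_unused_db_configuration(harness_config):
        database_config = harness_config.get('database') or {}
        database_type = database_config.get('type')
        if database_type is None:
            harness_config.pop('database', None)  # no engine selected: drop the section
            return
        # engine blocks are the sub-dicts that carry an image and ports
        engine_keys = [key for key, value in database_config.items()
                       if isinstance(value, dict) and 'image' in value and 'ports' in value]
        for engine in engine_keys:
            if engine != database_type:
                del database_config[engine]
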
@@ -390,36 +383,36 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- def __clear_unused_db_configuration(self, harness_config):
- database_config = harness_config[KEY_DATABASE]
- database_type = database_config.get('type', None)
- if database_type is None:
- del harness_config[KEY_DATABASE]
- return
- db_specific_keys = [k for k, v in database_config.items()
- if isinstance(v, dict) and 'image' in v and 'ports' in v]
- for db in db_specific_keys:
- if database_type != db:
- del database_config[db]
-
- def image_tag(self, image_name, build_context_path=None, dependencies=()):
- tag = self.tag
- if tag is None and not self.local:
- logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- ignore_path = os.path.join(build_context_path, '.dockerignore')
- ignore = set(DEFAULT_IGNORE)
- if os.path.exists(ignore_path):
- with open(ignore_path) as f:
- ignore = ignore.union({line.strip() for line in f})
- logging.info(f"Ignoring {ignore}")
- tag = generate_tag_from_content(build_context_path, ignore)
- logging.info(f"Content hash: {tag}")
- dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
- logging.info(f"Generated tag: {tag}")
- app_name = image_name.split("/")[-1] # the image name can have a prefix
- self.all_images[app_name] = tag
- return self.registry + image_name + (f':{tag}' if tag else '')
+ # def __clear_unused_db_configuration(self, harness_config):
+ # database_config = harness_config[KEY_DATABASE]
+ # database_type = database_config.get('type', None)
+ # if database_type is None:
+ # del harness_config[KEY_DATABASE]
+ # return
+ # db_specific_keys = [k for k, v in database_config.items()
+ # if isinstance(v, dict) and 'image' in v and 'ports' in v]
+ # for db in db_specific_keys:
+ # if database_type != db:
+ # del database_config[db]
+
+ # def image_tag(self, image_name, build_context_path=None, dependencies=()):
+ # tag = self.tag
+ # if tag is None and not self.local:
+ # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
+ # ignore_path = os.path.join(build_context_path, '.dockerignore')
+ # ignore = set(DEFAULT_IGNORE)
+ # if os.path.exists(ignore_path):
+ # with open(ignore_path) as f:
+ # ignore = ignore.union({line.strip() for line in f})
+ # logging.info(f"Ignoring {ignore}")
+ # tag = generate_tag_from_content(build_context_path, ignore)
+ # logging.info(f"Content hash: {tag}")
+ # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
+ # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
+ # logging.info(f"Generated tag: {tag}")
+ # app_name = image_name.split("/")[-1] # the image name can have a prefix
+ # self.all_images[app_name] = tag
+ # return self.registry + image_name + (f':{tag}' if tag else '')
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
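The commented-out image_tag above captures the tagging scheme: when no explicit tag is given for a non-local build, hash the build context (dirhash over the directory, honoring .dockerignore) and fold the tags of build dependencies into a final sha1, so a change anywhere in the dependency chain produces a new tag. Schematically, with illustrative hash values:

    from hashlib import sha1

    def content_tag(context_hash, dependency_tags):
        # combine the context hash with the dependencies' tags, then re-hash
        combined = context_hash + ''.join(dependency_tags)
        return sha1(combined.encode('utf-8')).hexdigest()

    base_tag = content_tag('3f2a0c', [])         # e.g. dirhash of the base image dir
    app_tag = content_tag('9c81d4', [base_tag])  # changes whenever the base changes
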
@@ -487,282 +480,282 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
return values
-def get_included_with_dependencies(values, include):
- app_values = values['apps'].values()
- directly_included = [app for app in app_values if any(
- inc == app[KEY_HARNESS]['name'] for inc in include)]
-
- dependent = set(include)
- for app in directly_included:
- if app['harness']['dependencies'].get('hard', None):
- dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
- if app['harness']['dependencies'].get('soft', None):
- dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
- if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
- dependent.add('accounts')
- if len(dependent) == len(include):
- return dependent
- return get_included_with_dependencies(values, dependent)
-
-
-def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
- pass
-
-
-def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None):
- """
- Searches recursively for helm templates inside the applications and collects the templates in the destination
-
- :param search_root:
- :param dest_helm_chart_path: collected helm templates destination folder
- :param exclude:
- :return:
- """
- app_base_path = os.path.join(search_root, APPS_PATH)
-
- for app_path in get_sub_paths(app_base_path):
- app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
- if app_name in exclude or (include and not any(inc in app_name for inc in include)):
- continue
- template_dir = os.path.join(app_path, 'deploy', 'templates')
- if os.path.exists(template_dir):
- dest_dir = os.path.join(
- dest_helm_chart_path, 'templates', app_name)
-
- logging.info(
- "Collecting templates for application %s to %s", app_name, dest_dir)
- if os.path.exists(dest_dir):
- logging.warning(
- "Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(template_dir, dest_dir)
- else:
- shutil.copytree(template_dir, dest_dir)
- resources_dir = os.path.join(app_path, 'deploy/resources')
- if os.path.exists(resources_dir):
- dest_dir = os.path.join(
- dest_helm_chart_path, 'resources', app_name)
-
- logging.info(
- "Collecting resources for application %s to %s", app_name, dest_dir)
-
- merge_configuration_directories(resources_dir, dest_dir)
-
- subchart_dir = os.path.join(app_path, 'deploy/charts')
- if os.path.exists(subchart_dir):
- dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name)
-
- logging.info(
- "Collecting templates for application %s to %s", app_name, dest_dir)
- if os.path.exists(dest_dir):
- logging.warning(
- "Merging/overriding all files in directory %s", dest_dir)
- merge_configuration_directories(subchart_dir, dest_dir)
- else:
- shutil.copytree(subchart_dir, dest_dir)
-
-
-def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
- if not os.path.exists(base_helm_chart):
- return
- if os.path.exists(dest_helm_chart_path):
- logging.info("Merging/overriding all files in directory %s",
- dest_helm_chart_path)
- merge_configuration_directories(base_helm_chart, dest_helm_chart_path)
- else:
- logging.info("Copying base deployment chart from %s to %s",
- base_helm_chart, dest_helm_chart_path)
- shutil.copytree(base_helm_chart, dest_helm_chart_path)
-
-
-def collect_helm_values(deployment_root, env=()):
- """
- Creates helm values from a cloudharness deployment scaffolding
- """
-
- values_template_path = os.path.join(
- deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml')
-
- values = get_template(values_template_path)
-
- for e in env:
- specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
- f'values-template-{e}.yaml')
- if os.path.exists(specific_template_path):
- logging.info(
- "Specific environment values template found: " + specific_template_path)
- with open(specific_template_path) as f:
- values_env_specific = yaml.safe_load(f)
- values = dict_merge(values, values_env_specific)
- return values
-
-
-def init_app_values(deployment_root, exclude, values=None):
- values = values if values is not None else {}
- app_base_path = os.path.join(deployment_root, APPS_PATH)
- overridden_template_path = os.path.join(
- deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
- default_values_path = os.path.join(
- CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-
- for app_path in get_sub_paths(app_base_path):
-
- app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
-
- if app_name in exclude:
- continue
- app_key = app_name.replace('-', '_')
- if app_key not in values:
- default_values = get_template(default_values_path)
- values[app_key] = default_values
- overridden_defaults = get_template(overridden_template_path)
- values[app_key] = dict_merge(values[app_key], overridden_defaults)
-
- return values
-
-
-def values_from_legacy(values):
- if KEY_HARNESS not in values:
- values[KEY_HARNESS] = {}
- harness = values[KEY_HARNESS]
- if KEY_SERVICE not in harness:
- harness[KEY_SERVICE] = {}
- if KEY_DEPLOYMENT not in harness:
- harness[KEY_DEPLOYMENT] = {}
- if KEY_DATABASE not in harness:
- harness[KEY_DATABASE] = {}
-
- if 'subdomain' in values:
- harness['subdomain'] = values['subdomain']
- if 'autodeploy' in values:
- harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
- if 'autoservice' in values:
- harness[KEY_SERVICE]['auto'] = values['autoservice']
- if 'secureme' in values:
- harness['secured'] = values['secureme']
- if 'resources' in values:
- harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
- if 'replicas' in values:
- harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
- if 'image' in values:
- harness[KEY_DEPLOYMENT]['image'] = values['image']
- if 'port' in values:
- harness[KEY_DEPLOYMENT]['port'] = values['port']
- harness[KEY_SERVICE]['port'] = values['port']
-
-
-def values_set_legacy(values):
- harness = values[KEY_HARNESS]
- if 'image' in harness[KEY_DEPLOYMENT]:
- values['image'] = harness[KEY_DEPLOYMENT]['image']
-
- values['name'] = harness['name']
- if harness[KEY_DEPLOYMENT].get('port', None):
- values['port'] = harness[KEY_DEPLOYMENT]['port']
- if 'resources' in harness[KEY_DEPLOYMENT]:
- values['resources'] = harness[KEY_DEPLOYMENT]['resources']
-
-
-def generate_tag_from_content(content_path, ignore=()):
- from dirhash import dirhash
- return dirhash(content_path, 'sha1', ignore=ignore)
-
-
-def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
- if isinstance(values, dict):
- newenvs = list(envs)
- for key, value in values.items():
- v = extract_env_variables_from_values(
- value, envs, f"{prefix}_{key}".replace('-', '_').upper())
- if key in ('name', 'port', 'subdomain'):
- newenvs.extend(v)
- return newenvs
- else:
- return [env_variable(prefix, values)]
-
-
-def create_env_variables(values):
- for app_name, value in values[KEY_APPS].items():
- if KEY_HARNESS in value:
- values['env'].extend(extract_env_variables_from_values(
- value[KEY_HARNESS], prefix='CH_' + app_name))
- values['env'].append(env_variable('CH_DOMAIN', values['domain']))
- values['env'].append(env_variable(
- 'CH_IMAGE_REGISTRY', values['registry']['name']))
- values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
-
-
-def hosts_info(values):
- domain = values['domain']
- namespace = values['namespace']
- subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
- KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
- KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
- try:
- ip = get_cluster_ip()
- except:
- logging.warning('Cannot get cluster ip')
- return
- logging.info(
- "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
-
- deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
- for app in values[KEY_APPS].values() if KEY_HARNESS in app)
-
- logging.info(
- "\nTo run locally some apps, also those references may be needed")
- for appname in values[KEY_APPS]:
- app = values[KEY_APPS][appname]['harness']
- if 'deployment' not in app:
- continue
- print(
- "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
- app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
-
- print(
- f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
-
-
-class ValuesValidationException(Exception):
- pass
-
-
-def validate_helm_values(values):
- validate_dependencies(values)
-
-
-def validate_dependencies(values):
- all_apps = {a for a in values["apps"]}
- for app in all_apps:
- app_values = values["apps"][app]
- if 'dependencies' in app_values[KEY_HARNESS]:
- soft_dependencies = {
- d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
- not_found = {d for d in soft_dependencies if d not in all_apps}
- if not_found:
- logging.warning(
- f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
- hard_dependencies = {
- d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
- not_found = {d for d in hard_dependencies if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
-
- build_dependencies = {
- d for d in app_values[KEY_HARNESS]['dependencies']['build']}
-
- not_found = {
- d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
- not_found = {d for d in not_found if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
-
- if 'use_services' in app_values[KEY_HARNESS]:
- service_dependencies = {d['name'].replace(
- "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
-
- not_found = {d for d in service_dependencies if d not in all_apps}
- if not_found:
- raise ValuesValidationException(
- f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
+# def get_included_with_dependencies(values, include):
+# app_values = values['apps'].values()
+# directly_included = [app for app in app_values if any(
+# inc == app[KEY_HARNESS]['name'] for inc in include)]
+
+# dependent = set(include)
+# for app in directly_included:
+# if app['harness']['dependencies'].get('hard', None):
+# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
+# if app['harness']['dependencies'].get('soft', None):
+# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
+# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
+# dependent.add('accounts')
+# if len(dependent) == len(include):
+# return dependent
+# return get_included_with_dependencies(values, dependent)
+
+
+# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
+# pass
+
+
+# def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None):
+# """
+# Searches recursively for helm templates inside the applications and collects the templates in the destination
+
+# :param search_root:
+# :param dest_helm_chart_path: collected helm templates destination folder
+# :param exclude:
+# :return:
+# """
+# app_base_path = os.path.join(search_root, APPS_PATH)
+
+# for app_path in get_sub_paths(app_base_path):
+# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+# if app_name in exclude or (include and not any(inc in app_name for inc in include)):
+# continue
+# template_dir = os.path.join(app_path, 'deploy', 'templates')
+# if os.path.exists(template_dir):
+# dest_dir = os.path.join(
+# dest_helm_chart_path, 'templates', app_name)
+
+# logging.info(
+# "Collecting templates for application %s to %s", app_name, dest_dir)
+# if os.path.exists(dest_dir):
+# logging.warning(
+# "Merging/overriding all files in directory %s", dest_dir)
+# merge_configuration_directories(template_dir, dest_dir)
+# else:
+# shutil.copytree(template_dir, dest_dir)
+# resources_dir = os.path.join(app_path, 'deploy/resources')
+# if os.path.exists(resources_dir):
+# dest_dir = os.path.join(
+# dest_helm_chart_path, 'resources', app_name)
+
+# logging.info(
+# "Collecting resources for application %s to %s", app_name, dest_dir)
+
+# merge_configuration_directories(resources_dir, dest_dir)
+
+# subchart_dir = os.path.join(app_path, 'deploy/charts')
+# if os.path.exists(subchart_dir):
+# dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name)
+
+# logging.info(
+# "Collecting templates for application %s to %s", app_name, dest_dir)
+# if os.path.exists(dest_dir):
+# logging.warning(
+# "Merging/overriding all files in directory %s", dest_dir)
+# merge_configuration_directories(subchart_dir, dest_dir)
+# else:
+# shutil.copytree(subchart_dir, dest_dir)
+
+
+# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
+# if not os.path.exists(base_helm_chart):
+# return
+# if os.path.exists(dest_helm_chart_path):
+# logging.info("Merging/overriding all files in directory %s",
+# dest_helm_chart_path)
+# merge_configuration_directories(base_helm_chart, dest_helm_chart_path)
+# else:
+# logging.info("Copying base deployment chart from %s to %s",
+# base_helm_chart, dest_helm_chart_path)
+# shutil.copytree(base_helm_chart, dest_helm_chart_path)
+
+
+# def collect_helm_values(deployment_root, env=()):
+# """
+# Creates helm values from a cloudharness deployment scaffolding
+# """
+
+# values_template_path = os.path.join(
+# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml')
+
+# values = get_template(values_template_path)
+
+# for e in env:
+# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
+# f'values-template-{e}.yaml')
+# if os.path.exists(specific_template_path):
+# logging.info(
+# "Specific environment values template found: " + specific_template_path)
+# with open(specific_template_path) as f:
+# values_env_specific = yaml.safe_load(f)
+# values = dict_merge(values, values_env_specific)
+# return values
+
+
+# def init_app_values(deployment_root, exclude, values=None):
+# values = values if values is not None else {}
+# app_base_path = os.path.join(deployment_root, APPS_PATH)
+# overridden_template_path = os.path.join(
+# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+# default_values_path = os.path.join(
+# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
+
+# for app_path in get_sub_paths(app_base_path):
+
+# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
+
+# if app_name in exclude:
+# continue
+# app_key = app_name.replace('-', '_')
+# if app_key not in values:
+# default_values = get_template(default_values_path)
+# values[app_key] = default_values
+# overridden_defaults = get_template(overridden_template_path)
+# values[app_key] = dict_merge(values[app_key], overridden_defaults)
+
+# return values
+
+
+# def values_from_legacy(values):
+# if KEY_HARNESS not in values:
+# values[KEY_HARNESS] = {}
+# harness = values[KEY_HARNESS]
+# if KEY_SERVICE not in harness:
+# harness[KEY_SERVICE] = {}
+# if KEY_DEPLOYMENT not in harness:
+# harness[KEY_DEPLOYMENT] = {}
+# if KEY_DATABASE not in harness:
+# harness[KEY_DATABASE] = {}
+
+# if 'subdomain' in values:
+# harness['subdomain'] = values['subdomain']
+# if 'autodeploy' in values:
+# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
+# if 'autoservice' in values:
+# harness[KEY_SERVICE]['auto'] = values['autoservice']
+# if 'secureme' in values:
+# harness['secured'] = values['secureme']
+# if 'resources' in values:
+# harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
+# if 'replicas' in values:
+# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
+# if 'image' in values:
+# harness[KEY_DEPLOYMENT]['image'] = values['image']
+# if 'port' in values:
+# harness[KEY_DEPLOYMENT]['port'] = values['port']
+# harness[KEY_SERVICE]['port'] = values['port']
+
+
+# def values_set_legacy(values):
+# harness = values[KEY_HARNESS]
+# if 'image' in harness[KEY_DEPLOYMENT]:
+# values['image'] = harness[KEY_DEPLOYMENT]['image']
+
+# values['name'] = harness['name']
+# if harness[KEY_DEPLOYMENT].get('port', None):
+# values['port'] = harness[KEY_DEPLOYMENT]['port']
+# if 'resources' in harness[KEY_DEPLOYMENT]:
+# values['resources'] = harness[KEY_DEPLOYMENT]['resources']
+
+
+# def generate_tag_from_content(content_path, ignore=()):
+# from dirhash import dirhash
+# return dirhash(content_path, 'sha1', ignore=ignore)
+
+
+# def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
+# if isinstance(values, dict):
+# newenvs = list(envs)
+# for key, value in values.items():
+# v = extract_env_variables_from_values(
+# value, envs, f"{prefix}_{key}".replace('-', '_').upper())
+# if key in ('name', 'port', 'subdomain'):
+# newenvs.extend(v)
+# return newenvs
+# else:
+# return [env_variable(prefix, values)]
+
+
+# def create_env_variables(values):
+# for app_name, value in values[KEY_APPS].items():
+# if KEY_HARNESS in value:
+# values['env'].extend(extract_env_variables_from_values(
+# value[KEY_HARNESS], prefix='CH_' + app_name))
+# values['env'].append(env_variable('CH_DOMAIN', values['domain']))
+# values['env'].append(env_variable(
+# 'CH_IMAGE_REGISTRY', values['registry']['name']))
+# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
+
+
+# def hosts_info(values):
+# domain = values['domain']
+# namespace = values['namespace']
+# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
+# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
+# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
+# try:
+# ip = get_cluster_ip()
+# except:
+# logging.warning('Cannot get cluster ip')
+# return
+# logging.info(
+# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
+
+# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
+# for app in values[KEY_APPS].values() if KEY_HARNESS in app)
+
+# logging.info(
+# "\nTo run locally some apps, also those references may be needed")
+# for appname in values[KEY_APPS]:
+# app = values[KEY_APPS][appname]['harness']
+# if 'deployment' not in app:
+# continue
+# print(
+# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
+# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
+
+# print(
+# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
+
+
+# class ValuesValidationException(Exception):
+# pass
+
+
+# def validate_helm_values(values):
+# validate_dependencies(values)
+
+
+# def validate_dependencies(values):
+# all_apps = {a for a in values["apps"]}
+# for app in all_apps:
+# app_values = values["apps"][app]
+# if 'dependencies' in app_values[KEY_HARNESS]:
+# soft_dependencies = {
+# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
+# not_found = {d for d in soft_dependencies if d not in all_apps}
+# if not_found:
+# logging.warning(
+# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
+# hard_dependencies = {
+# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
+# not_found = {d for d in hard_dependencies if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
+
+# build_dependencies = {
+# d for d in app_values[KEY_HARNESS]['dependencies']['build']}
+
+# not_found = {
+# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
+# not_found = {d for d in not_found if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
+
+# if 'use_services' in app_values[KEY_HARNESS]:
+# service_dependencies = {d['name'].replace(
+# "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
+
+# not_found = {d for d in service_dependencies if d not in all_apps}
+# if not_found:
+# raise ValuesValidationException(
+# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
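The retired validate_dependencies encodes a simple policy: missing soft dependencies only log a warning, while missing hard (and build or service) dependencies raise. A condensed sketch of the hard/soft part:

    import logging

    class ValuesValidationException(Exception):
        pass

    def check_dependencies(app, declared, all_apps):
        soft_missing = {d.replace('-', '_') for d in declared.get('soft', ())} - all_apps
        if soft_missing:
            logging.warning("Soft dependencies specified for application %s not found: %s",
                            app, ','.join(soft_missing))
        hard_missing = {d.replace('-', '_') for d in declared.get('hard', ())} - all_apps
        if hard_missing:
            raise ValuesValidationException(
                f"Bad application dependencies specified for application {app}: "
                f"{','.join(hard_missing)}")
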
diff --git a/tools/deployment-cli-tools/tests/test_codefresh.py b/tools/deployment-cli-tools/tests/test_codefresh.py
index 7e3abd4d5..e4505a9f0 100644
--- a/tools/deployment-cli-tools/tests/test_codefresh.py
+++ b/tools/deployment-cli-tools/tests/test_codefresh.py
@@ -1,6 +1,7 @@
from ch_cli_tools.preprocessing import preprocess_build_overrides
from ch_cli_tools.helm import *
+from ch_cli_tools.configurationgenerator import *
from ch_cli_tools.codefresh import *
HERE = os.path.dirname(os.path.realpath(__file__))
@@ -126,7 +127,7 @@ def test_create_codefresh_configuration():
assert len(
tstep['commands']) == 2, "Unit test commands are not properly loaded from the unit test configuration file"
assert tstep['commands'][0] == "tox", "Unit test commands are not properly loaded from the unit test configuration file"
-
+
assert len(l1_steps[CD_BUILD_STEP_DEPENDENCIES]['steps']) == 3, "3 clone steps should be included as we have 2 dependencies from myapp, plus cloudharness"
finally:
shutil.rmtree(BUILD_MERGE_DIR)
@@ -213,7 +214,7 @@ def test_create_codefresh_configuration_tests():
assert "test-api" in st_build_test_steps["test-api"]["dockerfile"], "test-api image must be built from root context"
-
+
e2e_steps = l1_steps[CD_E2E_TEST_STEP]['scale']
@@ -251,7 +252,7 @@ def test_create_codefresh_configuration_tests():
finally:
shutil.rmtree(BUILD_MERGE_DIR)
-
+
values = create_helm_chart(
[CLOUDHARNESS_ROOT, RESOURCES],
output_path=OUT,
diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py
index ed53ab863..5fa269d64 100644
--- a/tools/deployment-cli-tools/tests/test_helm.py
+++ b/tools/deployment-cli-tools/tests/test_helm.py
@@ -1,6 +1,7 @@
import shutil
from ch_cli_tools.helm import *
+from ch_cli_tools.configurationgenerator import *
HERE = os.path.dirname(os.path.realpath(__file__))
RESOURCES = os.path.join(HERE, 'resources')
@@ -80,8 +81,9 @@ def exists(*args):
shutil.rmtree(OUT)
-def test_collect_helm_values_noreg_noinclude():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+def test_collect_helm_values_noreg_noinclude(tmp_path):
+ out_path = tmp_path / 'test_collect_helm_values_noreg_noinclude'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_path, domain="my.local",
namespace='test', env='dev', local=False, tag=1)
# Auto values
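Switching the test to pytest's built-in tmp_path fixture gives every run an isolated, automatically managed directory, which is why the manual shutil.rmtree(OUT) teardown disappears later in this diff. Minimal usage pattern:

    def test_writes_into_isolated_dir(tmp_path):
        out = tmp_path / 'deployment'  # tmp_path is a fresh pathlib.Path per test
        out.mkdir()
        (out / 'values.yaml').write_text('name: test')
        assert (out / 'values.yaml').read_text() == 'name: test'
        # no explicit cleanup: pytest rotates old tmp_path directories itself
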
@@ -119,7 +121,7 @@ def test_collect_helm_values_noreg_noinclude():
assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True
assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False
- helm_path = os.path.join(OUT, HELM_CHART_PATH)
+ helm_path = out_path / HELM_CHART_PATH
def exists(*args):
return os.path.exists(os.path.join(*args))
@@ -137,8 +139,6 @@ def exists(*args):
assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1'
assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1'
- shutil.rmtree(OUT)
-
def test_collect_helm_values_precedence():
values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
@@ -302,13 +302,13 @@ def create():
return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'],
exclude=['events'], domain="my.local",
namespace='test', env='dev', local=False, tag=None, registry='reg')
-
+
BASE_KEY = "cloudharness-base"
values = create()
# Auto values are set by using the directory hash
assert 'reg/cloudharness/myapp:' in values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image']
- assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image
+ assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image
assert 'cloudharness/myapp-mytask' in values[KEY_TASK_IMAGES]['myapp-mytask']
assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == values.apps['myapp'].harness.deployment.image
v1 = values.apps['myapp'].harness.deployment.image
@@ -320,7 +320,7 @@ def create():
assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value"
assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore"
-
+
try:
fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt')
with open(fname, 'w') as f:
@@ -355,7 +355,7 @@ def create():
assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore"
finally:
os.remove(fname)
-
+
# Dependencies test: if a dependency is changed, the hash should change
@@ -366,7 +366,7 @@ def create():
f.write('a')
values = create()
-
+
assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change"
assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change"
finally:
@@ -379,12 +379,9 @@ def create():
f.write('a')
values = create()
-
+
assert b1 != values["task-images"][BASE_KEY], "Content for base image is changed, the hash should change"
assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change"
assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change"
finally:
os.remove(fname)
-
-
-
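[editor's note] The hunks above begin migrating the helm tests from the shared OUT directory under /tmp to pytest's built-in tmp_path fixture, so each test writes into its own temporary folder and the trailing shutil.rmtree(OUT) cleanup can be dropped. A minimal sketch of the pattern (the test name and file contents are illustrative, not the project's actual assertions):

    # Sketch: pytest injects a unique pathlib.Path per test via the
    # tmp_path fixture; the directory is managed by pytest, so no
    # manual cleanup is required afterwards.
    def test_writes_into_isolated_folder(tmp_path):
        out_path = tmp_path / 'my_test_output'
        out_path.mkdir()
        (out_path / 'values.yaml').write_text('tag: 1\n')

        # Each run gets a fresh directory, so assertions cannot be
        # polluted by artifacts left behind by other tests.
        assert (out_path / 'values.yaml').exists()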
From 368725aa5af146a1fcdaff219fd1cfbb5aa4d4c7 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 07:37:54 -0600
Subject: [PATCH 56/94] CH-100 Clean imports/comments
---
.../ch_cli_tools/dockercompose.py | 572 +-----------------
.../deployment-cli-tools/ch_cli_tools/helm.py | 547 +----------------
2 files changed, 6 insertions(+), 1113 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 0e75ed7ea..f65e352bf 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -4,23 +4,14 @@
import yaml
from ruamel.yaml import YAML
import os
-import shutil
import logging
-from hashlib import sha1
import subprocess
-from functools import cache
-import tarfile
-from docker import from_env as DockerClient
-from pathlib import Path
import copy
-from . import HERE, CH_ROOT
-from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
- DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE
-from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
- get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
- find_dockerfiles_paths, find_file_paths
+from cloudharness_utils.constants import VALUES_MANUAL_PATH, COMPOSE
+from .utils import get_cluster_ip, image_name_from_dockerfile_path, get_template, \
+ merge_to_yaml_file, dict_merge, app_name_from_path, find_dockerfiles_paths, find_file_paths
from .models import HarnessMainConfig
@@ -38,57 +29,6 @@ def create_docker_compose_configuration(root_paths, tag='latest', registry='', l
class CloudHarnessDockerCompose(ConfigurationGenerator):
- # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
- # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- # namespace=None, templates_path=HELM_PATH):
- # assert domain, 'A domain must be specified'
- # self.root_paths = [Path(r) for r in root_paths]
- # self.tag = tag
- # if registry and not registry.endswith('/'):
- # self.registry = f'{registry}/'
- # else:
- # self.registry = registry
- # self.local = local
- # self.domain = domain
- # self.exclude = exclude
- # self.secured = secured
- # self.output_path = Path(output_path)
- # self.include = include
- # self.registry_secret = registry_secret
- # self.tls = tls
- # self.env = env
- # self.namespace = namespace
-
- # self.templates_path = templates_path
- # self.dest_deployment_path = self.output_path / templates_path
- # self.helm_chart_path = self.dest_deployment_path / 'Chart.yaml'
- # self.__init_deployment()
-
- # self.static_images = set()
- # self.base_images = {}
- # self.all_images = {}
-
- # def __init_deployment(self):
- # """
- # Create the base helm chart
- # """
- # if self.dest_deployment_path.exists():
- # shutil.rmtree(self.dest_deployment_path)
- # # Initialize with default
- # copy_merge_base_deployment(self.dest_deployment_path, Path(CH_ROOT) / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
-
- # # Override for every cloudharness scaffolding
- # for root_path in self.root_paths:
- # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
- # base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
- # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
- # dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
-
- # def __adjust_missing_values(self, helm_values):
- # if 'name' not in helm_values:
- # with open(self.helm_chart_path) as f:
- # chart_idx_content = yaml.safe_load(f)
- # helm_values['name'] = chart_idx_content['name'].lower()
def process_values(self) -> HarnessMainConfig:
"""
@@ -176,133 +116,6 @@ def __post_process_multiple_document_docker_compose(self, yaml_document):
main_document = document # we need to save the main document later
yaml_handler.dump(main_document, yaml_document)
- # def __process_applications(self, helm_values, base_image_name):
- # for root_path in self.root_paths:
- # app_values = init_app_values(
- # root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
- # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- # app_values)
-
- # app_base_path = root_path / APPS_PATH
- # app_values = self.collect_app_values(
- # app_base_path, base_image_name=base_image_name)
- # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- # app_values)
-
- # def collect_app_values(self, app_base_path, base_image_name=None):
- # values = {}
-
- # for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
- # app_name = app_name_from_path(f"{app_path.relative_to(app_base_path)}")
-
- # if app_name in self.exclude:
- # continue
- # app_key = app_name.replace('-', '_')
-
- # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
-
- # # dockerfile_path = next(app_path.rglob('**/Dockerfile'), None)
- # # # for dockerfile_path in app_path.rglob('**/Dockerfile'):
- # # # parent_name = dockerfile_path.parent.name.replace("-", "_")
- # # # if parent_name == app_key:
- # # # app_values['build'] = {
- # # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
- # # # 'dockerfile': "Dockerfile",
- # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # # # }
- # # # elif "tasks/" in f"{dockerfile_path}":
- # # # parent_name = parent_name.upper()
- # # # values.setdefault("task-images-build", {})[parent_name] = {
- # # # 'dockerfile': "Dockerfile",
- # # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # # # }
- # # # import ipdb; ipdb.set_trace() # fmt: skip
-
- # # if dockerfile_path:
- # # app_values['build'] = {
- # # # 'dockerfile': f"{dockerfile_path.relative_to(app_path)}",
- # # 'dockerfile': "Dockerfile",
- # # 'context': os.path.relpath(dockerfile_path.parent, self.dest_deployment_path.parent),
- # # }
-
- # values[app_key] = dict_merge(
- # values[app_key], app_values) if app_key in values else app_values
-
- # return values
-
- # def __init_static_images(self, base_image_name):
- # for static_img_dockerfile in self.static_images:
- # img_name = image_name_from_dockerfile_path(os.path.basename(
- # static_img_dockerfile), base_name=base_image_name)
- # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=static_img_dockerfile)
-
- # def __assign_static_build_dependencies(self, helm_values):
- # for static_img_dockerfile in self.static_images:
- # key = os.path.basename(static_img_dockerfile)
- # if key in helm_values[KEY_TASK_IMAGES]:
- # dependencies = guess_build_dependencies_from_dockerfile(
- # static_img_dockerfile)
- # for dep in dependencies:
- # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
- # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
- # # helm_values.setdefault(KEY_TASK_IMAGES_BUILD, {})[dep] = {
- # # 'context': os.path.relpath(static_img_dockerfile, self.dest_deployment_path.parent),
- # # 'dockerfile': 'Dockerfile',
- # # }
-
- # for image_name in helm_values[KEY_TASK_IMAGES].keys():
- # if image_name in self.exclude:
- # del helm_values[KEY_TASK_IMAGES][image_name]
- # # del helm_values[KEY_TASK_IMAGES_BUILD][image_name]
-
- # def __init_base_images(self, base_image_name):
-
- # for root_path in self.root_paths:
- # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
- # img_name = image_name_from_dockerfile_path(
- # os.path.basename(base_img_dockerfile), base_name=base_image_name)
- # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=root_path)
-
- # self.static_images.update(find_dockerfiles_paths(
- # os.path.join(root_path, STATIC_IMAGES_PATH)))
- # return self.base_images
-
- # def __init_test_images(self, base_image_name):
- # test_images = {}
- # for root_path in self.root_paths:
- # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
- # img_name = image_name_from_dockerfile_path(
- # os.path.basename(base_img_dockerfile), base_name=base_image_name)
- # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=base_img_dockerfile)
-
- # return test_images
-
-
- # def __find_static_dockerfile_paths(self, root_path):
- # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
-
- # def __merge_base_helm_values(self, helm_values):
- # # Override for every cloudharness scaffolding
- # for root_path in self.root_paths:
- # helm_values = dict_merge(
- # helm_values,
- # collect_helm_values(root_path, env=self.env)
- # )
-
- # return helm_values
-
- # def __get_default_helm_values(self):
- # ch_root_path = Path(CH_ROOT)
- # values_yaml_path = ch_root_path / DEPLOYMENT_CONFIGURATION_PATH / HELM_PATH / 'values.yaml'
- # helm_values = get_template(values_yaml_path)
- # helm_values = dict_merge(helm_values,
- # collect_helm_values(ch_root_path, env=self.env))
-
- # return helm_values
-
def __get_default_helm_values_with_secrets(self, helm_values):
helm_values = copy.deepcopy(helm_values)
# {{- $values_copy := deepCopy .Values }}
@@ -315,77 +128,6 @@ def __get_default_helm_values_with_secrets(self, helm_values):
helm_values['apps'][key]['harness']['secrets'] = {}
return helm_values
- # def create_tls_certificate(self, helm_values):
- # if not self.tls:
- # helm_values['tls'] = None
- # return
- # if not self.local:
- # return
- # helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
-
- # bootstrap_file = 'bootstrap.sh'
- # certs_parent_folder_path = self.output_path / 'helm' / 'resources'
- # certs_folder_path = certs_parent_folder_path / 'certs'
-
- # # if os.path.exists(os.path.join(certs_folder_path)):
- # if certs_folder_path.exists():
- # # don't overwrite the certificate if it exists
- # return
-
- # try:
- # client = DockerClient()
- # client.ping()
- # except:
- # raise ConnectionRefusedError(
- # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
-
- # # Create CA and sign cert for domain
- # container = client.containers.run(image='frapsoft/openssl',
- # command=f'sleep 60',
- # entrypoint="",
- # detach=True,
- # environment=[
- # f"DOMAIN={self.domain}"],
- # )
-
- # container.exec_run('mkdir -p /mnt/vol1')
- # container.exec_run('mkdir -p /mnt/certs')
-
- # # copy bootstrap file
- # cur_dir = os.getcwd()
- # os.chdir(Path(HERE) / 'scripts')
- # tar = tarfile.open(bootstrap_file + '.tar', mode='w')
- # try:
- # tar.add(bootstrap_file)
- # finally:
- # tar.close()
- # data = open(bootstrap_file + '.tar', 'rb').read()
- # container.put_archive('/mnt/vol1', data)
- # os.chdir(cur_dir)
- # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
-
- # # exec bootstrap file
- # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
-
- # # retrieve the certs from the container
- # bits, stat = container.get_archive('/mnt/certs')
- # if not certs_folder_path.exists():
- # certs_folder_path.mkdir(parents=True)
- # certs_tar = certs_parent_folder_path / 'certs.tar'
- # with open(certs_tar, 'wb') as f:
- # for chunk in bits:
- # f.write(chunk)
- # cf = tarfile.open(certs_tar)
- # cf.extractall(path=certs_parent_folder_path)
-
- # logs = container.logs()
- # logging.info(f'openssl container logs: {logs}')
-
- # # stop the container
- # container.kill()
-
- # logging.info("Created certificates for local deployment")
-
def __finish_helm_values(self, values):
"""
Sets default overridden values
@@ -456,37 +198,6 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- # def __clear_unused_db_configuration(self, harness_config):
- # database_config = harness_config[KEY_DATABASE]
- # database_type = database_config.get('type', None)
- # if database_type is None:
- # del harness_config[KEY_DATABASE]
- # return
- # db_specific_keys = [k for k, v in database_config.items()
- # if isinstance(v, dict) and 'image' in v and 'ports' in v]
- # for db in db_specific_keys:
- # if database_type != db:
- # del database_config[db]
-
- # def image_tag(self, image_name, build_context_path=None, dependencies=()):
- # tag = self.tag
- # if tag is None and not self.local:
- # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- # ignore_path = os.path.join(build_context_path, '.dockerignore')
- # ignore = set(DEFAULT_IGNORE)
- # if os.path.exists(ignore_path):
- # with open(ignore_path) as f:
- # ignore = ignore.union({line.strip() for line in f})
- # logging.info(f"Ignoring {ignore}")
- # tag = generate_tag_from_content(build_context_path, ignore)
- # logging.info(f"Content hash: {tag}")
- # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
- # logging.info(f"Generated tag: {tag}")
- # app_name = image_name.split("/")[-1] # the image name can have a prefix
- # self.all_images[app_name] = tag
- # return self.registry + image_name + (f':{tag}' if tag else '')
-
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
@@ -581,283 +292,6 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path):
helm_values[KEY_HARNESS]['deployment']['args'] = f'/usr/src/app/{os.path.basename(task_main_file)}/__main__.py'
-# def get_included_with_dependencies(values, include):
-# app_values = values['apps'].values()
-# directly_included = [app for app in app_values if any(
-# inc == app[KEY_HARNESS]['name'] for inc in include)]
-
-# dependent = set(include)
-# for app in directly_included:
-# if app['harness']['dependencies'].get('hard', None):
-# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
-# if app['harness']['dependencies'].get('soft', None):
-# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
-# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
-# dependent.add('accounts')
-# if len(dependent) == len(include):
-# return dependent
-# return get_included_with_dependencies(values, dependent)
-
-
-# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
-# pass
-
-
-# def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_path, exclude=(), include=None):
-# """
-# Searches recursively for helm templates inside the applications and collects the templates in the destination
-
-# :param search_root:
-# :param dest_helm_chart_path: collected helm templates destination folder
-# :param exclude:
-# :return:
-# """
-# app_base_path = search_root / APPS_PATH
-
-# for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
-# app_name = app_name_from_path(os.path.relpath(f"{app_path}", app_base_path))
-# if app_name in exclude or (include and not any(inc in app_name for inc in include)):
-# continue
-# template_dir = app_path / 'deploy' / f'templates-{templates_path}'
-# if template_dir.exists():
-# dest_dir = dest_helm_chart_path / 'templates' / app_name
-
-# logging.info(
-# "Collecting templates for application %s to %s", app_name, dest_dir)
-# if dest_dir.exists():
-# logging.warning(
-# "Merging/overriding all files in directory %s", dest_dir)
-# merge_configuration_directories(f"{template_dir}", f"{dest_dir}")
-# else:
-# shutil.copytree(template_dir, dest_dir)
-# resources_dir = app_path / 'deploy' / 'resources'
-# if resources_dir.exists():
-# dest_dir = dest_helm_chart_path / 'resources' / app_name
-
-# logging.info(
-# "Collecting resources for application %s to %s", app_name, dest_dir)
-
-# merge_configuration_directories(f"{resources_dir}", f"{dest_dir}")
-
-# # subchart_dir = app_path / 'deploy/charts'
-# # if subchart_dir.exists():
-# # dest_dir = dest_helm_chart_path / 'charts' / app_name
-
-# # logging.info(
-# # "Collecting templates for application %s to %s", app_name, dest_dir)
-# # if dest_dir.exists():
-# # logging.warning(
-# # "Merging/overriding all files in directory %s", dest_dir)
-# # merge_configuration_directories(f"{subchart_dir}", f"{dest_dir}")
-# # else:
-# # shutil.copytree(subchart_dir, dest_dir)
-
-
-# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
-# if not base_helm_chart.exists():
-# return
-# if dest_helm_chart_path.exists():
-# logging.info("Merging/overriding all files in directory %s",
-# dest_helm_chart_path)
-# merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
-# else:
-# logging.info("Copying base deployment chart from %s to %s",
-# base_helm_chart, dest_helm_chart_path)
-# shutil.copytree(base_helm_chart, dest_helm_chart_path)
-
-
-# def collect_helm_values(deployment_root, env=()):
-# """
-# Creates helm values from a cloudharness deployment scaffolding
-# """
-# values_template_path = deployment_root / DEPLOYMENT_CONFIGURATION_PATH / 'values-template.yaml'
-
-# values = get_template(values_template_path)
-
-# for e in env:
-# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
-# f'values-template-{e}.yaml')
-# if os.path.exists(specific_template_path):
-# logging.info(
-# "Specific environment values template found: " + specific_template_path)
-# with open(specific_template_path) as f:
-# values_env_specific = yaml.safe_load(f)
-# values = dict_merge(values, values_env_specific)
-# return values
-
-
-# def init_app_values(deployment_root, exclude, values=None):
-# values = values if values is not None else {}
-# app_base_path = os.path.join(deployment_root, APPS_PATH)
-# overridden_template_path = os.path.join(
-# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-# default_values_path = os.path.join(
-# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-
-# for app_path in get_sub_paths(app_base_path):
-
-# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
-
-# if app_name in exclude:
-# continue
-# app_key = app_name.replace('-', '_')
-# if app_key not in values:
-# default_values = get_template(default_values_path)
-# values[app_key] = default_values
-# overridden_defaults = get_template(overridden_template_path)
-# values[app_key] = dict_merge(values[app_key], overridden_defaults)
-
-# return values
-
-
-# def values_from_legacy(values):
-# if KEY_HARNESS not in values:
-# values[KEY_HARNESS] = {}
-# harness = values[KEY_HARNESS]
-# if KEY_SERVICE not in harness:
-# harness[KEY_SERVICE] = {}
-# if KEY_DEPLOYMENT not in harness:
-# harness[KEY_DEPLOYMENT] = {}
-# if KEY_DATABASE not in harness:
-# harness[KEY_DATABASE] = {}
-
-# if 'subdomain' in values:
-# harness['subdomain'] = values['subdomain']
-# if 'autodeploy' in values:
-# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
-# if 'autoservice' in values:
-# harness[KEY_SERVICE]['auto'] = values['autoservice']
-# if 'secureme' in values:
-# harness['secured'] = values['secureme']
-# if 'resources' in values:
-# harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
-# if 'replicas' in values:
-# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
-# if 'image' in values:
-# harness[KEY_DEPLOYMENT]['image'] = values['image']
-# if 'port' in values:
-# harness[KEY_DEPLOYMENT]['port'] = values['port']
-# harness[KEY_SERVICE]['port'] = values['port']
-
-
-# def values_set_legacy(values):
-# harness = values[KEY_HARNESS]
-# if 'image' in harness[KEY_DEPLOYMENT]:
-# values['image'] = harness[KEY_DEPLOYMENT]['image']
-
-# values['name'] = harness['name']
-# if harness[KEY_DEPLOYMENT].get('port', None):
-# values['port'] = harness[KEY_DEPLOYMENT]['port']
-# if 'resources' in harness[KEY_DEPLOYMENT]:
-# values['resources'] = harness[KEY_DEPLOYMENT]['resources']
-
-
-# def generate_tag_from_content(content_path, ignore=()):
-# from dirhash import dirhash
-# return dirhash(content_path, 'sha1', ignore=ignore)
-
-
-# def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
-# if isinstance(values, dict):
-# newenvs = list(envs)
-# for key, value in values.items():
-# v = extract_env_variables_from_values(
-# value, envs, f"{prefix}_{key}".replace('-', '_').upper())
-# if key in ('name', 'port', 'subdomain'):
-# newenvs.extend(v)
-# return newenvs
-# else:
-# return [env_variable(prefix, values)]
-
-
-# def create_env_variables(values):
-# for app_name, value in values[KEY_APPS].items():
-# if KEY_HARNESS in value:
-# values['env'].extend(extract_env_variables_from_values(
-# value[KEY_HARNESS], prefix='CH_' + app_name))
-# values['env'].append(env_variable('CH_DOMAIN', values['domain']))
-# values['env'].append(env_variable(
-# 'CH_IMAGE_REGISTRY', values['registry']['name']))
-# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
-
-
-# def hosts_info(values):
-# domain = values['domain']
-# namespace = values['namespace']
-# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
-# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
-# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
-# try:
-# ip = get_cluster_ip()
-# except:
-# logging.warning('Cannot get cluster ip')
-# return
-# logging.info(
-# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
-
-# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
-# for app in values[KEY_APPS].values() if KEY_HARNESS in app)
-
-# logging.info(
-# "\nTo run locally some apps, also those references may be needed")
-# for appname in values[KEY_APPS]:
-# app = values[KEY_APPS][appname]['harness']
-# if 'deployment' not in app:
-# continue
-# print(
-# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
-# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
-
-# print(
-# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
-
-
-# class ValuesValidationException(Exception):
-# pass
-
-
-# def validate_helm_values(values):
-# validate_dependencies(values)
-
-
-# def validate_dependencies(values):
-# all_apps = {a for a in values["apps"]}
-# for app in all_apps:
-# app_values = values["apps"][app]
-# if 'dependencies' in app_values[KEY_HARNESS]:
-# soft_dependencies = {
-# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
-# not_found = {d for d in soft_dependencies if d not in all_apps}
-# if not_found:
-# logging.warning(
-# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
-# hard_dependencies = {
-# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
-# not_found = {d for d in hard_dependencies if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
-
-# build_dependencies = {
-# d for d in app_values[KEY_HARNESS]['dependencies']['build']}
-
-# not_found = {
-# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
-# not_found = {d for d in not_found if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
-
-# if 'use_services' in app_values[KEY_HARNESS]:
-# service_dependencies = {d['name'].replace(
-# "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
-
-# not_found = {d for d in service_dependencies if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
-
-
def identify_unicorn_based_main(candidates, app_path):
import re
gunicorn_pattern = re.compile(r"gunicorn")
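[editor's note] Only the first lines of the surviving identify_unicorn_based_main helper are visible in this hunk; it evidently scans candidate entrypoint files for gunicorn usage. A hedged sketch of how such a scan could look — the function name, file-reading details, and return shape below are assumptions, not the project's actual implementation:

    import re
    from pathlib import Path

    def find_gunicorn_candidates(candidates, app_path):
        # Sketch: return the candidate main files whose source mentions
        # gunicorn, so a caller can pick a WSGI-style entrypoint.
        gunicorn_pattern = re.compile(r"gunicorn")
        matches = []
        for candidate in candidates:
            source = Path(app_path, candidate).read_text(errors="ignore")
            if gunicorn_pattern.search(source):
                matches.append(candidate)
        return matches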
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index bd49f8eee..1f7408f1f 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -3,20 +3,12 @@
"""
import yaml
import os
-import shutil
import logging
-from hashlib import sha1
import subprocess
-from functools import cache
-import tarfile
-from docker import from_env as DockerClient
-
-from . import HERE, CH_ROOT
-from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
- DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH
-from .utils import get_cluster_ip, get_git_commit_hash, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
- get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
+from cloudharness_utils.constants import VALUES_MANUAL_PATH, HELM_CHART_PATH
+from .utils import get_cluster_ip, get_git_commit_hash, image_name_from_dockerfile_path, \
+ get_template, merge_to_yaml_file, dict_merge, app_name_from_path, \
find_dockerfiles_paths
from .models import HarnessMainConfig
@@ -45,59 +37,6 @@ def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain=
class CloudHarnessHelm(ConfigurationGenerator):
- # def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
- # output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- # namespace=None):
- # assert domain, 'A domain must be specified'
- # self.root_paths = root_paths
- # self.tag = tag
- # if registry and registry[-1] != '/':
- # self.registry = registry + '/'
- # else:
- # self.registry = registry
- # self.local = local
- # self.domain = domain
- # self.exclude = exclude
- # self.secured = secured
- # self.output_path = output_path
- # self.include = include
- # self.registry_secret = registry_secret
- # self.tls = tls
- # self.env = env
- # self.namespace = namespace
-
- # self.dest_deployment_path = os.path.join(
- # self.output_path, HELM_CHART_PATH)
- # self.helm_chart_path = os.path.join(
- # self.dest_deployment_path, 'Chart.yaml')
- # self.__init_deployment()
-
- # self.static_images = set()
- # self.base_images = {}
- # self.all_images = {}
-
- # def __init_deployment(self):
- # """
- # Create the base helm chart
- # """
- # if os.path.exists(self.dest_deployment_path):
- # shutil.rmtree(self.dest_deployment_path)
- # # Initialize with default
- # copy_merge_base_deployment(self.dest_deployment_path, os.path.join(
- # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
-
- # # Override for every cloudharness scaffolding
- # for root_path in self.root_paths:
- # copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
- # base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH))
- # collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
- # dest_helm_chart_path=self.dest_deployment_path)
-
- # def __adjust_missing_values(self, helm_values):
- # if 'name' not in helm_values:
- # with open(self.helm_chart_path) as f:
- # chart_idx_content = yaml.safe_load(f)
- # helm_values['name'] = chart_idx_content['name'].lower()
def process_values(self) -> HarnessMainConfig:
"""
@@ -141,174 +80,6 @@ def process_values(self) -> HarnessMainConfig:
validate_helm_values(merged_values)
return HarnessMainConfig.from_dict(merged_values)
- # def __process_applications(self, helm_values, base_image_name):
- # for root_path in self.root_paths:
- # app_values = init_app_values(
- # root_path, exclude=self.exclude, values=helm_values[KEY_APPS])
- # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- # app_values)
-
- # app_base_path = os.path.join(root_path, APPS_PATH)
- # app_values = self.collect_app_values(
- # app_base_path, base_image_name=base_image_name)
- # helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
- # app_values)
-
- # def collect_app_values(self, app_base_path, base_image_name=None):
- # values = {}
-
- # for app_path in get_sub_paths(app_base_path):
- # app_name = app_name_from_path(
- # os.path.relpath(app_path, app_base_path))
-
- # if app_name in self.exclude:
- # continue
- # app_key = app_name.replace('-', '_')
-
- # app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
-
- # values[app_key] = dict_merge(
- # values[app_key], app_values) if app_key in values else app_values
-
- # return values
-
- # def __init_static_images(self, base_image_name):
- # for static_img_dockerfile in self.static_images:
- # img_name = image_name_from_dockerfile_path(os.path.basename(
- # static_img_dockerfile), base_name=base_image_name)
- # self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=static_img_dockerfile)
-
- # def __assign_static_build_dependencies(self, helm_values):
- # for static_img_dockerfile in self.static_images:
- # key = os.path.basename(static_img_dockerfile)
- # if key in helm_values[KEY_TASK_IMAGES]:
- # dependencies = guess_build_dependencies_from_dockerfile(
- # static_img_dockerfile)
- # for dep in dependencies:
- # if dep in self.base_images and dep not in helm_values[KEY_TASK_IMAGES]:
- # helm_values[KEY_TASK_IMAGES][dep] = self.base_images[dep]
-
- # for image_name in list(helm_values[KEY_TASK_IMAGES].keys()):
- # if image_name in self.exclude:
- # del helm_values[KEY_TASK_IMAGES][image_name]
-
- # def __init_base_images(self, base_image_name):
-
- # for root_path in self.root_paths:
- # for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path):
- # img_name = image_name_from_dockerfile_path(
- # os.path.basename(base_img_dockerfile), base_name=base_image_name)
- # self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=root_path)
-
- # self.static_images.update(find_dockerfiles_paths(
- # os.path.join(root_path, STATIC_IMAGES_PATH)))
- # return self.base_images
-
- # def __init_test_images(self, base_image_name):
- # test_images = {}
- # for root_path in self.root_paths:
- # for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)):
- # img_name = image_name_from_dockerfile_path(
- # os.path.basename(base_img_dockerfile), base_name=base_image_name)
- # test_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- # img_name, build_context_path=base_img_dockerfile)
-
- # return test_images
-
-
- # def __find_static_dockerfile_paths(self, root_path):
- # return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH))
-
- # def __merge_base_helm_values(self, helm_values):
- # # Override for every cloudharness scaffolding
- # for root_path in self.root_paths:
- # helm_values = dict_merge(
- # helm_values,
- # collect_helm_values(root_path, env=self.env)
- # )
-
- # return helm_values
-
- # def __get_default_helm_values(self):
- # helm_values = get_template(os.path.join(
- # CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH, 'values.yaml'))
- # helm_values = dict_merge(helm_values,
- # collect_helm_values(CH_ROOT, env=self.env))
-
- # return helm_values
-
- # def create_tls_certificate(self, helm_values):
- # if not self.tls:
- # helm_values['tls'] = None
- # return
- # if not self.local:
- # return
- # helm_values['tls'] = self.domain.replace(".", "-") + "-tls"
-
- # bootstrap_file = 'bootstrap.sh'
- # certs_parent_folder_path = os.path.join(
- # self.output_path, 'helm', 'resources')
- # certs_folder_path = os.path.join(certs_parent_folder_path, 'certs')
-
- # if os.path.exists(os.path.join(certs_folder_path)):
- # # don't overwrite the certificate if it exists
- # return
-
- # try:
- # client = DockerClient()
- # client.ping()
- # except:
- # raise ConnectionRefusedError(
- # '\n\nIs docker running? Run "eval(minikube docker-env)" if you are using minikube...')
-
- # # Create CA and sign cert for domain
- # container = client.containers.run(image='frapsoft/openssl',
- # command=f'sleep 60',
- # entrypoint="",
- # detach=True,
- # environment=[
- # f"DOMAIN={self.domain}"],
- # )
-
- # container.exec_run('mkdir -p /mnt/vol1')
- # container.exec_run('mkdir -p /mnt/certs')
-
- # # copy bootstrap file
- # cur_dir = os.getcwd()
- # os.chdir(os.path.join(HERE, 'scripts'))
- # tar = tarfile.open(bootstrap_file + '.tar', mode='w')
- # try:
- # tar.add(bootstrap_file)
- # finally:
- # tar.close()
- # data = open(bootstrap_file + '.tar', 'rb').read()
- # container.put_archive('/mnt/vol1', data)
- # os.chdir(cur_dir)
- # container.exec_run(f'tar x {bootstrap_file}.tar', workdir='/mnt/vol1')
-
- # # exec bootstrap file
- # container.exec_run(f'/bin/ash /mnt/vol1/{bootstrap_file}')
-
- # # retrieve the certs from the container
- # bits, stat = container.get_archive('/mnt/certs')
- # if not os.path.exists(certs_folder_path):
- # os.makedirs(certs_folder_path)
- # f = open(f'{certs_parent_folder_path}/certs.tar', 'wb')
- # for chunk in bits:
- # f.write(chunk)
- # f.close()
- # cf = tarfile.open(f'{certs_parent_folder_path}/certs.tar')
- # cf.extractall(path=certs_parent_folder_path)
-
- # logs = container.logs()
- # logging.info(f'openssl container logs: {logs}')
-
- # # stop the container
- # container.kill()
-
- # logging.info("Created certificates for local deployment")
def __finish_helm_values(self, values):
"""
@@ -383,37 +154,6 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- # def __clear_unused_db_configuration(self, harness_config):
- # database_config = harness_config[KEY_DATABASE]
- # database_type = database_config.get('type', None)
- # if database_type is None:
- # del harness_config[KEY_DATABASE]
- # return
- # db_specific_keys = [k for k, v in database_config.items()
- # if isinstance(v, dict) and 'image' in v and 'ports' in v]
- # for db in db_specific_keys:
- # if database_type != db:
- # del database_config[db]
-
- # def image_tag(self, image_name, build_context_path=None, dependencies=()):
- # tag = self.tag
- # if tag is None and not self.local:
- # logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- # ignore_path = os.path.join(build_context_path, '.dockerignore')
- # ignore = set(DEFAULT_IGNORE)
- # if os.path.exists(ignore_path):
- # with open(ignore_path) as f:
- # ignore = ignore.union({line.strip() for line in f})
- # logging.info(f"Ignoring {ignore}")
- # tag = generate_tag_from_content(build_context_path, ignore)
- # logging.info(f"Content hash: {tag}")
- # dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- # tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
- # logging.info(f"Generated tag: {tag}")
- # app_name = image_name.split("/")[-1] # the image name can have a prefix
- # self.all_images[app_name] = tag
- # return self.registry + image_name + (f':{tag}' if tag else '')
-
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
@@ -478,284 +218,3 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys())
return values
-
-
-# def get_included_with_dependencies(values, include):
-# app_values = values['apps'].values()
-# directly_included = [app for app in app_values if any(
-# inc == app[KEY_HARNESS]['name'] for inc in include)]
-
-# dependent = set(include)
-# for app in directly_included:
-# if app['harness']['dependencies'].get('hard', None):
-# dependent.update(set(app[KEY_HARNESS]['dependencies']['hard']))
-# if app['harness']['dependencies'].get('soft', None):
-# dependent.update(set(app[KEY_HARNESS]['dependencies']['soft']))
-# if values['secured_gatekeepers'] and app[KEY_HARNESS]['secured']:
-# dependent.add('accounts')
-# if len(dependent) == len(include):
-# return dependent
-# return get_included_with_dependencies(values, dependent)
-
-
-# def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH):
-# pass
-
-
-# def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=(), include=None):
-# """
-# Searches recursively for helm templates inside the applications and collects the templates in the destination
-
-# :param search_root:
-# :param dest_helm_chart_path: collected helm templates destination folder
-# :param exclude:
-# :return:
-# """
-# app_base_path = os.path.join(search_root, APPS_PATH)
-
-# for app_path in get_sub_paths(app_base_path):
-# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
-# if app_name in exclude or (include and not any(inc in app_name for inc in include)):
-# continue
-# template_dir = os.path.join(app_path, 'deploy', 'templates')
-# if os.path.exists(template_dir):
-# dest_dir = os.path.join(
-# dest_helm_chart_path, 'templates', app_name)
-
-# logging.info(
-# "Collecting templates for application %s to %s", app_name, dest_dir)
-# if os.path.exists(dest_dir):
-# logging.warning(
-# "Merging/overriding all files in directory %s", dest_dir)
-# merge_configuration_directories(template_dir, dest_dir)
-# else:
-# shutil.copytree(template_dir, dest_dir)
-# resources_dir = os.path.join(app_path, 'deploy/resources')
-# if os.path.exists(resources_dir):
-# dest_dir = os.path.join(
-# dest_helm_chart_path, 'resources', app_name)
-
-# logging.info(
-# "Collecting resources for application %s to %s", app_name, dest_dir)
-
-# merge_configuration_directories(resources_dir, dest_dir)
-
-# subchart_dir = os.path.join(app_path, 'deploy/charts')
-# if os.path.exists(subchart_dir):
-# dest_dir = os.path.join(dest_helm_chart_path, 'charts', app_name)
-
-# logging.info(
-# "Collecting templates for application %s to %s", app_name, dest_dir)
-# if os.path.exists(dest_dir):
-# logging.warning(
-# "Merging/overriding all files in directory %s", dest_dir)
-# merge_configuration_directories(subchart_dir, dest_dir)
-# else:
-# shutil.copytree(subchart_dir, dest_dir)
-
-
-# def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
-# if not os.path.exists(base_helm_chart):
-# return
-# if os.path.exists(dest_helm_chart_path):
-# logging.info("Merging/overriding all files in directory %s",
-# dest_helm_chart_path)
-# merge_configuration_directories(base_helm_chart, dest_helm_chart_path)
-# else:
-# logging.info("Copying base deployment chart from %s to %s",
-# base_helm_chart, dest_helm_chart_path)
-# shutil.copytree(base_helm_chart, dest_helm_chart_path)
-
-
-# def collect_helm_values(deployment_root, env=()):
-# """
-# Creates helm values from a cloudharness deployment scaffolding
-# """
-
-# values_template_path = os.path.join(
-# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml')
-
-# values = get_template(values_template_path)
-
-# for e in env:
-# specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
-# f'values-template-{e}.yaml')
-# if os.path.exists(specific_template_path):
-# logging.info(
-# "Specific environment values template found: " + specific_template_path)
-# with open(specific_template_path) as f:
-# values_env_specific = yaml.safe_load(f)
-# values = dict_merge(values, values_env_specific)
-# return values
-
-
-# def init_app_values(deployment_root, exclude, values=None):
-# values = values if values is not None else {}
-# app_base_path = os.path.join(deployment_root, APPS_PATH)
-# overridden_template_path = os.path.join(
-# deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-# default_values_path = os.path.join(
-# CH_ROOT, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml')
-
-# for app_path in get_sub_paths(app_base_path):
-
-# app_name = app_name_from_path(os.path.relpath(app_path, app_base_path))
-
-# if app_name in exclude:
-# continue
-# app_key = app_name.replace('-', '_')
-# if app_key not in values:
-# default_values = get_template(default_values_path)
-# values[app_key] = default_values
-# overridden_defaults = get_template(overridden_template_path)
-# values[app_key] = dict_merge(values[app_key], overridden_defaults)
-
-# return values
-
-
-# def values_from_legacy(values):
-# if KEY_HARNESS not in values:
-# values[KEY_HARNESS] = {}
-# harness = values[KEY_HARNESS]
-# if KEY_SERVICE not in harness:
-# harness[KEY_SERVICE] = {}
-# if KEY_DEPLOYMENT not in harness:
-# harness[KEY_DEPLOYMENT] = {}
-# if KEY_DATABASE not in harness:
-# harness[KEY_DATABASE] = {}
-
-# if 'subdomain' in values:
-# harness['subdomain'] = values['subdomain']
-# if 'autodeploy' in values:
-# harness[KEY_DEPLOYMENT]['auto'] = values['autodeploy']
-# if 'autoservice' in values:
-# harness[KEY_SERVICE]['auto'] = values['autoservice']
-# if 'secureme' in values:
-# harness['secured'] = values['secureme']
-# if 'resources' in values:
-# harness[KEY_DEPLOYMENT]['resources'].update(values['resources'])
-# if 'replicas' in values:
-# harness[KEY_DEPLOYMENT]['replicas'] = values['replicas']
-# if 'image' in values:
-# harness[KEY_DEPLOYMENT]['image'] = values['image']
-# if 'port' in values:
-# harness[KEY_DEPLOYMENT]['port'] = values['port']
-# harness[KEY_SERVICE]['port'] = values['port']
-
-
-# def values_set_legacy(values):
-# harness = values[KEY_HARNESS]
-# if 'image' in harness[KEY_DEPLOYMENT]:
-# values['image'] = harness[KEY_DEPLOYMENT]['image']
-
-# values['name'] = harness['name']
-# if harness[KEY_DEPLOYMENT].get('port', None):
-# values['port'] = harness[KEY_DEPLOYMENT]['port']
-# if 'resources' in harness[KEY_DEPLOYMENT]:
-# values['resources'] = harness[KEY_DEPLOYMENT]['resources']
-
-
-# def generate_tag_from_content(content_path, ignore=()):
-# from dirhash import dirhash
-# return dirhash(content_path, 'sha1', ignore=ignore)
-
-
-# def extract_env_variables_from_values(values, envs=tuple(), prefix=''):
-# if isinstance(values, dict):
-# newenvs = list(envs)
-# for key, value in values.items():
-# v = extract_env_variables_from_values(
-# value, envs, f"{prefix}_{key}".replace('-', '_').upper())
-# if key in ('name', 'port', 'subdomain'):
-# newenvs.extend(v)
-# return newenvs
-# else:
-# return [env_variable(prefix, values)]
-
-
-# def create_env_variables(values):
-# for app_name, value in values[KEY_APPS].items():
-# if KEY_HARNESS in value:
-# values['env'].extend(extract_env_variables_from_values(
-# value[KEY_HARNESS], prefix='CH_' + app_name))
-# values['env'].append(env_variable('CH_DOMAIN', values['domain']))
-# values['env'].append(env_variable(
-# 'CH_IMAGE_REGISTRY', values['registry']['name']))
-# values['env'].append(env_variable('CH_IMAGE_TAG', values['tag']))
-
-
-# def hosts_info(values):
-# domain = values['domain']
-# namespace = values['namespace']
-# subdomains = [app[KEY_HARNESS]['subdomain'] for app in values[KEY_APPS].values() if
-# KEY_HARNESS in app and app[KEY_HARNESS]['subdomain']] + [alias for app in values[KEY_APPS].values() if
-# KEY_HARNESS in app and app[KEY_HARNESS]['aliases'] for alias in app[KEY_HARNESS]['aliases']]
-# try:
-# ip = get_cluster_ip()
-# except:
-# logging.warning('Cannot get cluster ip')
-# return
-# logging.info(
-# "\nTo test locally, update your hosts file" + f"\n{ip}\t{domain + ' ' + ' '.join(sd + '.' + domain for sd in subdomains)}")
-
-# deployments = (app[KEY_HARNESS][KEY_DEPLOYMENT]['name']
-# for app in values[KEY_APPS].values() if KEY_HARNESS in app)
-
-# logging.info(
-# "\nTo run locally some apps, also those references may be needed")
-# for appname in values[KEY_APPS]:
-# app = values[KEY_APPS][appname]['harness']
-# if 'deployment' not in app:
-# continue
-# print(
-# "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format(
-# app=app['deployment']['name'], port=app['deployment']['port'], namespace=namespace))
-
-# print(
-# f"127.0.0.1\t{' '.join('%s.%s' % (s, values['namespace']) for s in deployments)}")
-
-
-# class ValuesValidationException(Exception):
-# pass
-
-
-# def validate_helm_values(values):
-# validate_dependencies(values)
-
-
-# def validate_dependencies(values):
-# all_apps = {a for a in values["apps"]}
-# for app in all_apps:
-# app_values = values["apps"][app]
-# if 'dependencies' in app_values[KEY_HARNESS]:
-# soft_dependencies = {
-# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['soft']}
-# not_found = {d for d in soft_dependencies if d not in all_apps}
-# if not_found:
-# logging.warning(
-# f"Soft dependencies specified for application {app} not found: {','.join(not_found)}")
-# hard_dependencies = {
-# d.replace("-", "_") for d in app_values[KEY_HARNESS]['dependencies']['hard']}
-# not_found = {d for d in hard_dependencies if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad application dependencies specified for application {app}: {','.join(not_found)}")
-
-# build_dependencies = {
-# d for d in app_values[KEY_HARNESS]['dependencies']['build']}
-
-# not_found = {
-# d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]}
-# not_found = {d for d in not_found if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad build dependencies specified for application {app}: {','.join(not_found)} not found as built image")
-
-# if 'use_services' in app_values[KEY_HARNESS]:
-# service_dependencies = {d['name'].replace(
-# "-", "_") for d in app_values[KEY_HARNESS]['use_services']}
-
-# not_found = {d for d in service_dependencies if d not in all_apps}
-# if not_found:
-# raise ValuesValidationException(
-# f"Bad service application dependencies specified for application {app}: {','.join(not_found)}")
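[editor's note] Patch 56 deletes these commented-out copies wholesale; the live versions of helpers such as validate_dependencies now resolve from ch_cli_tools.configurationgenerator, which the tests import in the following patch. The validation idea itself, in miniature (a sketch based on the deleted code above, not the module's actual implementation):

    class ValuesValidationException(Exception):
        pass

    def check_hard_dependencies(apps):
        # Sketch: every hard dependency must name another known app;
        # an unknown name aborts deployment generation with an error.
        known = set(apps)
        for app, config in apps.items():
            missing = {d for d in config.get('hard', ()) if d not in known}
            if missing:
                raise ValuesValidationException(
                    f"Bad application dependencies specified for application "
                    f"{app}: {','.join(missing)}")

    check_hard_dependencies({'myapp': {'hard': ['accounts']}, 'accounts': {}})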
From a4cd813718e35efebab29586431661fcfccc83be Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 08:21:21 -0600
Subject: [PATCH 57/94] CH-100 Refactor helm test
---
.../cloudharness_utils/testing/util.py | 6 +-
.../ch_cli_tools/configurationgenerator.py | 4 +-
.../deployment-cli-tools/ch_cli_tools/helm.py | 2 +-
tools/deployment-cli-tools/tests/test_helm.py | 189 ++++++++----------
4 files changed, 89 insertions(+), 112 deletions(-)
diff --git a/libraries/cloudharness-utils/cloudharness_utils/testing/util.py b/libraries/cloudharness-utils/cloudharness_utils/testing/util.py
index b0e98624e..6f11adcc5 100644
--- a/libraries/cloudharness-utils/cloudharness_utils/testing/util.py
+++ b/libraries/cloudharness-utils/cloudharness_utils/testing/util.py
@@ -1,7 +1,7 @@
-from cgi import test
+# from cgi import test
import os
-from os.path import dirname as dn
+# from os.path import dirname as dn
from cloudharness_model.models import ApplicationUser, ApplicationTestConfig, ApplicationHarnessConfig, E2ETestsConfig
@@ -11,7 +11,7 @@ def get_user_password(main_user: ApplicationUser):
def get_app_environment(app_config: ApplicationHarnessConfig, app_domain, use_local_env=True):
my_env = os.environ.copy() if use_local_env else {}
my_env["APP_URL"] = app_domain
-
+
if app_config.accounts and app_config.accounts.users:
main_user: ApplicationUser = app_config.accounts.users[0]
diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
index 9a445456f..ba974dfc0 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
@@ -38,7 +38,7 @@
class ConfigurationGenerator(object):
- def __init__(self, root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ def __init__(self, root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None, templates_path=HELM_PATH):
assert domain, 'A domain must be specified'
@@ -56,7 +56,7 @@ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=Non
self.include = include
self.registry_secret = registry_secret
self.tls = tls
- self.env = env
+ self.env = env or {}
self.namespace = namespace
self.templates_path = templates_path
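[editor's note] Two small hardening changes in this hunk: tag is annotated str | int | None (content-hash tags are strings, the tests pass integers such as tag=1, and None triggers tag generation from content), and self.env = env or {} guarantees downstream code can iterate self.env without a None check. A toy illustration of the `or {}` idiom:

    # Sketch: normalizing an optional parameter at construction time
    # spares every consumer from re-checking for None.
    class Config:
        def __init__(self, env=None):
            self.env = env or {}  # None (or empty) becomes a fresh dict

    for cfg in (Config(), Config(env={'stage': 'dev'})):
        # Safe to iterate unconditionally in both cases.
        for key in cfg.env:
            print(key, cfg.env[key])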
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index 1f7408f1f..daae2d160 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -26,7 +26,7 @@ def deploy(namespace, output_path='./deployment'):
f"helm upgrade {namespace} {helm_path} -n {namespace} --install --reset-values".split())
-def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+def create_helm_chart(root_paths, tag: str | None | int ='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
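[editor's note] The trailing context line suggests create_helm_chart wraps a bare string env before handing it to the generator, since the env-collection code iterates over env as a sequence of names (the tests pass both env='dev' and env=['dev', 'test']). A sketch of that normalization under this assumption — the truncated hunk does not show the actual wrapping:

    def normalize_env(env):
        # Sketch: accept either one environment name or a list of names,
        # and always hand a list to the rest of the pipeline.
        if isinstance(env, str):  # idiomatic form of `type(env) == str`
            return [env]
        return env if env is not None else []

    print(normalize_env('dev'))            # ['dev']
    print(normalize_env(['dev', 'test']))  # ['dev', 'test']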
diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py
index 5fa269d64..9a7734422 100644
--- a/tools/deployment-cli-tools/tests/test_helm.py
+++ b/tools/deployment-cli-tools/tests/test_helm.py
@@ -1,16 +1,19 @@
-import shutil
-
from ch_cli_tools.helm import *
from ch_cli_tools.configurationgenerator import *
+import pytest
HERE = os.path.dirname(os.path.realpath(__file__))
RESOURCES = os.path.join(HERE, 'resources')
-OUT = '/tmp/deployment'
CLOUDHARNESS_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
-def test_collect_helm_values():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'],
+def exists(path):
+ return path.exists()
+
+
+def test_collect_helm_values(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
exclude=['events'], domain="my.local",
namespace='test', env='dev', local=False, tag=1, registry='reg')
@@ -51,24 +54,21 @@ def test_collect_helm_values():
# Environment specific overriding
assert values[KEY_APPS]['accounts']['a'] == 'dev'
assert values['a'] == 'dev'
- assert values['database']['auto'] == False
+ assert values['database']['auto'] is False
# legacy reading
- assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True
- assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False
- helm_path = os.path.join(OUT, HELM_CHART_PATH)
-
- def exists(*args):
- return os.path.exists(os.path.join(*args))
+ helm_path = out_folder / HELM_CHART_PATH
# Check files
assert exists(helm_path)
- assert exists(helm_path, 'values.yaml')
- assert exists(helm_path, 'resources/accounts/realm.json')
- assert exists(helm_path, 'resources/accounts/aresource.txt')
- assert exists(helm_path, 'resources/myapp/aresource.txt')
- assert exists(helm_path, 'templates/myapp/mytemplate.yaml')
+ assert exists(helm_path / 'values.yaml')
+ assert exists(helm_path / 'resources' / 'accounts' / 'realm.json')
+ assert exists(helm_path / 'resources' / 'accounts' / 'aresource.txt')
+ assert exists(helm_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(helm_path / 'templates' / 'myapp' / 'mytemplate.yaml')
# Check base and task images
assert values[KEY_TASK_IMAGES]
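[editor's note] Alongside the fixture change, the refactor promotes exists to a module-level helper taking a single pathlib.Path, and joins path components with the / operator instead of os.path.join. A small sketch of the equivalence (paths are illustrative):

    import os
    from pathlib import Path

    helm_path = Path('/tmp') / 'deployment' / 'helm'

    # The two spellings below name the same file; the pathlib form
    # composes with `/` and stays a Path object throughout.
    legacy = os.path.join(str(helm_path), 'resources', 'accounts', 'realm.json')
    modern = helm_path / 'resources' / 'accounts' / 'realm.json'
    assert str(modern) == legacy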
@@ -78,8 +78,6 @@ def exists(*args):
# Not indicated as a build dependency
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES]
- shutil.rmtree(OUT)
-
def test_collect_helm_values_noreg_noinclude(tmp_path):
out_path = tmp_path / 'test_collect_helm_values_noreg_noinclude'
@@ -115,24 +113,21 @@ def test_collect_helm_values_noreg_noinclude(tmp_path):
# Environment specific overriding
assert values[KEY_APPS]['accounts']['a'] == 'dev'
assert values['a'] == 'dev'
- assert values['database']['auto'] == False
+ assert values['database']['auto'] is False
# legacy reading
- assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] == True
- assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] == False
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False
helm_path = out_path / HELM_CHART_PATH
- def exists(*args):
- return os.path.exists(os.path.join(*args))
-
# Check files
assert exists(helm_path)
- assert exists(helm_path, 'values.yaml')
- assert exists(helm_path, 'resources/accounts/realm.json')
- assert exists(helm_path, 'resources/accounts/aresource.txt')
- assert exists(helm_path, 'resources/myapp/aresource.txt')
- assert exists(helm_path, 'templates/myapp/mytemplate.yaml')
+ assert exists(helm_path / 'values.yaml')
+ assert exists(helm_path / 'resources' / 'accounts' / 'realm.json')
+ assert exists(helm_path / 'resources' / 'accounts' / 'aresource.txt')
+ assert exists(helm_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(helm_path / 'templates' / 'myapp' / 'mytemplate.yaml')
assert values[KEY_TASK_IMAGES]
assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
@@ -140,68 +135,50 @@ def exists(*args):
assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1'
-def test_collect_helm_values_precedence():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+def test_collect_helm_values_precedence(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_precedence'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["events"])
# Values.yaml from current app must override values-prod.yaml from cloudharness
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden'
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod'
-def test_collect_helm_values_multiple_envs():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+def test_collect_helm_values_multiple_envs(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_multiple_envs'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"])
- assert values[KEY_APPS]['myapp']['test'] == True, 'values-test not loaded'
- assert values[KEY_APPS]['myapp']['dev'] == True, 'values-dev not loaded'
+ assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded'
+ assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded'
assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding'
-def test_collect_helm_values_wrong_dependencies_validate():
- try:
- values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local",
+def test_collect_helm_values_wrong_dependencies_validate(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_wrong_dependencies_validate'
+ with pytest.raises(ValuesValidationException):
+ create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
-
- except ValuesValidationException as e:
- logging.info("Exception correctly raised %s", e.args)
- assert True
- else:
- assert False, "Should error because of wrong hard dependency"
-
try:
- values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local",
+ create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
except ValuesValidationException as e:
- assert False, "Should not error because of wrong soft dependency"
- else:
- assert True, "No error for wrong soft dependencies"
+ pytest.fail("Should not error because of wrong soft dependency")
- try:
- values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local",
+ with pytest.raises(ValuesValidationException):
+ create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
-
- except ValuesValidationException as e:
- logging.info("Exception correctly raised %s", e.args)
- assert True
- else:
- assert False, "Should error because of wrong build dependency"
-
- try:
- values = create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=OUT, domain="my.local",
+ with pytest.raises(ValuesValidationException):
+ create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
- except ValuesValidationException as e:
- logging.info("Exception correctly raised %s", e.args)
- assert True
- else:
- assert False, "Should error because of wrong service dependency"
-
-def test_collect_helm_values_build_dependencies():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+def test_collect_helm_values_build_dependencies(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_build_dependencies'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["myapp"])
assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
@@ -209,8 +186,9 @@ def test_collect_helm_values_build_dependencies():
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
-def test_collect_helm_values_build_dependencies_nodeps():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+def test_collect_helm_values_build_dependencies_nodeps(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_build_dependencies_nodeps'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["events"])
@@ -219,8 +197,10 @@ def test_collect_helm_values_build_dependencies_nodeps():
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
-def test_collect_helm_values_build_dependencies_exclude():
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+
+def test_collect_helm_values_build_dependencies_exclude(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_build_dependencies_exclude'
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"])
@@ -229,9 +209,10 @@ def test_collect_helm_values_build_dependencies_exclude():
assert 'workflows-extract-download' not in values[KEY_TASK_IMAGES], "workflows-extract-download has been explicitly excluded"
-def test_clear_unused_dbconfig():
+def test_clear_unused_dbconfig(tmp_path):
+ out_folder = tmp_path / 'test_clear_unused_dbconfig'
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
env='withpostgres', local=False, include=["myapp"], exclude=["legacy"])
# There is a DB config
@@ -248,7 +229,7 @@ def test_clear_unused_dbconfig():
assert db_config['mongo'] is None
assert db_config['neo4j'] is None
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
env='withmongo', local=False, include=["myapp"], exclude=["legacy"])
assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
@@ -262,9 +243,10 @@ def test_clear_unused_dbconfig():
assert db_config['postgres'] is None
-def test_clear_all_dbconfig_if_nodb():
+def test_clear_all_dbconfig_if_nodb(tmp_path):
+ out_folder = tmp_path / 'test_clear_all_dbconfig_if_nodb'
- values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, domain="my.local",
+ values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
env='withoutdb', local=False, include=["myapp"], exclude=["legacy"])
# There is a DB config
@@ -274,6 +256,7 @@ def test_clear_all_dbconfig_if_nodb():
db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE]
assert db_config is None
+
def test_tag_hash_generation():
v1 = generate_tag_from_content(RESOURCES)
v2 = generate_tag_from_content(RESOURCES, ignore=['myapp'])
@@ -285,21 +268,22 @@ def test_tag_hash_generation():
v5 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*'])
assert v5 == v4
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
try:
- fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt')
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
v6 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*'])
assert v6 == v5
v7 = generate_tag_from_content(RESOURCES)
assert v7 != v1
finally:
- os.remove(fname)
+ fname.unlink()
+
-def test_collect_helm_values_auto_tag():
+def test_collect_helm_values_auto_tag(tmp_path):
+ out_folder = tmp_path / 'test_collect_helm_values_auto_tag'
def create():
- return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'],
+ return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
exclude=['events'], domain="my.local",
namespace='test', env='dev', local=False, tag=None, registry='reg')
@@ -321,62 +305,55 @@ def create():
assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore"
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
try:
- fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt')
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
values = create()
assert v1 != values.apps['myapp'].harness.deployment.image, "Adding the file changed the hash value"
v2 = values.apps['myapp'].harness.deployment.image
assert values["task-images"][BASE_KEY] == b1, "Application files should be ignored for base image following the root .dockerignore"
finally:
- os.remove(fname)
-
+ fname.unlink()
try:
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
values = create()
assert v2 == values.apps['myapp'].harness.deployment.image, "Recreated an identical file, the hash value should be the same"
finally:
- os.remove(fname)
+ fname.unlink()
- fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.ignored')
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored'
try:
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
+
values = create()
assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore"
assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore"
finally:
- os.remove(fname)
-
-
+ fname.unlink()
# Dependencies test: if a dependency is changed, the hash should change
- fname = os.path.join(RESOURCES, 'infrastructure/common-images', 'my-common', 'afile')
+ fname = Path(RESOURCES) / 'infrastructure' / 'common-images' / 'my-common' / 'afile'
try:
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
values = create()
assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change"
assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change"
finally:
- os.remove(fname)
+ fname.unlink()
- fname = os.path.join(CLOUDHARNESS_ROOT, 'atestfile')
+ fname = Path(CLOUDHARNESS_ROOT) / 'atestfile'
try:
- with open(fname, 'w') as f:
- f.write('a')
+ fname.write_text('a')
values = create()
@@ -384,4 +361,4 @@ def create():
assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change"
assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change"
finally:
- os.remove(fname)
+ fname.unlink()
From 1d63bc4f139fe3edef85d2bce3713ca10e1f97a3 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 08:37:34 -0600
Subject: [PATCH 58/94] CH-100 Add tests for docker compose target
---
.../cloudharness_utils/constants.py | 1 +
.../ch_cli_tools/dockercompose.py | 2 +-
.../deploy/templates-compose/mytemplate.yaml | 0
.../tests/test_dockercompose.py | 365 ++++++++++++++++++
4 files changed, 367 insertions(+), 1 deletion(-)
create mode 100644 tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml
create mode 100644 tools/deployment-cli-tools/tests/test_dockercompose.py
diff --git a/libraries/cloudharness-utils/cloudharness_utils/constants.py b/libraries/cloudharness-utils/cloudharness_utils/constants.py
index d989cff90..1cd12a7df 100644
--- a/libraries/cloudharness-utils/cloudharness_utils/constants.py
+++ b/libraries/cloudharness-utils/cloudharness_utils/constants.py
@@ -12,6 +12,7 @@
HELM_ENGINE = HELM_PATH
COMPOSE = 'compose'
+COMPOSE_PATH = COMPOSE
COMPOSE_ENGINE = 'docker-compose'
INFRASTRUCTURE_PATH = 'infrastructure'
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index f65e352bf..1a96e5627 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -18,7 +18,7 @@
from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates
-def create_docker_compose_configuration(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+def create_docker_compose_configuration(root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
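Note: the PEP 604 union syntax "str | int | None" used in this annotation only parses on Python 3.10+; PATCH 60 below rewrites these signatures with typing.Union to keep Python 3.9 support. A minimal sketch of the compatible form (parameters abbreviated):

    from typing import Union

    # 3.9-compatible equivalent of the annotation above (see PATCH 60).
    def create_docker_compose_configuration(root_paths, tag: Union[str, int, None] = 'latest',
                                            registry='', local=True, domain=None):
        ...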
diff --git a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml
new file mode 100644
index 000000000..e69de29bb
diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py
new file mode 100644
index 000000000..c59d552f9
--- /dev/null
+++ b/tools/deployment-cli-tools/tests/test_dockercompose.py
@@ -0,0 +1,365 @@
+from ch_cli_tools.dockercompose import *
+from ch_cli_tools.configurationgenerator import *
+import pytest
+
+HERE = os.path.dirname(os.path.realpath(__file__))
+RESOURCES = os.path.join(HERE, 'resources')
+CLOUDHARNESS_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
+COMPOSE_PATH = COMPOSE
+
+
+def exists(path):
+ return path.exists()
+
+
+def test_collect_compose_values(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
+ exclude=['events'], domain="my.local",
+ namespace='test', env='dev', local=False, tag=1, registry='reg')
+
+ # Auto values
+ assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'reg/cloudharness/myapp:1'
+ assert values.apps['myapp'].harness.deployment.image == 'reg/cloudharness/myapp:1'
+ assert values[KEY_APPS]['myapp'][KEY_HARNESS]['name'] == 'myapp'
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['name'] == 'legacy'
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['image'] == 'reg/cloudharness/accounts:1'
+
+ # First level include apps
+ assert 'samples' in values[KEY_APPS]
+ assert 'myapp' in values[KEY_APPS]
+
+ # Not included
+ assert 'jupyterhub' not in values[KEY_APPS]
+
+ # Dependency include first level
+ assert 'accounts' in values[KEY_APPS]
+ assert 'legacy' in values[KEY_APPS]
+
+ # Dependency include second level
+ assert 'argo' in values[KEY_APPS]
+
+ # Explicit exclude overrides include
+ assert 'events' not in values[KEY_APPS]
+
+ # Base values kept
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['subdomain'] == 'accounts'
+
+ # Defaults
+ assert 'service' in values[KEY_APPS]['legacy'][KEY_HARNESS]
+ assert 'common' in values[KEY_APPS]['legacy']
+ assert 'common' in values[KEY_APPS]['accounts']
+ # Values overriding
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['port'] == 'overridden'
+
+ # Environment specific overriding
+ assert values[KEY_APPS]['accounts']['a'] == 'dev'
+ assert values['a'] == 'dev'
+ assert values['database']['auto'] is False
+
+ # legacy reading
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False
+
+ compose_path = out_folder / COMPOSE_PATH
+
+ # Check files
+ assert exists(compose_path)
+ assert exists(compose_path / 'values.yaml')
+ assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
+ assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
+ assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
+
+ # Check base and task images
+ assert values[KEY_TASK_IMAGES]
+ assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
+ assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'reg/cloudharness/cloudharness-base:1'
+ assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'reg/cloudharness/myapp-mytask:1'
+ # Not indicated as a build dependency
+ assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES]
+
+
+def test_collect_compose_values_noreg_noinclude(tmp_path):
+ out_path = tmp_path / 'test_collect_compose_values_noreg_noinclude'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_path, domain="my.local",
+ namespace='test', env='dev', local=False, tag=1)
+
+ # Auto values
+ assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'cloudharness/myapp:1'
+ assert values[KEY_APPS]['myapp'][KEY_HARNESS]['name'] == 'myapp'
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['name'] == 'legacy'
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['image'] == 'cloudharness/accounts:1'
+
+ # First level include apps
+ assert 'samples' in values[KEY_APPS]
+ assert 'myapp' in values[KEY_APPS]
+ assert 'jupyterhub' in values[KEY_APPS]
+ assert 'accounts' in values[KEY_APPS]
+ assert 'legacy' in values[KEY_APPS]
+ assert 'argo' in values[KEY_APPS]
+ assert 'events' in values[KEY_APPS]
+
+ # Base values kept
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['subdomain'] == 'accounts'
+
+ # Defaults
+ assert 'service' in values[KEY_APPS]['legacy'][KEY_HARNESS]
+ assert 'common' in values[KEY_APPS]['legacy']
+ assert 'common' in values[KEY_APPS]['accounts']
+ # Values overriding
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['port'] == 'overridden'
+ assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden'
+
+ # Environment specific overriding
+ assert values[KEY_APPS]['accounts']['a'] == 'dev'
+ assert values['a'] == 'dev'
+ assert values['database']['auto'] is False
+
+ # legacy reading
+ assert values[KEY_APPS]['accounts'][KEY_HARNESS]['deployment']['auto'] is True
+ assert values[KEY_APPS]['legacy'][KEY_HARNESS]['deployment']['auto'] is False
+
+ compose_path = out_path / COMPOSE_PATH
+
+ # Check files
+ assert exists(compose_path)
+ assert exists(compose_path / 'values.yaml')
+ assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
+ assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
+ assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
+
+ assert values[KEY_TASK_IMAGES]
+ assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
+ assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1'
+ assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1'
+
+
+def test_collect_compose_values_precedence(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_precedence'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["events"])
+
+ # Values.yaml from current app must override values-prod.yaml from cloudharness
+ assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden'
+ assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod'
+
+def test_collect_compose_values_multiple_envs(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_multiple_envs'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"])
+
+
+ assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded'
+ assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded'
+ assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding'
+
+
+
+def test_collect_compose_values_wrong_dependencies_validate(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_wrong_dependencies_validate'
+ with pytest.raises(ValuesValidationException):
+ create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
+ try:
+ create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
+
+ except ValuesValidationException as e:
+ pytest.fail("Should not error because of wrong soft dependency")
+
+ with pytest.raises(ValuesValidationException):
+ create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
+ with pytest.raises(ValuesValidationException):
+ create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
+
+
+def test_collect_compose_values_build_dependencies(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_build_dependencies'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["myapp"])
+
+ assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
+ assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed"
+ assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
+ assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
+
+def test_collect_compose_values_build_dependencies_nodeps(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_nodeps'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["events"])
+
+
+ assert 'cloudharness-flask' not in values[KEY_TASK_IMAGES], "Cloudharness-flask is not included in the build dependencies"
+ assert 'cloudharness-base' not in values[KEY_TASK_IMAGES], "Cloudharness-base is not included in the build dependencies"
+ assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
+ assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
+
+
+def test_collect_compose_values_build_dependencies_exclude(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_exclude'
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"])
+
+
+ assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
+ assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed"
+ assert 'workflows-extract-download' not in values[KEY_TASK_IMAGES], "workflows-extract-download has been explicitly excluded"
+
+
+def test_clear_unused_dbconfig(tmp_path):
+ out_folder = tmp_path / 'test_clear_unused_dbconfig'
+
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ env='withpostgres', local=False, include=["myapp"], exclude=["legacy"])
+
+ # There is a DB config
+ assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
+
+ db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE]
+ # postgres is set, but other entries are not.
+ assert db_config['postgres'] is not None
+ assert db_config['postgres']['image'].startswith('postgres:')
+
+ # However, it seems that even after removing unused entries,
+ # the final instance of the HarnessMainConfig class that is created
+ # adds back those entries and sets them to None.
+ assert db_config['mongo'] is None
+ assert db_config['neo4j'] is None
+
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ env='withmongo', local=False, include=["myapp"], exclude=["legacy"])
+
+ assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
+ db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE]
+
+ # mongo is set, but other entries are not.
+ assert db_config['mongo'] is not None
+ assert db_config['mongo']['image'].startswith('mongo:')
+ assert db_config['neo4j'] is None
+
+ assert db_config['postgres'] is None
+
+
+def test_clear_all_dbconfig_if_nodb(tmp_path):
+ out_folder = tmp_path / 'test_clear_all_dbconfig_if_nodb'
+
+ values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
+ env='withoutdb', local=False, include=["myapp"], exclude=["legacy"])
+
+ # There is a DB config
+ assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
+
+ # But it is None
+ db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE]
+ assert db_config is None
+
+
+def test_tag_hash_generation():
+ v1 = generate_tag_from_content(RESOURCES)
+ v2 = generate_tag_from_content(RESOURCES, ignore=['myapp'])
+ assert v1 != v2
+ v3 = generate_tag_from_content(RESOURCES, ignore=['*/myapp/*'])
+ assert v3 != v1
+ v4 = generate_tag_from_content(RESOURCES, ignore=['applications/myapp/*'])
+ assert v4 == v3
+ v5 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*'])
+ assert v5 == v4
+
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
+ try:
+ fname.write_text('a')
+
+ v6 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*'])
+ assert v6 == v5
+ v7 = generate_tag_from_content(RESOURCES)
+ assert v7 != v1
+ finally:
+ fname.unlink()
+
+
+def test_collect_compose_values_auto_tag(tmp_path):
+ out_folder = tmp_path / 'test_collect_compose_values_auto_tag'
+ def create():
+ return create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
+ exclude=['events'], domain="my.local",
+ namespace='test', env='dev', local=False, tag=None, registry='reg')
+
+ BASE_KEY = "cloudharness-base"
+ values = create()
+
+ # Auto values are set by using the directory hash
+ assert 'reg/cloudharness/myapp:' in values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image']
+ assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image
+ assert 'cloudharness/myapp-mytask' in values[KEY_TASK_IMAGES]['myapp-mytask']
+ assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == values.apps['myapp'].harness.deployment.image
+ v1 = values.apps['myapp'].harness.deployment.image
+ c1 = values["task-images"]["my-common"]
+ b1 = values["task-images"][BASE_KEY]
+ d1 = values["task-images"]["cloudharness-flask"]
+
+ values = create()
+ assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value"
+ assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore"
+
+
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
+ try:
+ fname.write_text('a')
+
+ values = create()
+ assert v1 != values.apps['myapp'].harness.deployment.image, "Adding the file changed the hash value"
+ v2 = values.apps['myapp'].harness.deployment.image
+ assert values["task-images"][BASE_KEY] == b1, "Application files should be ignored for base image following the root .dockerignore"
+ finally:
+ fname.unlink()
+
+ try:
+ fname.write_text('a')
+
+ values = create()
+ assert v2 == values.apps['myapp'].harness.deployment.image, "Recreated an identical file, the hash value should be the same"
+ finally:
+ fname.unlink()
+
+
+ fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored'
+ try:
+ fname.write_text('a')
+
+
+ values = create()
+ assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore"
+
+ assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the file is ignored in the .dockerignore"
+ finally:
+ fname.unlink()
+
+ # Dependencies test: if a dependency is changed, the hash should change
+ fname = Path(RESOURCES) / 'infrastructure' / 'common-images' / 'my-common' / 'afile'
+
+ try:
+ fname.write_text('a')
+
+ values = create()
+
+ assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change"
+ assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change"
+ finally:
+ fname.unlink()
+
+
+ fname = Path(CLOUDHARNESS_ROOT) / 'atestfile'
+ try:
+ fname.write_text('a')
+
+ values = create()
+
+ assert b1 != values["task-images"][BASE_KEY], "Content for base image is changed, the hash should change"
+ assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change"
+ assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change"
+ finally:
+ fname.unlink()
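As test_tag_hash_generation exercises above, the ignore entries behave as glob patterns matched against paths relative to the content root, so "applications/myapp/*" and "/applications/myapp/*" produce the same hash. A rough approximation of that matching in Python (an illustration, not the actual implementation):

    from fnmatch import fnmatch

    def is_ignored(rel_path, ignore_patterns):
        # Approximation: skip a file when any pattern matches its relative
        # path; a leading '/' anchors to the content root, which for
        # relative paths is equivalent to no leading slash at all.
        rel_path = rel_path.lstrip('/')
        return any(fnmatch(rel_path, p.lstrip('/')) for p in ignore_patterns)

    assert is_ignored('applications/myapp/afile.txt', ['/applications/myapp/*'])
    assert is_ignored('applications/myapp/afile.txt', ['applications/myapp/*'])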
From b125dfc1399056c27f90b789c4726f508a2bc239 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 08:52:39 -0600
Subject: [PATCH 59/94] CH-100 Add tests for docker compose
---
.../deploy/templates-compose/mytemplate.yaml | 13 +++++++++++++
.../tests/test_dockercompose.py | 19 +++++++++++++++++++
2 files changed, 32 insertions(+)
diff --git a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml
index e69de29bb..aa1a21409 100644
--- a/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml
+++ b/tools/deployment-cli-tools/tests/resources/applications/myapp/deploy/templates-compose/mytemplate.yaml
@@ -0,0 +1,13 @@
+cloudharness-metadata:
+ path: resources/generated/test.yaml
+
+data: |-
+ mykey: myvalue
+
+---
+
+cloudharness-metadata:
+ path: resources/generated/test2.yaml
+
+data: |-
+ mykey2: myvalue2
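The fixture above uses the cloudharness-metadata convention: each YAML document declares a target "path" plus a "data" payload, and the compose generator writes every payload to its path under the output folder (the step handled by __post_process_multiple_document_docker_compose, touched again in PATCH 69). A hedged sketch of that splitting step, assuming documents shaped exactly like this fixture:

    import yaml
    from pathlib import Path

    def write_metadata_documents(rendered, out_folder):
        # Split the rendered template stream into YAML documents and write
        # each 'data' payload to its declared 'cloudharness-metadata.path'.
        for document in yaml.safe_load_all(rendered):
            if not document or 'cloudharness-metadata' not in document:
                continue  # not a metadata document
            target = Path(out_folder) / document['cloudharness-metadata']['path']
            target.parent.mkdir(parents=True, exist_ok=True)
            target.write_text(document['data'])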
diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py
index c59d552f9..bd6c0a67e 100644
--- a/tools/deployment-cli-tools/tests/test_dockercompose.py
+++ b/tools/deployment-cli-tools/tests/test_dockercompose.py
@@ -64,13 +64,22 @@ def test_collect_compose_values(tmp_path):
compose_path = out_folder / COMPOSE_PATH
# Check files
+ assert exists(out_folder / 'docker-compose.yaml')
assert exists(compose_path)
assert exists(compose_path / 'values.yaml')
+ assert exists(compose_path / 'allvalues.yaml')
assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
+ assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
+ content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
+ assert content == 'mykey: myvalue'
+ content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
+ assert content == 'mykey2: myvalue2'
+
# Check base and task images
assert values[KEY_TASK_IMAGES]
assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
@@ -123,19 +132,29 @@ def test_collect_compose_values_noreg_noinclude(tmp_path):
compose_path = out_path / COMPOSE_PATH
# Check files
+ assert exists(out_path / 'docker-compose.yaml')
assert exists(compose_path)
assert exists(compose_path / 'values.yaml')
+ assert exists(compose_path / 'allvalues.yaml')
assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
+ assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
+ assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
+ content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
+ assert content == 'mykey: myvalue'
+ content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
+ assert content == 'mykey2: myvalue2'
+
assert values[KEY_TASK_IMAGES]
assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
assert values[KEY_TASK_IMAGES]['cloudharness-base'] == 'cloudharness/cloudharness-base:1'
assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1'
+
def test_collect_compose_values_precedence(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_precedence'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
From 2c7e26488e787e372dc03f07ae2145417f145060 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 08:58:49 -0600
Subject: [PATCH 60/94] CH-100 Fix type hinting for Python 3.9
---
.../ch_cli_tools/configurationgenerator.py | 3 ++-
tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 3 ++-
tools/deployment-cli-tools/ch_cli_tools/helm.py | 3 ++-
3 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
index ba974dfc0..e371bb530 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
@@ -1,6 +1,7 @@
"""
Utilities to create a helm chart from a CloudHarness directory structure
"""
+from typing import Union
import yaml
from ruamel.yaml import YAML
import os
@@ -38,7 +39,7 @@
class ConfigurationGenerator(object):
- def __init__(self, root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ def __init__(self, root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None, templates_path=HELM_PATH):
assert domain, 'A domain must be specified'
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 1a96e5627..4b2c374b9 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -1,6 +1,7 @@
"""
Utilities to create a helm chart from a CloudHarness directory structure
"""
+from typing import Union
import yaml
from ruamel.yaml import YAML
import os
@@ -18,7 +19,7 @@
from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates
-def create_docker_compose_configuration(root_paths, tag: str | int | None='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+def create_docker_compose_configuration(root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index daae2d160..e58070fdb 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -1,6 +1,7 @@
"""
Utilities to create a helm chart from a CloudHarness directory structure
"""
+from typing import Union
import yaml
import os
import logging
@@ -26,7 +27,7 @@ def deploy(namespace, output_path='./deployment'):
f"helm upgrade {namespace} {helm_path} -n {namespace} --install --reset-values".split())
-def create_helm_chart(root_paths, tag: str | None | int ='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+def create_helm_chart(root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
From 866dee9cf928ae247beb0e6c5fdf83a3cb5b5f47 Mon Sep 17 00:00:00 2001
From: aranega
Date: Mon, 1 Apr 2024 09:13:36 -0600
Subject: [PATCH 61/94] CH-100 Add conditional test for docker compose if
"helm" is installed
---
.../tests/test_dockercompose.py | 30 +++++++++++--------
1 file changed, 18 insertions(+), 12 deletions(-)
diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py
index bd6c0a67e..86fff944a 100644
--- a/tools/deployment-cli-tools/tests/test_dockercompose.py
+++ b/tools/deployment-cli-tools/tests/test_dockercompose.py
@@ -1,12 +1,15 @@
from ch_cli_tools.dockercompose import *
from ch_cli_tools.configurationgenerator import *
import pytest
+import shutil
HERE = os.path.dirname(os.path.realpath(__file__))
RESOURCES = os.path.join(HERE, 'resources')
CLOUDHARNESS_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
COMPOSE_PATH = COMPOSE
+HELM_IS_INSTALLED = shutil.which("helm") is not None
+
def exists(path):
return path.exists()
@@ -71,14 +74,15 @@ def test_collect_compose_values(tmp_path):
assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
- assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
- assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
- content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
- assert content == 'mykey: myvalue'
- content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
- assert content == 'mykey2: myvalue2'
+ if HELM_IS_INSTALLED:
+ assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
+ assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
+ content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
+ assert content == 'mykey: myvalue'
+ content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
+ assert content == 'mykey2: myvalue2'
# Checl base and task images
assert values[KEY_TASK_IMAGES]
@@ -139,14 +143,16 @@ def test_collect_compose_values_noreg_noinclude(tmp_path):
assert exists(compose_path / 'resources' / 'accounts' / 'realm.json')
assert exists(compose_path / 'resources' / 'accounts' / 'aresource.txt')
assert exists(compose_path / 'resources' / 'myapp' / 'aresource.txt')
- assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
- assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
assert exists(compose_path / 'templates' / 'myapp' / 'mytemplate.yaml')
- content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
- assert content == 'mykey: myvalue'
- content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
- assert content == 'mykey2: myvalue2'
+ if HELM_IS_INSTALLED:
+ assert exists(compose_path / 'resources' / 'generated' / 'test.yaml')
+ assert exists(compose_path / 'resources' / 'generated' / 'test2.yaml')
+ content = (compose_path / 'resources' / 'generated' / 'test.yaml').read_text()
+ assert content == 'mykey: myvalue'
+ content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
+ assert content == 'mykey2: myvalue2'
assert values[KEY_TASK_IMAGES]
assert 'cloudharness-base' in values[KEY_TASK_IMAGES]
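The HELM_IS_INSTALLED guard silently skips the generated-resource assertions when helm is not on the PATH. An alternative sketch that records the skip in the pytest report instead (hypothetical, not part of this series):

    import shutil
    import pytest

    requires_helm = pytest.mark.skipif(
        shutil.which("helm") is None,
        reason="helm executable is required to render the compose templates",
    )

    @requires_helm
    def test_generated_resources(tmp_path):
        ...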
From f226829ca70f7ff86aa755db461aa17f6562fbba Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 2 Apr 2024 06:00:53 -0600
Subject: [PATCH 62/94] CH-100 Update scripts
---
deployment-configuration/compose/Chart.yaml | 2 +-
deployment-configuration/compose/README.md | 5 ++---
2 files changed, 3 insertions(+), 4 deletions(-)
diff --git a/deployment-configuration/compose/Chart.yaml b/deployment-configuration/compose/Chart.yaml
index f294c3e78..83bf4933e 100644
--- a/deployment-configuration/compose/Chart.yaml
+++ b/deployment-configuration/compose/Chart.yaml
@@ -1,6 +1,6 @@
apiVersion: v1
appVersion: "0.0.1"
-description: CloudHarness Helm Chart
+description: CloudHarness Docker Compose
name: cloudharness
version: 0.0.1
maintainers:
diff --git a/deployment-configuration/compose/README.md b/deployment-configuration/compose/README.md
index abeab69d3..391b61c6b 100644
--- a/deployment-configuration/compose/README.md
+++ b/deployment-configuration/compose/README.md
@@ -1,4 +1,3 @@
-# CloudHarness Helm chart: deploy CloudHarness to k8s
-
-Helm is used to define the CloudHarness deployment on Kubernetes. For further information about Helm, see https://helm.sh.
+# CloudHarness Docker Compose: deploy CloudHarness to Docker Compose
+Helm is used to define the templates that generate the CloudHarness deployment for Docker Compose. For further information about Helm, see https://helm.sh.
From ac4b863bed7dc9a2730ed42037ef4f35a2906179 Mon Sep 17 00:00:00 2001
From: aranega
Date: Tue, 2 Apr 2024 10:33:58 -0600
Subject: [PATCH 63/94] CH-100 Fix imports of harness-deployment
---
tools/deployment-cli-tools/harness-application | 5 ++---
tools/deployment-cli-tools/harness-deployment | 3 ++-
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/tools/deployment-cli-tools/harness-application b/tools/deployment-cli-tools/harness-application
index a584c05d9..465328250 100644
--- a/tools/deployment-cli-tools/harness-application
+++ b/tools/deployment-cli-tools/harness-application
@@ -2,7 +2,6 @@
import sys
import os
-import shutil
import re
import tempfile
@@ -25,8 +24,8 @@ if __name__ == "__main__":
parser.add_argument('name', metavar='name', type=str,
help='Application name')
parser.add_argument('-t', '--template', dest='templates', action="append", default=['base',],
- help="""Add a template name.
-
+ help="""Add a template name.
+
Available templates:
- flask-server (backend flask server based on openapi)
- webapp (webapp including backend and frontend)
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index 9a5cc78cc..40d4b09a9 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -5,7 +5,8 @@ import sys
import os
from ch_cli_tools.dockercompose import create_docker_compose_configuration
-from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy
+from ch_cli_tools.helm import create_helm_chart, deploy
+from ch_cli_tools.configurationgenerator import hosts_info
from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration
from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file
from ch_cli_tools.preprocessing import preprocess_build_overrides
From 9b4dcaa9abea51e672358b64f9e47966b8e2be0f Mon Sep 17 00:00:00 2001
From: aranega
Date: Thu, 18 Apr 2024 10:11:59 -0600
Subject: [PATCH 64/94] CH-100 Fix issue with TAG policy in skaffold for docker
compose
---
.../compose/templates/allvalues-template.yaml | 2 +-
tools/deployment-cli-tools/ch_cli_tools/skaffold.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/deployment-configuration/compose/templates/allvalues-template.yaml b/deployment-configuration/compose/templates/allvalues-template.yaml
index d69538aa0..d0aa2866b 100644
--- a/deployment-configuration/compose/templates/allvalues-template.yaml
+++ b/deployment-configuration/compose/templates/allvalues-template.yaml
@@ -8,7 +8,6 @@ to replace the secrets values we create a dict with the structure:
thus with an empty secrets node
and then it's mergeOverwrite the copy of the .Values we created
resulting in a copy of the .Values with all secrets being ""
-*/ -}}
cloudharness-metadata:
path: allvalues2.yaml
data: |
@@ -18,3 +17,4 @@ data: |
{{- $tmp := mergeOverwrite $values_copy $new_secrets }}
{{- end }}
{{ $values_copy | toYaml | indent 4 }}
+*/ -}}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index 7859d043b..b78f8b9e6 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -201,7 +201,7 @@ def identify_unicorn_based_main(candidates):
}
skaffold_conf['build']['tagPolicy'] = {
'envTemplate': {
- 'template': "TAG"
+ 'template': '"{{.TAG}}"'
}
}
From 10f2c0f1f4147247a71ee0a8ddacd0145954f5b0 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 19 Apr 2024 10:32:18 -0600
Subject: [PATCH 65/94] CH-100 Update configuration for db in docker compose
---
.../compose/templates/auto-compose.yaml | 8 ++++----
.../compose/templates/auto-database.yaml | 4 ++--
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index ca024eddf..c99023fa0 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -19,8 +19,8 @@ services:
- "443:443"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
- - "./certs/:/certs/:ro"
- - "./traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
+ - "./compose/traefik/certs/:/certs/:ro"
+ - "./compose/traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
{{- range $app_name, $app_config := .Values.apps }}
{{- if has $app_name (list "argo" "events" "nfsserver") -}}
@@ -76,10 +76,10 @@ services:
replicas: {{ $deployment.replicas | default 1 }}
resources:
limits:
- cpus: {{ $deployment.resources.limits.cpu | default "50m" }}
+ cpus: {{ $deployment.resources.limits.cpu | default "0.5" }}
memory: {{ trimSuffix "i" $deployment.resources.limits.memory | default "64M" }}
reservations:
- cpus: {{ $deployment.resources.requests.cpu | default "25m" }}
+ cpus: {{ $deployment.resources.requests.cpu | default "0.25" }}
memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }}
{{- with $deployment.command }}
# entrypoint: {{ cat . $deployment.args }}
diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml
index 569bb2209..93fd22ffb 100644
--- a/deployment-configuration/compose/templates/auto-database.yaml
+++ b/deployment-configuration/compose/templates/auto-database.yaml
@@ -13,10 +13,10 @@
deploy:
resources:
limits:
- cpus: {{ .limits.cpu | default "1000m" }}
+ cpus: {{ .limits.cpu | default "0.75" }}
memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
reservations:
- cpus: {{ .requests.cpu | default "100m" }}
+ cpus: {{ .requests.cpu | default "0.50" }}
memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
{{- end }}
volumes:
From 83d9b8a3de978d480a4e77620eca1390e975d47c Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 19 Apr 2024 11:50:21 -0600
Subject: [PATCH 66/94] CH-100 Add support to link databases with service in
docker compose
---
.../compose/templates/auto-compose.yaml | 18 ++++++++++++++----
1 file changed, 14 insertions(+), 4 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index c99023fa0..f284c4be1 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -107,11 +107,18 @@ services:
{{- range $app_config.harness.env }}
- {{ .name }}={{ .value }}
{{- end }}
- {{- with (concat (without $app_config.harness.dependencies.hard "argo") $app_config.harness.dependencies.soft) }}
+ {{- with compact
+ (append (concat (without $app_config.harness.dependencies.hard "argo")
+ $app_config.harness.dependencies.soft)
+ (dig "database" "name" "" $app_config.harness)) }}
links:
{{- range . -}}
{{- $service := .}}
{{- range $name, $conf := $.Values.apps }}
+ {{- if hasSuffix "-db" $service }}
+ - {{ $service }}:{{ $service }}.{{ $.Values.domain }}
+ {{- break -}}
+ {{- end -}}
{{- if eq $conf.harness.name $service }}
{{- if has $name (list "events" "nfsserver") }}
# - {{ $name }}:{{ $service }}.{{ $.Values.domain }}
@@ -128,14 +135,16 @@ services:
{{/* "compact" in the beginning is to remove empty values */}}
{{- with compact
(append
- (without $app_config.harness.dependencies.hard "argo" )
+ (without $app_config.harness.dependencies.hard "argo" "events")
(dig "database" "name" "" $app_config.harness)) -}}
- {{- with without $app_config.harness.dependencies.hard "argo" "events" }}
depends_on:
- {{- end }}
{{- range . -}}
{{- $service := .}}
{{- range $name, $conf := $.Values.apps -}}
+ {{- if hasSuffix "-db" $service }}
+ - {{ $service }}
+ {{- break -}}
+ {{- end -}}
{{- if eq $conf.harness.name $service }}
{{- if has $name (list "events" "nfsserver") }}
# - {{ $name }}
@@ -144,6 +153,7 @@ services:
{{- end }}
{{- break -}}
{{- end -}}
+
{{- end -}}
{{- end }}
{{- end }}
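In plain terms, a service is now linked to its hard dependencies (minus argo), its soft dependencies and, when configured, its database service; "compact" drops the empty string produced when no database name is set. A rough Python equivalent of the list construction (names illustrative only):

    def compose_links(harness):
        # Hard dependencies without argo, plus soft dependencies, plus the
        # database name when present (dig "database" "name" "" in the template).
        deps = [d for d in harness["dependencies"]["hard"] if d != "argo"]
        deps += harness["dependencies"]["soft"]
        deps.append((harness.get("database") or {}).get("name", ""))
        return [d for d in deps if d]  # 'compact': remove empty entries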
From e5361ca0a462d035499f24782d30bac6d37ba4e1 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 19 Apr 2024 12:32:07 -0600
Subject: [PATCH 67/94] CH-100 Change location of traefik config files
---
deployment-configuration/compose/templates/auto-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index f284c4be1..a3b468540 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -19,8 +19,8 @@ services:
- "443:443"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
- - "./compose/traefik/certs/:/certs/:ro"
- - "./compose/traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
+ - "./traefik/certs/:/certs/:ro"
+ - "./traefik/traefik.yaml:/etc/traefik/dynamic_conf/conf.yml:ro"
{{- range $app_name, $app_config := .Values.apps }}
{{- if has $app_name (list "argo" "events" "nfsserver") -}}
From 4c5d36905e157c740fbb6c6c72d95e839d993203 Mon Sep 17 00:00:00 2001
From: aranega
Date: Fri, 19 Apr 2024 20:26:30 -0600
Subject: [PATCH 68/94] CH-100 Add specific option to expose locally a DB
---
.../compose/templates/auto-database.yaml | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml
index 93fd22ffb..9d56ae003 100644
--- a/deployment-configuration/compose/templates/auto-database.yaml
+++ b/deployment-configuration/compose/templates/auto-database.yaml
@@ -9,6 +9,12 @@
{{- range $port := $db_infos.ports }}
- {{ $port.port | quote }}
{{- end }}
+ {{- with .local_expose }}
+ ports:
+ {{- range $port := $db_infos.ports }}
+ - 127.0.0.1:{{ $port.port }}:{{ $port.port }}
+ {{- end }}
+ {{- end }}
{{- with .resources }}
deploy:
resources:
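A database entry opts into this by setting local_expose in its values; the generator then emits 127.0.0.1 port bindings alongside the expose list. A hypothetical values fragment, written as the Python dict the generator would consume (field names inferred from auto-database.yaml; the exact schema is not shown here):

    # Hypothetical database values enabling local exposure; 'ports' mirrors
    # the $db_infos.ports range iterated in auto-database.yaml.
    database_values = {
        "name": "myapp-db",
        "local_expose": True,   # adds "127.0.0.1:5432:5432"-style bindings
        "ports": [{"port": 5432}],
    }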
From 4f5f34ca9c94f52b41bbe65a4d1b58e247209841 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Thu, 12 Sep 2024 19:21:51 +0200
Subject: [PATCH 69/94] CH-110 fix dc resources mapping when not numbers
---
.../compose/templates/_helpers.yaml | 11 ++++
.../compose/templates/auto-compose.yaml | 25 +++------
.../compose/templates/auto-database.yaml | 4 +-
.../compose/templates/auto-gatekeepers.yaml | 4 +-
.../ch_cli_tools/dockercompose.py | 55 ++++++++++---------
5 files changed, 51 insertions(+), 48 deletions(-)
create mode 100644 deployment-configuration/compose/templates/_helpers.yaml
diff --git a/deployment-configuration/compose/templates/_helpers.yaml b/deployment-configuration/compose/templates/_helpers.yaml
new file mode 100644
index 000000000..49c3c4563
--- /dev/null
+++ b/deployment-configuration/compose/templates/_helpers.yaml
@@ -0,0 +1,11 @@
+{{- define "convertToDecimal" -}}
+ {{- $value := . -}}
+ {{- if hasSuffix "m" $value -}}
+ {{- $number := replace "m" "" $value | float64 -}}
+ {{- $result := divf $number 1000.0 -}}
+ {{- printf "%.3f" $result -}}
+ {{- else -}}
+ {{- $value -}}
+ {{- end -}}
+
+{{- end -}}
\ No newline at end of file
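convertToDecimal normalizes Kubernetes-style millicore strings such as "500m" into the fractional CPU counts Docker Compose expects, passing anything else through unchanged. The same conversion sketched in Python for reference:

    def convert_to_decimal(value):
        # "500m" -> "0.500" (millicores to cores); other values pass through.
        value = str(value)
        if value.endswith("m"):
            return f"{float(value[:-1]) / 1000.0:.3f}"
        return value

    assert convert_to_decimal("500m") == "0.500"
    assert convert_to_decimal(0.25) == "0.25"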
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index a3b468540..46b076612 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -76,10 +76,10 @@ services:
replicas: {{ $deployment.replicas | default 1 }}
resources:
limits:
- cpus: {{ $deployment.resources.limits.cpu | default "0.5" }}
+ cpus: {{ include "convertToDecimal" ( $deployment.resources.limits.cpu | default 0.5) }}
memory: {{ trimSuffix "i" $deployment.resources.limits.memory | default "64M" }}
reservations:
- cpus: {{ $deployment.resources.requests.cpu | default "0.25" }}
+ cpus: {{ include "convertToDecimal" ( $deployment.resources.requests.cpu | default 0.01) }}
memory: {{ trimSuffix "i" $deployment.resources.requests.memory | default "32M" }}
{{- with $deployment.command }}
# entrypoint: {{ cat . $deployment.args }}
@@ -107,10 +107,13 @@ services:
{{- range $app_config.harness.env }}
- {{ .name }}={{ .value }}
{{- end }}
+ {{- if or $app_config.harness.dependencies.hard $app_config.harness.dependencies.soft }}
{{- with compact
(append (concat (without $app_config.harness.dependencies.hard "argo")
$app_config.harness.dependencies.soft)
(dig "database" "name" "" $app_config.harness)) }}
+
+
links:
{{- range . -}}
{{- $service := .}}
@@ -120,7 +123,7 @@ services:
{{- break -}}
{{- end -}}
{{- if eq $conf.harness.name $service }}
- {{- if has $name (list "events" "nfsserver") }}
+ {{- if has $name (list "events" "nfsserver" "jupyterhub") }}
# - {{ $name }}:{{ $service }}.{{ $.Values.domain }}
{{- else }}
- {{ $name }}:{{ $service }}.{{ $.Values.domain }}
@@ -137,6 +140,7 @@ services:
(append
(without $app_config.harness.dependencies.hard "argo" "events")
(dig "database" "name" "" $app_config.harness)) -}}
+
depends_on:
{{- range . -}}
{{- $service := .}}
@@ -157,6 +161,7 @@ services:
{{- end -}}
{{- end }}
{{- end }}
+ {{- end }}
volumes:
- ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
{{- range $file_name, $_ := $app_config.harness.secrets }}
@@ -176,20 +181,6 @@ services:
{{- end }}
{{- end}}
{{- end }}
- {{/*
- {{- if $.Values.local }}
- # Extra /etc/hosts list
- {{- $domain := $.Values.domain }}
- {{- $ip := $.Values.localIp }}
- extra_hosts:
- - "{{ $.Values.domain }}={{ $ip }}"
- {{- range $app := $.Values.apps }}
- {{- with $app.harness.subdomain}}
- - "{{ . }}.{{ $domain }}={{ $ip }}"
- {{- end }}
- {{- end }}
- {{- end }}
- */}}
{{- if not $isSecured }}
labels:
- "traefik.enable=true"
diff --git a/deployment-configuration/compose/templates/auto-database.yaml b/deployment-configuration/compose/templates/auto-database.yaml
index 9d56ae003..c2c4b6349 100644
--- a/deployment-configuration/compose/templates/auto-database.yaml
+++ b/deployment-configuration/compose/templates/auto-database.yaml
@@ -19,10 +19,10 @@
deploy:
resources:
limits:
- cpus: {{ .limits.cpu | default "0.75" }}
+ cpus: {{ include "convertToDecimal" (.limits.cpu | default "0.75") }}
memory: {{ trimSuffix "i" .limits.memory | default "2G" }}
reservations:
- cpus: {{ .requests.cpu | default "0.50" }}
+ cpus: {{ include "convertToDecimal" (.requests.cpu | default "0.75") }}
memory: {{ trimSuffix "i" .requests.memory | default "512M" }}
{{- end }}
volumes:
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index 4094a9250..fcd7d19a7 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -15,10 +15,10 @@
replicas: 1
resources:
limits:
- cpus: 100m
+ cpus: 0.1
memory: 64M
reservations:
- cpus: 50m
+ cpus: 0.001
memory: 32M
environment:
- PROXY_CONFIG_FILE=/opt/proxy.yml
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index 4b2c374b9..bc6f7ba67 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -19,14 +19,14 @@
from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates
-def create_docker_compose_configuration(root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True,
- output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- namespace=None) -> HarnessMainConfig:
+def create_docker_compose_configuration(root_paths, tag: Union[str, int, None] = 'latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
+ namespace=None) -> HarnessMainConfig:
if (type(env)) == str:
env = [env]
return CloudHarnessDockerCompose(root_paths, tag=tag, registry=registry, local=local, domain=domain, exclude=exclude, secured=secured,
- output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env,
- namespace=namespace, templates_path=COMPOSE).process_values()
+ output_path=output_path, include=include, registry_secret=registry_secret, tls=tls, env=env,
+ namespace=namespace, templates_path=COMPOSE).process_values()
class CloudHarnessDockerCompose(ConfigurationGenerator):
@@ -87,7 +87,9 @@ def generate_docker_compose_yaml(self):
logging.info(f'Generate docker compose configuration in: {dest_compose_yaml}, using templates from {compose_templates}')
command = f"helm template {compose_templates} > {dest_compose_yaml}"
- subprocess.call(command, shell=True)
+ res = subprocess.call(command, shell=True)
+ if res != 0:
+            raise Exception(f"Error generating docker-compose.yaml. See the output above for details, or try running:\n\n{command} --debug")
self.__post_process_multiple_document_docker_compose(dest_compose_yaml)
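An equivalent way to express the added failure check is `subprocess.run` with `check=True`; a minimal sketch, assuming the same shell command string (the paths below are placeholders):

```python
import subprocess

# Placeholder command; in the patch this is built from the compose
# templates path and the destination docker-compose.yaml path.
command = "helm template ./deployment/compose > ./deployment/docker-compose.yaml"

# subprocess.run(..., check=True) raises CalledProcessError on a
# non-zero exit, replacing the manual return-code test on subprocess.call.
try:
    subprocess.run(command, shell=True, check=True)
except subprocess.CalledProcessError as e:
    raise Exception(
        f"Error generating docker-compose.yaml (exit {e.returncode}). "
        f"Try running:\n\n{command} --debug"
    ) from e
```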
@@ -280,7 +282,6 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
return values
-
def inject_entry_points_commands(self, helm_values, image_path, app_path):
context_path = os.path.relpath(image_path, '.')
@@ -294,23 +295,23 @@ def inject_entry_points_commands(self, helm_values, image_path, app_path):
def identify_unicorn_based_main(candidates, app_path):
- import re
- gunicorn_pattern = re.compile(r"gunicorn")
- # sort candidates, shortest path first
- for candidate in sorted(candidates,key=lambda x: len(x.split("/"))):
- dockerfile_path = f"{candidate}/.."
- while not os.path.exists(f"{dockerfile_path}/Dockerfile") and os.path.abspath(dockerfile_path) != os.path.abspath(app_path):
- dockerfile_path += "/.."
- dockerfile = f"{dockerfile_path}/Dockerfile"
- if not os.path.exists(dockerfile):
- continue
- with open(dockerfile, 'r') as file:
- if re.search(gunicorn_pattern, file.read()):
- return candidate
- requirements = f"{candidate}/../requirements.txt"
- if not os.path.exists(requirements):
- continue
- with open(requirements, 'r') as file:
- if re.search(gunicorn_pattern, file.read()):
- return candidate
- return None
\ No newline at end of file
+ import re
+ gunicorn_pattern = re.compile(r"gunicorn")
+ # sort candidates, shortest path first
+ for candidate in sorted(candidates, key=lambda x: len(x.split("/"))):
+ dockerfile_path = f"{candidate}/.."
+ while not os.path.exists(f"{dockerfile_path}/Dockerfile") and os.path.abspath(dockerfile_path) != os.path.abspath(app_path):
+ dockerfile_path += "/.."
+ dockerfile = f"{dockerfile_path}/Dockerfile"
+ if not os.path.exists(dockerfile):
+ continue
+ with open(dockerfile, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ requirements = f"{candidate}/../requirements.txt"
+ if not os.path.exists(requirements):
+ continue
+ with open(requirements, 'r') as file:
+ if re.search(gunicorn_pattern, file.read()):
+ return candidate
+ return None
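The reindented detection walk above is dense; a condensed pathlib-based sketch of the same idea follows (hypothetical helper, not part of the patch; it checks one candidate instead of iterating over the sorted list):

```python
from pathlib import Path

# Condensed sketch of the gunicorn detection above: walk up from the
# candidate towards the app root until a Dockerfile is found, then look
# for "gunicorn" in that Dockerfile or in a sibling requirements.txt.
def mentions_gunicorn(candidate: str, app_path: str) -> bool:
    app_root = Path(app_path).resolve()
    here = Path(candidate).resolve().parent
    while not (here / "Dockerfile").exists() and here != app_root and here != here.parent:
        here = here.parent  # stop at the app root or the filesystem root
    probes = (here / "Dockerfile", Path(candidate).resolve().parent / "requirements.txt")
    return any(p.exists() and "gunicorn" in p.read_text() for p in probes)
```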
From bbc57d20618793233d987f72e97409924577944c Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Thu, 12 Sep 2024 19:22:10 +0200
Subject: [PATCH 70/94] chore: remove unneeded dependencies
---
libraries/cloudharness-utils/setup.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/libraries/cloudharness-utils/setup.py b/libraries/cloudharness-utils/setup.py
index 67de60c8d..e92773edf 100644
--- a/libraries/cloudharness-utils/setup.py
+++ b/libraries/cloudharness-utils/setup.py
@@ -22,9 +22,7 @@
REQUIREMENTS = [
'ruamel.yaml',
'cloudharness_model',
- 'docker',
- 'pyaml',
- 'cloudharness_model'
+ 'docker'
]
From 2a87ba5ac92dcfea2fe87fb65332d6eb70ba6ff6 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 11:22:21 +0200
Subject: [PATCH 71/94] CH-110 small fixes + tests
---
.../ch_cli_tools/configurationgenerator.py | 41 +++++++---------
.../deployment-cli-tools/ch_cli_tools/helm.py | 31 ------------
.../tests/test_dockercompose.py | 49 +++++++++----------
tools/deployment-cli-tools/tests/test_helm.py | 27 +++++-----
4 files changed, 52 insertions(+), 96 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
index e371bb530..2228e3af8 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
@@ -3,27 +3,21 @@
"""
from typing import Union
import yaml
-from ruamel.yaml import YAML
import os
import shutil
import logging
from hashlib import sha1
-import subprocess
-from functools import cache
import tarfile
from docker import from_env as DockerClient
from pathlib import Path
-import copy
from . import HERE, CH_ROOT
-from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
- DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH, COMPOSE
-from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
- get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \
- find_dockerfiles_paths, find_file_paths
-
-from .models import HarnessMainConfig
+from cloudharness_utils.constants import TEST_IMAGES_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
+ DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH
+from .utils import get_cluster_ip, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \
+ get_template, merge_configuration_directories, dict_merge, app_name_from_path, \
+ find_dockerfiles_paths
KEY_HARNESS = 'harness'
@@ -39,7 +33,7 @@
class ConfigurationGenerator(object):
- def __init__(self, root_paths, tag: Union[str, int, None]='latest', registry='', local=True, domain=None, exclude=(), secured=True,
+ def __init__(self, root_paths, tag: Union[str, int, None] = 'latest', registry='', local=True, domain=None, exclude=(), secured=True,
output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
namespace=None, templates_path=HELM_PATH):
assert domain, 'A domain must be specified'
@@ -81,7 +75,7 @@ def __init_deployment(self):
# Override for every cloudharness scaffolding
for root_path in self.root_paths:
copy_merge_base_deployment(dest_helm_chart_path=self.dest_deployment_path,
- base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH /self.templates_path)
+ base_helm_chart=root_path / DEPLOYMENT_CONFIGURATION_PATH / self.templates_path)
collect_apps_helm_templates(root_path, exclude=self.exclude, include=self.include,
dest_helm_chart_path=self.dest_deployment_path, templates_path=self.templates_path)
@@ -150,7 +144,9 @@ def _init_static_images(self, base_image_name):
img_name = image_name_from_dockerfile_path(os.path.basename(
static_img_dockerfile), base_name=base_image_name)
self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=static_img_dockerfile)
+ img_name, build_context_path=static_img_dockerfile,
+ dependencies=guess_build_dependencies_from_dockerfile(static_img_dockerfile)
+ )
def _assign_static_build_dependencies(self, helm_values):
for static_img_dockerfile in self.static_images:
@@ -178,7 +174,9 @@ def _init_base_images(self, base_image_name):
img_name = image_name_from_dockerfile_path(
os.path.basename(base_img_dockerfile), base_name=base_image_name)
self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag(
- img_name, build_context_path=root_path)
+ img_name, build_context_path=root_path,
+ dependencies=guess_build_dependencies_from_dockerfile(base_img_dockerfile)
+ )
self.static_images.update(find_dockerfiles_paths(
os.path.join(root_path, STATIC_IMAGES_PATH)))
@@ -312,10 +310,9 @@ def image_tag(self, image_name, build_context_path=None, dependencies=()):
logging.info(f"Ignoring {ignore}")
tag = generate_tag_from_content(build_context_path, ignore)
logging.info(f"Content hash: {tag}")
- dependencies = dependencies or guess_build_dependencies_from_dockerfile(f"{build_context_path}")
- tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest()
+ tag = sha1((tag + "".join(self.all_images.get(n, '') for n in dependencies)).encode("utf-8")).hexdigest()
logging.info(f"Generated tag: {tag}")
- app_name = image_name.split("/")[-1] # the image name can have a prefix
+ app_name = image_name.split("/")[-1] # the image name can have a prefix
self.all_images[app_name] = tag
return self.registry + image_name + (f':{tag}' if tag else '')
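For context on the hunk above: the image tag is the sha1 of the build context's content hash concatenated with the tags of the image's build dependencies, so changing any dependency retags the image. A simplified sketch of that combination (assuming the content tag has already been computed):

```python
from hashlib import sha1

# Simplified sketch of the tag combination: concatenate the content tag
# with the known tags of all build dependencies and hash the result, so
# a rebuild of any dependency propagates into this image's tag.
def combined_tag(content_tag: str, dependencies: list[str], all_images: dict) -> str:
    dep_tags = "".join(all_images.get(name, "") for name in dependencies)
    return sha1((content_tag + dep_tags).encode("utf-8")).hexdigest()

print(combined_tag("abc123", ["cloudharness-base"], {"cloudharness-base": "ffee00"}))
```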
@@ -347,11 +344,11 @@ def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart):
return
if dest_helm_chart_path.exists():
logging.info("Merging/overriding all files in directory %s",
- dest_helm_chart_path)
+ dest_helm_chart_path)
merge_configuration_directories(f"{base_helm_chart}", f"{dest_helm_chart_path}")
else:
logging.info("Copying base deployment chart from %s to %s",
- base_helm_chart, dest_helm_chart_path)
+ base_helm_chart, dest_helm_chart_path)
shutil.copytree(base_helm_chart, dest_helm_chart_path)
@@ -365,7 +362,7 @@ def collect_helm_values(deployment_root, env=()):
for e in env:
specific_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH,
- f'values-template-{e}.yaml')
+ f'values-template-{e}.yaml')
if os.path.exists(specific_template_path):
logging.info(
"Specific environment values template found: " + specific_template_path)
@@ -652,4 +649,4 @@ def collect_apps_helm_templates(search_root, dest_helm_chart_path, templates_pat
# "Merging/overriding all files in directory %s", dest_dir)
# merge_configuration_directories(subchart_dir, dest_dir)
# else:
-# shutil.copytree(subchart_dir, dest_dir)
\ No newline at end of file
+# shutil.copytree(subchart_dir, dest_dir)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index 693a8977e..d1725c67e 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -155,37 +155,6 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- def __clear_unused_db_configuration(self, harness_config):
- database_config = harness_config[KEY_DATABASE]
- database_type = database_config.get('type', None)
- if database_type is None:
- del harness_config[KEY_DATABASE]
- return
- db_specific_keys = [k for k, v in database_config.items()
- if isinstance(v, dict) and 'image' in v and 'ports' in v]
- for db in db_specific_keys:
- if database_type != db:
- del database_config[db]
-
- def image_tag(self, image_name, build_context_path=None, dependencies=()):
- tag = self.tag
- if tag is None and not self.local:
- logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- ignore_path = os.path.join(build_context_path, '.dockerignore')
- ignore = set(DEFAULT_IGNORE)
- if os.path.exists(ignore_path):
- with open(ignore_path) as f:
- ignore = ignore.union({line.strip() for line in f if line.strip() and not line.startswith('#')})
- logging.info(f"Ignoring {ignore}")
- tag = generate_tag_from_content(build_context_path, ignore)
- logging.info(f"Content hash: {tag}")
- dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- tag = sha1((tag + "".join(self.all_images.get(n, '') for n in dependencies)).encode("utf-8")).hexdigest()
- logging.info(f"Generated tag: {tag}")
- app_name = image_name.split("/")[-1] # the image name can have a prefix
- self.all_images[app_name] = tag
- return self.registry + image_name + (f':{tag}' if tag else '')
-
def create_app_values_spec(self, app_name, app_path, base_image_name=None):
logging.info('Generating values script for ' + app_name)
diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py
index 86fff944a..3e2c23005 100644
--- a/tools/deployment-cli-tools/tests/test_dockercompose.py
+++ b/tools/deployment-cli-tools/tests/test_dockercompose.py
@@ -12,14 +12,14 @@
def exists(path):
- return path.exists()
+ return path.exists()
def test_collect_compose_values(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
- exclude=['events'], domain="my.local",
- namespace='test', env='dev', local=False, tag=1, registry='reg')
+ exclude=['events'], domain="my.local",
+ namespace='test', env='dev', local=False, tag=1, registry='reg')
# Auto values
assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'reg/cloudharness/myapp:1'
@@ -96,7 +96,7 @@ def test_collect_compose_values(tmp_path):
def test_collect_compose_values_noreg_noinclude(tmp_path):
out_path = tmp_path / 'test_collect_compose_values_noreg_noinclude'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_path, domain="my.local",
- namespace='test', env='dev', local=False, tag=1)
+ namespace='test', env='dev', local=False, tag=1)
# Auto values
assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == 'cloudharness/myapp:1'
@@ -152,6 +152,7 @@ def test_collect_compose_values_noreg_noinclude(tmp_path):
assert content == 'mykey: myvalue'
content = (compose_path / 'resources' / 'generated' / 'test2.yaml').read_text()
assert content == 'mykey2: myvalue2'
+ else:
assert False
assert values[KEY_TASK_IMAGES]
@@ -160,63 +161,61 @@ def test_collect_compose_values_noreg_noinclude(tmp_path):
assert values[KEY_TASK_IMAGES]['myapp-mytask'] == 'cloudharness/myapp-mytask:1'
-
def test_collect_compose_values_precedence(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_precedence'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["events"])
+ namespace='test', env='prod', local=False, tag=1, include=["events"])
# Values.yaml from current app must override values-prod.yaml from cloudharness
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden'
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod'
+
def test_collect_compose_values_multiple_envs(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_multiple_envs'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"])
-
+ namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"])
assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded'
assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded'
assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding'
-
def test_collect_compose_values_wrong_dependencies_validate(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_wrong_dependencies_validate'
with pytest.raises(ValuesValidationException):
create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
try:
create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
except ValuesValidationException as e:
pytest.fail("Should not error because of wrong soft dependency")
with pytest.raises(ValuesValidationException):
create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
with pytest.raises(ValuesValidationException):
create_docker_compose_configuration([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
def test_collect_compose_values_build_dependencies(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_build_dependencies'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["myapp"])
+ namespace='test', env='prod', local=False, tag=1, include=["myapp"])
assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed"
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
+
def test_collect_compose_values_build_dependencies_nodeps(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_nodeps'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["events"])
-
+ namespace='test', env='prod', local=False, tag=1, include=["events"])
assert 'cloudharness-flask' not in values[KEY_TASK_IMAGES], "Cloudharness-flask is not included in the build dependencies"
assert 'cloudharness-base' not in values[KEY_TASK_IMAGES], "Cloudharness-base is not included in the build dependencies"
@@ -227,8 +226,7 @@ def test_collect_compose_values_build_dependencies_nodeps(tmp_path):
def test_collect_compose_values_build_dependencies_exclude(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_build_dependencies_exclude'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"])
-
+ namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"])
assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed"
@@ -239,7 +237,7 @@ def test_clear_unused_dbconfig(tmp_path):
out_folder = tmp_path / 'test_clear_unused_dbconfig'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- env='withpostgres', local=False, include=["myapp"], exclude=["legacy"])
+ env='withpostgres', local=False, include=["myapp"], exclude=["legacy"])
# There is a DB config
assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
@@ -256,7 +254,7 @@ def test_clear_unused_dbconfig(tmp_path):
assert db_config['neo4j'] is None
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- env='withmongo', local=False, include=["myapp"], exclude=["legacy"])
+ env='withmongo', local=False, include=["myapp"], exclude=["legacy"])
assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE]
@@ -273,7 +271,7 @@ def test_clear_all_dbconfig_if_nodb(tmp_path):
out_folder = tmp_path / 'test_clear_all_dbconfig_if_nodb'
values = create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
- env='withoutdb', local=False, include=["myapp"], exclude=["legacy"])
+ env='withoutdb', local=False, include=["myapp"], exclude=["legacy"])
# There is a DB config
assert KEY_DATABASE in values[KEY_APPS]['myapp'][KEY_HARNESS]
@@ -308,10 +306,11 @@ def test_tag_hash_generation():
def test_collect_compose_values_auto_tag(tmp_path):
out_folder = tmp_path / 'test_collect_compose_values_auto_tag'
+
def create():
return create_docker_compose_configuration([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
- exclude=['events'], domain="my.local",
- namespace='test', env='dev', local=False, tag=None, registry='reg')
+ exclude=['events'], domain="my.local",
+ namespace='test', env='dev', local=False, tag=None, registry='reg')
BASE_KEY = "cloudharness-base"
values = create()
@@ -330,7 +329,6 @@ def create():
assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value"
assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore"
-
fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
try:
fname.write_text('a')
@@ -350,12 +348,10 @@ def create():
finally:
fname.unlink()
-
fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored'
try:
fname.write_text('a')
-
values = create()
assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore"
@@ -376,7 +372,6 @@ def create():
finally:
fname.unlink()
-
fname = Path(CLOUDHARNESS_ROOT) / 'atestfile'
try:
fname.write_text('a')
diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py
index 9a7734422..35d903a11 100644
--- a/tools/deployment-cli-tools/tests/test_helm.py
+++ b/tools/deployment-cli-tools/tests/test_helm.py
@@ -8,7 +8,7 @@
def exists(path):
- return path.exists()
+ return path.exists()
def test_collect_helm_values(tmp_path):
@@ -144,36 +144,35 @@ def test_collect_helm_values_precedence(tmp_path):
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['memory'] == 'overridden'
assert values[KEY_APPS]['events']['kafka']['resources']['limits']['cpu'] == 'overridden-prod'
+
def test_collect_helm_values_multiple_envs(tmp_path):
out_folder = tmp_path / 'test_collect_helm_values_multiple_envs'
values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env=['dev', 'test'], local=False, tag=1, include=["myapp"])
-
assert values[KEY_APPS]['myapp']['test'] is True, 'values-test not loaded'
assert values[KEY_APPS]['myapp']['dev'] is True, 'values-dev not loaded'
assert values[KEY_APPS]['myapp']['a'] == 'test', 'values-test not overriding'
-
def test_collect_helm_values_wrong_dependencies_validate(tmp_path):
out_folder = tmp_path / 'test_collect_helm_values_wrong_dependencies_validate'
with pytest.raises(ValuesValidationException):
create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-hard"])
try:
create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-soft"])
except ValuesValidationException as e:
pytest.fail("Should not error because of wrong soft dependency")
with pytest.raises(ValuesValidationException):
create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-build"])
with pytest.raises(ValuesValidationException):
create_helm_chart([CLOUDHARNESS_ROOT, f"{RESOURCES}/wrong-dependencies"], output_path=out_folder, domain="my.local",
- namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
+ namespace='test', env='prod', local=False, tag=1, include=["wrong-services"])
def test_collect_helm_values_build_dependencies(tmp_path):
@@ -186,12 +185,12 @@ def test_collect_helm_values_build_dependencies(tmp_path):
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
assert 'cloudharness-frontend-build' not in values[KEY_TASK_IMAGES], "cloudharness-frontend-build is not included in any dependency"
+
def test_collect_helm_values_build_dependencies_nodeps(tmp_path):
out_folder = tmp_path / 'test_collect_helm_values_build_dependencies_nodeps'
values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["events"])
-
assert 'cloudharness-flask' not in values[KEY_TASK_IMAGES], "Cloudharness-flask is not included in the build dependencies"
assert 'cloudharness-base' not in values[KEY_TASK_IMAGES], "Cloudharness-base is not included in the build dependencies"
assert 'cloudharness-base-debian' not in values[KEY_TASK_IMAGES], "Cloudharness-base-debian is not included in any dependency"
@@ -203,7 +202,6 @@ def test_collect_helm_values_build_dependencies_exclude(tmp_path):
values = create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, domain="my.local",
namespace='test', env='prod', local=False, tag=1, include=["workflows"], exclude=["workflows-extract-download"])
-
assert 'cloudharness-flask' in values[KEY_TASK_IMAGES], "Cloudharness-flask is included in the build dependencies"
assert 'cloudharness-base' in values[KEY_TASK_IMAGES], "Cloudharness-base is included in cloudharness-flask Dockerfile and it should be guessed"
assert 'workflows-extract-download' not in values[KEY_TASK_IMAGES], "workflows-extract-download has been explicitly excluded"
@@ -281,11 +279,12 @@ def test_tag_hash_generation():
def test_collect_helm_values_auto_tag(tmp_path):
- out_folder = tmp_path / 'test_collect_helm_values_auto_tag'
+ out_folder = str(tmp_path / 'test_collect_helm_values_auto_tag')
+
def create():
return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=out_folder, include=['samples', 'myapp'],
- exclude=['events'], domain="my.local",
- namespace='test', env='dev', local=False, tag=None, registry='reg')
+ exclude=['events'], domain="my.local",
+ namespace='test', env='dev', local=False, tag=None, registry='reg')
BASE_KEY = "cloudharness-base"
values = create()
@@ -304,7 +303,6 @@ def create():
assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value"
assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore"
-
fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.txt'
try:
fname.write_text('a')
@@ -324,12 +322,10 @@ def create():
finally:
fname.unlink()
-
fname = Path(RESOURCES) / 'applications' / 'myapp' / 'afile.ignored'
try:
fname.write_text('a')
-
values = create()
assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore"
@@ -350,7 +346,6 @@ def create():
finally:
fname.unlink()
-
fname = Path(CLOUDHARNESS_ROOT) / 'atestfile'
try:
fname.write_text('a')
From 5adab28ae28b92fa894025b5518ae2a7300ca346 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 11:23:45 +0200
Subject: [PATCH 72/94] CH-110 chore: ignore generated compose file
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index b09e20fd0..1c0fcc2e4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,7 @@ node_modules
*.DS_Store
deployment/helm
deployment/compose
+deployment/docker-compose.yaml
*.egg-info
*.idea
/build
From c10719e0d021f205afeb79468eb9e48b1b4a428d Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 15:01:18 +0200
Subject: [PATCH 73/94] CH-126 update python client generation
---
.../frontend/src/rest/apis/ResourceApi.ts | 45 +-
.../frontend/src/rest/apis/WorkflowsApi.ts | 22 +-
.../src/rest/models/InlineResponse202.ts | 21 +-
.../src/rest/models/InlineResponse202Task.ts | 25 +-
.../src/rest/models/SampleResource.ts | 29 +-
.../samples/frontend/src/rest/runtime.ts | 9 +-
libraries/client/cloudharness_cli/README.md | 480 ++++
.../cloudharness_cli/common/__init__.py | 25 +-
.../cloudharness_cli/common/api/__init__.py | 7 +
.../common/api/accounts_api.py | 279 ++
.../cloudharness_cli/common/api/config_api.py | 276 ++
.../cloudharness_cli/common/api/sentry_api.py | 303 ++
.../cloudharness_cli/common/api_client.py | 2016 +++++---------
.../cloudharness_cli/common/api_response.py | 21 +
.../cloudharness_cli/common/apis/__init__.py | 3 -
.../common/apis/path_to_api.py | 20 -
.../common/apis/paths/__init__.py | 3 -
.../common/apis/paths/accounts_config.py | 7 -
.../apis/paths/sentry_getdsn_appname.py | 7 -
.../common/apis/tag_to_api.py | 20 -
.../common/apis/tags/__init__.py | 10 -
.../common/apis/tags/accounts_api.py | 23 -
.../common/apis/tags/sentry_api.py | 23 -
.../cloudharness_cli/common/configuration.py | 150 +-
.../cloudharness_cli/common/exceptions.py | 103 +-
.../cloudharness_cli/common/model/__init__.py | 5 -
.../common/models/__init__.py | 21 +-
.../common/models/app_version.py | 89 +
.../common/models/get_config200_response.py | 91 +
.../cloudharness_cli/common/paths/__init__.py | 10 -
.../common/paths/accounts_config/__init__.py | 7 -
.../common/paths/accounts_config/get.py | 302 --
.../common/paths/accounts_config/get.pyi | 297 --
.../paths/sentry_getdsn_appname/__init__.py | 7 -
.../common/paths/sentry_getdsn_appname/get.py | 287 --
.../paths/sentry_getdsn_appname/get.pyi | 282 --
.../common/py.typed} | 0
.../cloudharness_cli/common/rest.py | 331 +--
.../cloudharness_cli/common/schemas.py | 2462 ----------------
.../cloudharness_cli/samples/__init__.py | 27 +-
.../cloudharness_cli/samples/api/__init__.py | 8 +
.../cloudharness_cli/samples/api/auth_api.py | 531 ++++
.../samples/api/resource_api.py | 1356 +++++++++
.../cloudharness_cli/samples/api/test_api.py | 527 ++++
.../samples/api/workflows_api.py | 800 ++++++
.../cloudharness_cli/samples/api_client.py | 2016 +++++---------
.../cloudharness_cli/samples/api_response.py | 21 +
.../cloudharness_cli/samples/apis/__init__.py | 3 -
.../samples/apis/path_to_api.py | 41 -
.../samples/apis/paths/__init__.py | 3 -
.../samples/apis/paths/error.py | 7 -
.../samples/apis/paths/operation_async.py | 7 -
.../samples/apis/paths/operation_sync.py | 7 -
.../apis/paths/operation_sync_results.py | 7 -
.../samples/apis/paths/ping.py | 7 -
.../samples/apis/paths/sampleresources.py | 9 -
.../sampleresources_sampleresource_id.py | 11 -
.../samples/apis/paths/valid.py | 7 -
.../samples/apis/paths/valid_cookie.py | 7 -
.../samples/apis/tag_to_api.py | 26 -
.../samples/apis/tags/__init__.py | 12 -
.../samples/apis/tags/auth_api.py | 26 -
.../samples/apis/tags/resource_api.py | 32 -
.../samples/apis/tags/test_api.py | 26 -
.../samples/apis/tags/workflows_api.py | 28 -
.../cloudharness_cli/samples/configuration.py | 150 +-
.../cloudharness_cli/samples/exceptions.py | 103 +-
.../samples/model/__init__.py | 5 -
.../samples/model/inline_response202.py | 84 -
.../samples/model/inline_response202.pyi | 84 -
.../samples/model/inline_response202_task.py | 89 -
.../samples/model/inline_response202_task.pyi | 89 -
.../samples/model/sample_resource.py | 104 -
.../samples/model/sample_resource.pyi | 104 -
.../samples/models/__init__.py | 26 +-
.../samples/models/inline_response202.py | 92 +
.../samples/models/inline_response202_task.py | 90 +
.../samples/models/sample_resource.py | 92 +
.../samples/paths/__init__.py | 17 -
.../samples/paths/error/__init__.py | 7 -
.../samples/paths/error/get.py | 246 --
.../samples/paths/error/get.pyi | 240 --
.../samples/paths/operation_async/__init__.py | 7 -
.../samples/paths/operation_async/get.py | 235 --
.../samples/paths/operation_async/get.pyi | 230 --
.../samples/paths/operation_sync/__init__.py | 7 -
.../samples/paths/operation_sync/get.py | 233 --
.../samples/paths/operation_sync/get.pyi | 228 --
.../paths/operation_sync_results/__init__.py | 7 -
.../paths/operation_sync_results/get.py | 298 --
.../paths/operation_sync_results/get.pyi | 293 --
.../samples/paths/ping/__init__.py | 7 -
.../samples/paths/ping/get.py | 246 --
.../samples/paths/ping/get.pyi | 240 --
.../samples/paths/sampleresources/__init__.py | 7 -
.../samples/paths/sampleresources/get.py | 260 --
.../samples/paths/sampleresources/get.pyi | 255 --
.../samples/paths/sampleresources/post.py | 312 ---
.../samples/paths/sampleresources/post.pyi | 306 --
.../__init__.py | 7 -
.../delete.py | 282 --
.../delete.pyi | 275 --
.../sampleresources_sampleresource_id/get.py | 315 ---
.../sampleresources_sampleresource_id/get.pyi | 308 ---
.../sampleresources_sampleresource_id/put.py | 382 ---
.../sampleresources_sampleresource_id/put.pyi | 375 ---
.../samples/paths/valid/__init__.py | 7 -
.../samples/paths/valid/get.py | 250 --
.../samples/paths/valid/get.pyi | 241 --
.../samples/paths/valid_cookie/__init__.py | 7 -
.../samples/paths/valid_cookie/get.py | 250 --
.../samples/paths/valid_cookie/get.pyi | 241 --
.../samples/py.typed} | 0
.../cloudharness_cli/samples/rest.py | 331 +--
.../cloudharness_cli/samples/schemas.py | 2463 -----------------
.../volumemanager/__init__.py | 23 +-
.../volumemanager/api/__init__.py | 5 +
.../volumemanager/api/rest_api.py | 570 ++++
.../volumemanager/api_client.py | 2016 +++++---------
.../volumemanager/api_response.py | 21 +
.../volumemanager/apis/__init__.py | 3 -
.../volumemanager/apis/path_to_api.py | 20 -
.../volumemanager/apis/paths/__init__.py | 3 -
.../volumemanager/apis/paths/pvc.py | 7 -
.../volumemanager/apis/paths/pvc_name.py | 7 -
.../volumemanager/apis/tag_to_api.py | 17 -
.../volumemanager/apis/tags/__init__.py | 9 -
.../volumemanager/apis/tags/rest_api.py | 25 -
.../volumemanager/configuration.py | 150 +-
.../volumemanager/exceptions.py | 103 +-
.../volumemanager/model/__init__.py | 5 -
.../model/persistent_volume_claim.py | 108 -
.../model/persistent_volume_claim.pyi | 108 -
.../model/persistent_volume_claim_create.py | 88 -
.../model/persistent_volume_claim_create.pyi | 88 -
.../volumemanager/models/__init__.py | 25 +-
.../models/persistent_volume_claim.py | 93 +
.../models/persistent_volume_claim_create.py | 89 +
.../volumemanager/paths/__init__.py | 10 -
.../volumemanager/paths/pvc/__init__.py | 7 -
.../volumemanager/paths/pvc/post.py | 347 ---
.../volumemanager/paths/pvc/post.pyi | 338 ---
.../volumemanager/paths/pvc_name/__init__.py | 7 -
.../volumemanager/paths/pvc_name/get.py | 306 --
.../volumemanager/paths/pvc_name/get.pyi | 297 --
.../volumemanager/py.typed} | 0
.../cloudharness_cli/volumemanager/rest.py | 331 +--
.../cloudharness_cli/volumemanager/schemas.py | 2462 ----------------
.../cloudharness_cli/workflows/__init__.py | 25 +-
.../workflows/api/__init__.py | 5 +
.../workflows/api/create_and_access_api.py | 1120 ++++++++
.../cloudharness_cli/workflows/api_client.py | 2016 +++++---------
.../workflows/api_response.py | 21 +
.../workflows/apis/__init__.py | 3 -
.../workflows/apis/path_to_api.py | 23 -
.../workflows/apis/paths/__init__.py | 3 -
.../workflows/apis/paths/operations.py | 7 -
.../workflows/apis/paths/operations_name.py | 9 -
.../apis/paths/operations_name_logs.py | 7 -
.../workflows/apis/tag_to_api.py | 17 -
.../workflows/apis/tags/__init__.py | 9 -
.../apis/tags/create_and_access_api.py | 30 -
.../workflows/configuration.py | 154 +-
.../cloudharness_cli/workflows/exceptions.py | 103 +-
.../workflows/model/__init__.py | 5 -
.../workflows/model/operation.py | 127 -
.../workflows/model/operation.pyi | 127 -
.../model/operation_search_result.py | 123 -
.../model/operation_search_result.pyi | 123 -
.../workflows/model/operation_status.py | 70 -
.../workflows/model/operation_status.pyi | 59 -
.../workflows/model/search_result_data.py | 82 -
.../workflows/model/search_result_data.pyi | 82 -
.../workflows/models/__init__.py | 28 +-
.../workflows/models/operation.py | 100 +
.../models/operation_search_result.py | 102 +
.../workflows/models/operation_status.py | 42 +
.../workflows/models/search_result_data.py | 88 +
.../workflows/paths/__init__.py | 11 -
.../workflows/paths/operations/__init__.py | 7 -
.../workflows/paths/operations/get.py | 330 ---
.../workflows/paths/operations/get.pyi | 320 ---
.../paths/operations_name/__init__.py | 7 -
.../workflows/paths/operations_name/delete.py | 269 --
.../paths/operations_name/delete.pyi | 263 --
.../workflows/paths/operations_name/get.py | 327 ---
.../workflows/paths/operations_name/get.pyi | 321 ---
.../paths/operations_name_logs/__init__.py | 7 -
.../paths/operations_name_logs/get.py | 300 --
.../paths/operations_name_logs/get.pyi | 294 --
.../workflows/py.typed} | 0
.../cloudharness_cli/workflows/rest.py | 331 +--
.../cloudharness_cli/workflows/schemas.py | 2463 -----------------
.../docs/common/AccountsApi.md | 73 +
.../docs/common/AppVersion.md | 31 +
.../cloudharness_cli/docs/common/ConfigApi.md | 70 +
.../docs/common/GetConfig200Response.md | 31 +
.../cloudharness_cli/docs/common/SentryApi.md | 78 +
.../docs/common/apis/tags/AccountsApi.md | 80 -
.../docs/common/apis/tags/SentryApi.md | 98 -
.../cloudharness_cli/docs/samples/AuthApi.md | 159 ++
.../docs/samples/InlineResponse202.md | 29 +
.../docs/samples/InlineResponse202Task.md | 30 +
.../docs/samples/ResourceApi.md | 351 +++
.../docs/samples/SampleResource.md | 32 +
.../cloudharness_cli/docs/samples/TestApi.md | 134 +
.../docs/samples/WorkflowsApi.md | 201 ++
.../docs/samples/apis/tags/AuthApi.md | 173 --
.../docs/samples/apis/tags/ResourceApi.md | 482 ----
.../docs/samples/apis/tags/TestApi.md | 148 -
.../docs/samples/apis/tags/WorkflowsApi.md | 230 --
.../docs/samples/models/InlineResponse202.md | 15 -
.../samples/models/InlineResponse202Task.md | 16 -
.../docs/samples/models/SampleResource.md | 17 -
.../volumemanager/PersistentVolumeClaim.md | 32 +
.../PersistentVolumeClaimCreate.md | 30 +
.../docs/volumemanager/RestApi.md | 165 ++
.../docs/volumemanager/apis/tags/RestApi.md | 217 --
.../models/PersistentVolumeClaim.md | 18 -
.../models/PersistentVolumeClaimCreate.md | 16 -
.../docs/workflows/CreateAndAccessApi.md | 290 ++
.../docs/workflows/Operation.md | 34 +
.../docs/workflows/OperationSearchResult.md | 31 +
.../docs/workflows/OperationStatus.md | 20 +
.../docs/workflows/SearchResultData.md | 30 +
.../workflows/apis/tags/CreateAndAccessApi.md | 416 ---
.../docs/workflows/models/Operation.md | 21 -
.../workflows/models/OperationSearchResult.md | 30 -
.../docs/workflows/models/OperationStatus.md | 9 -
.../docs/workflows/models/SearchResultData.md | 17 -
.../client/cloudharness_cli/requirements.txt | 10 +-
libraries/client/cloudharness_cli/setup.py | 5 +-
.../cloudharness_cli/test-requirements.txt | 8 +-
.../test/common/test_paths/__init__.py | 68 -
.../test_accounts_config/test_get.py | 41 -
.../test_sentry_getdsn_appname/test_get.py | 41 -
.../cloudharness_cli/test/samples/__init__.py | 0
.../test/samples/test_models/__init__.py | 0
.../test_models/test_inline_response202.py | 26 -
.../test_inline_response202_task.py | 26 -
.../test_models/test_sample_resource.py | 26 -
.../test/samples/test_paths/__init__.py | 68 -
.../samples/test_paths/test_error/__init__.py | 0
.../samples/test_paths/test_error/test_get.py | 41 -
.../test_operation_async/__init__.py | 0
.../test_operation_async/test_get.py | 41 -
.../test_operation_sync/__init__.py | 0
.../test_operation_sync/test_get.py | 41 -
.../test_operation_sync_results/__init__.py | 0
.../test_operation_sync_results/test_get.py | 41 -
.../samples/test_paths/test_ping/__init__.py | 0
.../samples/test_paths/test_ping/test_get.py | 41 -
.../test_sampleresources/__init__.py | 0
.../test_sampleresources/test_get.py | 41 -
.../test_sampleresources/test_post.py | 42 -
.../__init__.py | 0
.../test_delete.py | 40 -
.../test_get.py | 41 -
.../test_put.py | 42 -
.../samples/test_paths/test_valid/__init__.py | 0
.../samples/test_paths/test_valid/test_get.py | 41 -
.../test_paths/test_valid_cookie/__init__.py | 0
.../test_paths/test_valid_cookie/test_get.py | 41 -
.../test/volumemanager/__init__.py | 0
.../volumemanager/test_models/__init__.py | 0
.../test_persistent_volume_claim.py | 25 -
.../test_persistent_volume_claim_create.py | 25 -
.../test/volumemanager/test_paths/__init__.py | 68 -
.../test_paths/test_pvc/__init__.py | 0
.../test_paths/test_pvc/test_post.py | 43 -
.../test_paths/test_pvc_name/__init__.py | 0
.../test_paths/test_pvc_name/test_get.py | 41 -
.../test/workflows/__init__.py | 0
.../test/workflows/test_models/__init__.py | 0
.../workflows/test_models/test_operation.py | 26 -
.../test_operation_search_result.py | 26 -
.../test_models/test_operation_status.py | 26 -
.../test_models/test_search_result_data.py | 26 -
.../test/workflows/test_paths/__init__.py | 68 -
.../test_paths/test_operations/__init__.py | 0
.../test_paths/test_operations/test_get.py | 41 -
.../test_operations_name/__init__.py | 0
.../test_operations_name/test_delete.py | 40 -
.../test_operations_name/test_get.py | 41 -
.../test_operations_name_logs/__init__.py | 0
.../test_operations_name_logs/test_get.py | 41 -
openapitools.json | 7 +
.../ch_cli_tools/openapi.py | 8 +-
tools/deployment-cli-tools/harness-generate | 10 +-
289 files changed, 13318 insertions(+), 33755 deletions(-)
create mode 100644 libraries/client/cloudharness_cli/README.md
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/api/__init__.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/api/accounts_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/api/config_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/api/sentry_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/api_response.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/path_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/accounts_config.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/sentry_getdsn_appname.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tag_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/accounts_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/sentry_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/model/__init__.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/models/app_version.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/models/get_config200_response.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.pyi
rename libraries/client/cloudharness_cli/{test/common/__init__.py => cloudharness_cli/common/py.typed} (100%)
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/common/schemas.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api/resource_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api/test_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/api_response.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/path_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/error.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_async.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync_results.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/ping.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources_sampleresource_id.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid_cookie.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tag_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/auth_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/resource_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/test_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/workflows_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.pyi
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/models/sample_resource.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.pyi
rename libraries/client/cloudharness_cli/{test/common/test_models/__init__.py => cloudharness_cli/samples/py.typed} (100%)
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/samples/schemas.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/__init__.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/rest_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_response.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/path_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc_name.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tag_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/rest_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.pyi
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim_create.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.pyi
rename libraries/client/cloudharness_cli/{test/common/test_paths/test_accounts_config/__init__.py => cloudharness_cli/volumemanager/py.typed} (100%)
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/schemas.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_response.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/path_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name_logs.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tag_to_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/create_and_access_api.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.pyi
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py
create mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.pyi
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.py
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.pyi
rename libraries/client/cloudharness_cli/{test/common/test_paths/test_sentry_getdsn_appname/__init__.py => cloudharness_cli/workflows/py.typed} (100%)
delete mode 100644 libraries/client/cloudharness_cli/cloudharness_cli/workflows/schemas.py
create mode 100644 libraries/client/cloudharness_cli/docs/common/AccountsApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/common/AppVersion.md
create mode 100644 libraries/client/cloudharness_cli/docs/common/ConfigApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/common/GetConfig200Response.md
create mode 100644 libraries/client/cloudharness_cli/docs/common/SentryApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/common/apis/tags/AccountsApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/common/apis/tags/SentryApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/AuthApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/InlineResponse202.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/InlineResponse202Task.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/ResourceApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/SampleResource.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/TestApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/samples/WorkflowsApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/apis/tags/AuthApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/apis/tags/ResourceApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/apis/tags/TestApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/apis/tags/WorkflowsApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202Task.md
delete mode 100644 libraries/client/cloudharness_cli/docs/samples/models/SampleResource.md
create mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaim.md
create mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaimCreate.md
create mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/RestApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/apis/tags/RestApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaim.md
delete mode 100644 libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaimCreate.md
create mode 100644 libraries/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md
create mode 100644 libraries/client/cloudharness_cli/docs/workflows/Operation.md
create mode 100644 libraries/client/cloudharness_cli/docs/workflows/OperationSearchResult.md
create mode 100644 libraries/client/cloudharness_cli/docs/workflows/OperationStatus.md
create mode 100644 libraries/client/cloudharness_cli/docs/workflows/SearchResultData.md
delete mode 100644 libraries/client/cloudharness_cli/docs/workflows/apis/tags/CreateAndAccessApi.md
delete mode 100644 libraries/client/cloudharness_cli/docs/workflows/models/Operation.md
delete mode 100644 libraries/client/cloudharness_cli/docs/workflows/models/OperationSearchResult.md
delete mode 100644 libraries/client/cloudharness_cli/docs/workflows/models/OperationStatus.md
delete mode 100644 libraries/client/cloudharness_cli/docs/workflows/models/SearchResultData.md
delete mode 100644 libraries/client/cloudharness_cli/test/common/test_paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_models/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202_task.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_models/test_sample_resource.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_error/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_error/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_post.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_delete.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_put.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_models/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim_create.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/test_post.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_models/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_models/test_operation.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_search_result.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_status.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_models/test_search_result_data.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_delete.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_get.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/__init__.py
delete mode 100644 libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/test_get.py
create mode 100644 openapitools.json
diff --git a/applications/samples/frontend/src/rest/apis/ResourceApi.ts b/applications/samples/frontend/src/rest/apis/ResourceApi.ts
index a0c70d83d..a5da64ee0 100644
--- a/applications/samples/frontend/src/rest/apis/ResourceApi.ts
+++ b/applications/samples/frontend/src/rest/apis/ResourceApi.ts
@@ -49,8 +49,11 @@ export class ResourceApi extends runtime.BaseAPI {
* Create a SampleResource
*/
async createSampleResourceRaw(requestParameters: CreateSampleResourceRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<void>> {
- if (requestParameters.sampleResource === null || requestParameters.sampleResource === undefined) {
- throw new runtime.RequiredError('sampleResource','Required parameter requestParameters.sampleResource was null or undefined when calling createSampleResource.');
+ if (requestParameters['sampleResource'] == null) {
+ throw new runtime.RequiredError(
+ 'sampleResource',
+ 'Required parameter "sampleResource" was null or undefined when calling createSampleResource().'
+ );
}
const queryParameters: any = {};
@@ -64,7 +67,7 @@ export class ResourceApi extends runtime.BaseAPI {
method: 'POST',
headers: headerParameters,
query: queryParameters,
- body: SampleResourceToJSON(requestParameters.sampleResource),
+ body: SampleResourceToJSON(requestParameters['sampleResource']),
}, initOverrides);
return new runtime.VoidApiResponse(response);
@@ -83,8 +86,11 @@ export class ResourceApi extends runtime.BaseAPI {
* Delete a SampleResource
*/
async deleteSampleResourceRaw(requestParameters: DeleteSampleResourceRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<void>> {
- if (requestParameters.sampleresourceId === null || requestParameters.sampleresourceId === undefined) {
- throw new runtime.RequiredError('sampleresourceId','Required parameter requestParameters.sampleresourceId was null or undefined when calling deleteSampleResource.');
+ if (requestParameters['sampleresourceId'] == null) {
+ throw new runtime.RequiredError(
+ 'sampleresourceId',
+ 'Required parameter "sampleresourceId" was null or undefined when calling deleteSampleResource().'
+ );
}
const queryParameters: any = {};
@@ -92,7 +98,7 @@ export class ResourceApi extends runtime.BaseAPI {
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
- path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters.sampleresourceId))),
+ path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters['sampleresourceId']))),
method: 'DELETE',
headers: headerParameters,
query: queryParameters,
@@ -114,8 +120,11 @@ export class ResourceApi extends runtime.BaseAPI {
* Get a SampleResource
*/
async getSampleResourceRaw(requestParameters: GetSampleResourceRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<void>> {
- if (requestParameters.sampleresourceId === null || requestParameters.sampleresourceId === undefined) {
- throw new runtime.RequiredError('sampleresourceId','Required parameter requestParameters.sampleresourceId was null or undefined when calling getSampleResource.');
+ if (requestParameters['sampleresourceId'] == null) {
+ throw new runtime.RequiredError(
+ 'sampleresourceId',
+ 'Required parameter "sampleresourceId" was null or undefined when calling getSampleResource().'
+ );
}
const queryParameters: any = {};
@@ -123,7 +132,7 @@ export class ResourceApi extends runtime.BaseAPI {
const headerParameters: runtime.HTTPHeaders = {};
const response = await this.request({
- path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters.sampleresourceId))),
+ path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters['sampleresourceId']))),
method: 'GET',
headers: headerParameters,
query: queryParameters,
@@ -174,12 +183,18 @@ export class ResourceApi extends runtime.BaseAPI {
* Update a SampleResource
*/
async updateSampleResourceRaw(requestParameters: UpdateSampleResourceRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<void>> {
- if (requestParameters.sampleresourceId === null || requestParameters.sampleresourceId === undefined) {
- throw new runtime.RequiredError('sampleresourceId','Required parameter requestParameters.sampleresourceId was null or undefined when calling updateSampleResource.');
+ if (requestParameters['sampleresourceId'] == null) {
+ throw new runtime.RequiredError(
+ 'sampleresourceId',
+ 'Required parameter "sampleresourceId" was null or undefined when calling updateSampleResource().'
+ );
}
- if (requestParameters.sampleResource === null || requestParameters.sampleResource === undefined) {
- throw new runtime.RequiredError('sampleResource','Required parameter requestParameters.sampleResource was null or undefined when calling updateSampleResource.');
+ if (requestParameters['sampleResource'] == null) {
+ throw new runtime.RequiredError(
+ 'sampleResource',
+ 'Required parameter "sampleResource" was null or undefined when calling updateSampleResource().'
+ );
}
const queryParameters: any = {};
@@ -189,11 +204,11 @@ export class ResourceApi extends runtime.BaseAPI {
headerParameters['Content-Type'] = 'application/json';
const response = await this.request({
- path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters.sampleresourceId))),
+ path: `/sampleresources/{sampleresourceId}`.replace(`{${"sampleresourceId"}}`, encodeURIComponent(String(requestParameters['sampleresourceId']))),
method: 'PUT',
headers: headerParameters,
query: queryParameters,
- body: SampleResourceToJSON(requestParameters.sampleResource),
+ body: SampleResourceToJSON(requestParameters['sampleResource']),
}, initOverrides);
return new runtime.VoidApiResponse(response);
diff --git a/applications/samples/frontend/src/rest/apis/WorkflowsApi.ts b/applications/samples/frontend/src/rest/apis/WorkflowsApi.ts
index 5294d5cfb..c70e2df62 100644
--- a/applications/samples/frontend/src/rest/apis/WorkflowsApi.ts
+++ b/applications/samples/frontend/src/rest/apis/WorkflowsApi.ts
@@ -91,22 +91,28 @@ export class WorkflowsApi extends runtime.BaseAPI {
* @deprecated
*/
async submitSyncWithResultsRaw(requestParameters: SubmitSyncWithResultsRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<string>> {
- if (requestParameters.a === null || requestParameters.a === undefined) {
- throw new runtime.RequiredError('a','Required parameter requestParameters.a was null or undefined when calling submitSyncWithResults.');
+ if (requestParameters['a'] == null) {
+ throw new runtime.RequiredError(
+ 'a',
+ 'Required parameter "a" was null or undefined when calling submitSyncWithResults().'
+ );
}
- if (requestParameters.b === null || requestParameters.b === undefined) {
- throw new runtime.RequiredError('b','Required parameter requestParameters.b was null or undefined when calling submitSyncWithResults.');
+ if (requestParameters['b'] == null) {
+ throw new runtime.RequiredError(
+ 'b',
+ 'Required parameter "b" was null or undefined when calling submitSyncWithResults().'
+ );
}
const queryParameters: any = {};
- if (requestParameters.a !== undefined) {
- queryParameters['a'] = requestParameters.a;
+ if (requestParameters['a'] != null) {
+ queryParameters['a'] = requestParameters['a'];
}
- if (requestParameters.b !== undefined) {
- queryParameters['b'] = requestParameters.b;
+ if (requestParameters['b'] != null) {
+ queryParameters['b'] = requestParameters['b'];
}
const headerParameters: runtime.HTTPHeaders = {};
diff --git a/applications/samples/frontend/src/rest/models/InlineResponse202.ts b/applications/samples/frontend/src/rest/models/InlineResponse202.ts
index 6703a8e71..6e25cd6c7 100644
--- a/applications/samples/frontend/src/rest/models/InlineResponse202.ts
+++ b/applications/samples/frontend/src/rest/models/InlineResponse202.ts
@@ -12,7 +12,7 @@
* Do not edit the class manually.
*/
-import { exists, mapValues } from '../runtime';
+import { mapValues } from '../runtime';
import type { InlineResponse202Task } from './InlineResponse202Task';
import {
InlineResponse202TaskFromJSON,
@@ -37,10 +37,8 @@ export interface InlineResponse202 {
/**
* Check if a given object implements the InlineResponse202 interface.
*/
-export function instanceOfInlineResponse202(value: object): boolean {
- let isInstance = true;
-
- return isInstance;
+export function instanceOfInlineResponse202(value: object): value is InlineResponse202 {
+ return true;
}
export function InlineResponse202FromJSON(json: any): InlineResponse202 {
@@ -48,25 +46,22 @@ export function InlineResponse202FromJSON(json: any): InlineResponse202 {
}
export function InlineResponse202FromJSONTyped(json: any, ignoreDiscriminator: boolean): InlineResponse202 {
- if ((json === undefined) || (json === null)) {
+ if (json == null) {
return json;
}
return {
- 'task': !exists(json, 'task') ? undefined : InlineResponse202TaskFromJSON(json['task']),
+ 'task': json['task'] == null ? undefined : InlineResponse202TaskFromJSON(json['task']),
};
}
export function InlineResponse202ToJSON(value?: InlineResponse202 | null): any {
- if (value === undefined) {
- return undefined;
- }
- if (value === null) {
- return null;
+ if (value == null) {
+ return value;
}
return {
- 'task': InlineResponse202TaskToJSON(value.task),
+ 'task': InlineResponse202TaskToJSON(value['task']),
};
}
diff --git a/applications/samples/frontend/src/rest/models/InlineResponse202Task.ts b/applications/samples/frontend/src/rest/models/InlineResponse202Task.ts
index b35eab4d9..733e40589 100644
--- a/applications/samples/frontend/src/rest/models/InlineResponse202Task.ts
+++ b/applications/samples/frontend/src/rest/models/InlineResponse202Task.ts
@@ -12,7 +12,7 @@
* Do not edit the class manually.
*/
-import { exists, mapValues } from '../runtime';
+import { mapValues } from '../runtime';
/**
*
* @export
@@ -36,10 +36,8 @@ export interface InlineResponse202Task {
/**
* Check if a given object implements the InlineResponse202Task interface.
*/
-export function instanceOfInlineResponse202Task(value: object): boolean {
- let isInstance = true;
-
- return isInstance;
+export function instanceOfInlineResponse202Task(value: object): value is InlineResponse202Task {
+ return true;
}
export function InlineResponse202TaskFromJSON(json: any): InlineResponse202Task {
@@ -47,27 +45,24 @@ export function InlineResponse202TaskFromJSON(json: any): InlineResponse202Task
}
export function InlineResponse202TaskFromJSONTyped(json: any, ignoreDiscriminator: boolean): InlineResponse202Task {
- if ((json === undefined) || (json === null)) {
+ if (json == null) {
return json;
}
return {
- 'href': !exists(json, 'href') ? undefined : json['href'],
- 'name': !exists(json, 'name') ? undefined : json['name'],
+ 'href': json['href'] == null ? undefined : json['href'],
+ 'name': json['name'] == null ? undefined : json['name'],
};
}
export function InlineResponse202TaskToJSON(value?: InlineResponse202Task | null): any {
- if (value === undefined) {
- return undefined;
- }
- if (value === null) {
- return null;
+ if (value == null) {
+ return value;
}
return {
- 'href': value.href,
- 'name': value.name,
+ 'href': value['href'],
+ 'name': value['name'],
};
}
diff --git a/applications/samples/frontend/src/rest/models/SampleResource.ts b/applications/samples/frontend/src/rest/models/SampleResource.ts
index b6da584dc..a124300ba 100644
--- a/applications/samples/frontend/src/rest/models/SampleResource.ts
+++ b/applications/samples/frontend/src/rest/models/SampleResource.ts
@@ -12,7 +12,7 @@
* Do not edit the class manually.
*/
-import { exists, mapValues } from '../runtime';
+import { mapValues } from '../runtime';
/**
*
* @export
@@ -42,11 +42,9 @@ export interface SampleResource {
/**
* Check if a given object implements the SampleResource interface.
*/
-export function instanceOfSampleResource(value: object): boolean {
- let isInstance = true;
- isInstance = isInstance && "a" in value;
-
- return isInstance;
+export function instanceOfSampleResource(value: object): value is SampleResource {
+ if (!('a' in value) || value['a'] === undefined) return false;
+ return true;
}
export function SampleResourceFromJSON(json: any): SampleResource {
@@ -54,29 +52,26 @@ export function SampleResourceFromJSON(json: any): SampleResource {
}
export function SampleResourceFromJSONTyped(json: any, ignoreDiscriminator: boolean): SampleResource {
- if ((json === undefined) || (json === null)) {
+ if (json == null) {
return json;
}
return {
'a': json['a'],
- 'b': !exists(json, 'b') ? undefined : json['b'],
- 'id': !exists(json, 'id') ? undefined : json['id'],
+ 'b': json['b'] == null ? undefined : json['b'],
+ 'id': json['id'] == null ? undefined : json['id'],
};
}
export function SampleResourceToJSON(value?: SampleResource | null): any {
- if (value === undefined) {
- return undefined;
- }
- if (value === null) {
- return null;
+ if (value == null) {
+ return value;
}
return {
- 'a': value.a,
- 'b': value.b,
- 'id': value.id,
+ 'a': value['a'],
+ 'b': value['b'],
+ 'id': value['id'],
};
}
diff --git a/applications/samples/frontend/src/rest/runtime.ts b/applications/samples/frontend/src/rest/runtime.ts
index 8544aaca7..44b60af44 100644
--- a/applications/samples/frontend/src/rest/runtime.ts
+++ b/applications/samples/frontend/src/rest/runtime.ts
@@ -22,7 +22,7 @@ export interface ConfigurationParameters {
queryParamsStringify?: (params: HTTPQuery) => string; // stringify function for query strings
username?: string; // parameter for basic security
password?: string; // parameter for basic security
- apiKey?: string | ((name: string) => string); // parameter for apiKey security
+ apiKey?: string | Promise<string> | ((name: string) => string | Promise<string>); // parameter for apiKey security
accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string | Promise<string>); // parameter for oauth2 security
headers?: HTTPHeaders; //header params we want to use on every request
credentials?: RequestCredentials; //value for the credentials param we want to use on each request
@@ -59,7 +59,7 @@ export class Configuration {
return this.configuration.password;
}
- get apiKey(): ((name: string) => string) | undefined {
+ get apiKey(): ((name: string) => string | Promise<string>) | undefined {
const apiKey = this.configuration.apiKey;
if (apiKey) {
return typeof apiKey === 'function' ? apiKey : () => apiKey;
@@ -310,11 +310,6 @@ export interface RequestOpts {
body?: HTTPBody;
}
-export function exists(json: any, key: string) {
- const value = json[key];
- return value !== null && value !== undefined;
-}
-
export function querystring(params: HTTPQuery, prefix: string = ''): string {
return Object.keys(params)
.map(key => querystringSingleKey(key, params[key], prefix))
diff --git a/libraries/client/cloudharness_cli/README.md b/libraries/client/cloudharness_cli/README.md
new file mode 100644
index 000000000..2ad75ad5f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/README.md
@@ -0,0 +1,480 @@
+# cloudharness-cli.common
+Cloud Harness Platform - Reference CH service API
+
+This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
+
+- API version: 0.1.0
+- Package version: 1.0.0
+- Generator version: 7.7.0
+- Build package: org.openapitools.codegen.languages.PythonClientCodegen
+
+## Requirements.
+
+Python 3.7+
+
+## Installation & Usage
+### pip install
+
+If the Python package is hosted on a repository, you can install it directly using:
+
+```sh
+pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git
+```
+(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`)
+
+Then import the package:
+```python
+import cloudharness_cli.common
+```
+
+### Setuptools
+
+Install via [Setuptools](http://pypi.python.org/pypi/setuptools).
+
+```sh
+python setup.py install --user
+```
+(or `sudo python setup.py install` to install the package for all users)
+
+Then import the package:
+```python
+import cloudharness_cli.common
+```
+
+### Tests
+
+Execute `pytest` to run the tests.
+
+## Getting Started
+
+Please follow the [installation procedure](#installation--usage) and then run the following:
+
+```python
+
+import cloudharness_cli.common
+from cloudharness_cli.common.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.common.Configuration(
+ host = "/api"
+)
+
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.common.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.common.AccountsApi(api_client)
+
+ try:
+ # Gets the config for logging in to accounts
+ api_response = api_instance.get_config()
+ print("The response of AccountsApi->get_config:\n")
+ pprint(api_response)
+ except ApiException as e:
+ print("Exception when calling AccountsApi->get_config: %s\n" % e)
+
+```
+
+## Documentation for API Endpoints
+
+All URIs are relative to */api*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*AccountsApi* | [**get_config**](docs/common/AccountsApi.md#get_config) | **GET** /accounts/config | Gets the config for logging in to accounts
+*SentryApi* | [**getdsn**](docs/common/SentryApi.md#getdsn) | **GET** /sentry/getdsn/{appname} | Gets the Sentry DSN for a given application
+*ConfigApi* | [**get_version**](docs/common/ConfigApi.md#get_version) | **GET** /version |
+
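+For example, a minimal sketch of reading the deployment version through the
+`ConfigApi` listed above (it assumes the package is installed and that the
+default `/api` host is reachable):
+
+```python
+import cloudharness_cli.common
+from cloudharness_cli.common.rest import ApiException
+
+configuration = cloudharness_cli.common.Configuration(host="/api")
+
+with cloudharness_cli.common.ApiClient(configuration) as api_client:
+    config_api = cloudharness_cli.common.ConfigApi(api_client)
+    try:
+        # GET /version returns an AppVersion model
+        version = config_api.get_version()
+        print(version)
+    except ApiException as e:
+        print("Exception when calling ConfigApi->get_version: %s" % e)
+```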
+
+## Documentation For Models
+
+ - [AppVersion](docs/common/AppVersion.md)
+ - [GetConfig200Response](docs/common/GetConfig200Response.md)
+
+
+
+## Documentation For Authorization
+
+Endpoints do not require authorization.
+
+
+## Author
+
+
+
+
+# cloudharness-cli.volumemanager
+CloudHarness Volumes manager API
+
+This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
+
+- API version: 0.1.0
+- Package version: 1.0.0
+- Generator version: 7.7.0
+- Build package: org.openapitools.codegen.languages.PythonClientCodegen
+
+## Requirements.
+
+Python 3.7+
+
+## Installation & Usage
+### pip install
+
+If the Python package is hosted on a repository, you can install it directly using:
+
+```sh
+pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git
+```
+(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`)
+
+Then import the package:
+```python
+import cloudharness_cli.volumemanager
+```
+
+### Setuptools
+
+Install via [Setuptools](http://pypi.python.org/pypi/setuptools).
+
+```sh
+python setup.py install --user
+```
+(or `sudo python setup.py install` to install the package for all users)
+
+Then import the package:
+```python
+import cloudharness_cli.volumemanager
+```
+
+### Tests
+
+Execute `pytest` to run the tests.
+
+## Getting Started
+
+Please follow the [installation procedure](#installation--usage) and then run the following:
+
+```python
+
+import os
+
+import cloudharness_cli.volumemanager
+from cloudharness_cli.volumemanager.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.volumemanager.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below; use the example that
+# satisfies your auth use case.
+
+# Configure Bearer authorization (JWT): bearerAuth
+configuration = cloudharness_cli.volumemanager.Configuration(
+ access_token = os.environ["BEARER_TOKEN"]
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.volumemanager.RestApi(api_client)
+ name = 'name_example' # str | The name of the Persistent Volume Claim to be retrieved
+
+ try:
+ # Retrieve a Persistent Volume Claim from the Kubernetes repository.
+ api_response = api_instance.pvc_name_get(name)
+ print("The response of RestApi->pvc_name_get:\n")
+ pprint(api_response)
+ except ApiException as e:
+ print("Exception when calling RestApi->pvc_name_get: %s\n" % e)
+
+```
+
+## Documentation for API Endpoints
+
+All URIs are relative to */api*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*RestApi* | [**pvc_name_get**](docs/volumemanager/RestApi.md#pvc_name_get) | **GET** /pvc/{name} | Retrieve a Persistent Volume Claim from the Kubernetes repository.
+*RestApi* | [**pvc_post**](docs/volumemanager/RestApi.md#pvc_post) | **POST** /pvc | Create a Persistent Volume Claim in Kubernetes
+
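+As a counterpart to the retrieval example above, here is a minimal sketch of
+creating a claim through `pvc_post`; the `name` and `size` fields are
+assumptions based on the `PersistentVolumeClaimCreate` model documented below:
+
+```python
+import os
+
+import cloudharness_cli.volumemanager
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
+from cloudharness_cli.volumemanager.rest import ApiException
+
+configuration = cloudharness_cli.volumemanager.Configuration(
+    host="/api",
+    access_token=os.environ["BEARER_TOKEN"],
+)
+
+with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
+    rest_api = cloudharness_cli.volumemanager.RestApi(api_client)
+    try:
+        # POST /pvc creates the claim and returns the new PersistentVolumeClaim
+        pvc = rest_api.pvc_post(PersistentVolumeClaimCreate(name="my-claim", size="1Gi"))
+        print(pvc)
+    except ApiException as e:
+        print("Exception when calling RestApi->pvc_post: %s" % e)
+```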
+
+## Documentation For Models
+
+ - [PersistentVolumeClaim](docs/volumemanager/PersistentVolumeClaim.md)
+ - [PersistentVolumeClaimCreate](docs/volumemanager/PersistentVolumeClaimCreate.md)
+
+
+
+## Documentation For Authorization
+
+
+Authentication schemes defined for the API:
+
+### bearerAuth
+
+- **Type**: Bearer authentication (JWT)
+
+
+## Author
+
+
+
+
+# cloudharness-cli.workflows
+Workflows API
+
+This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
+
+- API version: 0.1.0
+- Package version: 1.0.0
+- Generator version: 7.7.0
+- Build package: org.openapitools.codegen.languages.PythonClientCodegen
+
+## Requirements.
+
+Python 3.7+
+
+## Installation & Usage
+### pip install
+
+If the Python package is hosted on a repository, you can install it directly using:
+
+```sh
+pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git
+```
+(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`)
+
+Then import the package:
+```python
+import cloudharness_cli.workflows
+```
+
+### Setuptools
+
+Install via [Setuptools](http://pypi.python.org/pypi/setuptools).
+
+```sh
+python setup.py install --user
+```
+(or `sudo python setup.py install` to install the package for all users)
+
+Then import the package:
+```python
+import cloudharness_cli.workflows
+```
+
+### Tests
+
+Execute `pytest` to run the tests.
+
+## Getting Started
+
+Please follow the [installation procedure](#installation--usage) and then run the following:
+
+```python
+
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.workflows.Configuration(
+ host = "/api"
+)
+
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+ name = 'my-workflow' # str |
+
+ try:
+ # deletes operation by name
+ api_instance.delete_operation(name)
+ except ApiException as e:
+ print("Exception when calling CreateAndAccessApi->delete_operation: %s\n" % e)
+
+```
+
+## Documentation for API Endpoints
+
+All URIs are relative to */api*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*CreateAndAccessApi* | [**delete_operation**](docs/workflows/CreateAndAccessApi.md#delete_operation) | **DELETE** /operations/{name} | deletes operation by name
+*CreateAndAccessApi* | [**get_operation**](docs/workflows/CreateAndAccessApi.md#get_operation) | **GET** /operations/{name} | get operation by name
+*CreateAndAccessApi* | [**list_operations**](docs/workflows/CreateAndAccessApi.md#list_operations) | **GET** /operations | lists operations
+*CreateAndAccessApi* | [**log_operation**](docs/workflows/CreateAndAccessApi.md#log_operation) | **GET** /operations/{name}/logs | get operation logs by name
+
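+For instance, a minimal sketch of listing the current operations through
+`list_operations` (called here without filters; see the linked method docs for
+the supported query parameters):
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+
+configuration = cloudharness_cli.workflows.Configuration(host="/api")
+
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+    api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+    try:
+        # GET /operations; per the models below, the payload is an OperationSearchResult
+        result = api_instance.list_operations()
+        print(result)
+    except ApiException as e:
+        print("Exception when calling CreateAndAccessApi->list_operations: %s" % e)
+```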
+
+## Documentation For Models
+
+ - [Operation](docs/workflows/Operation.md)
+ - [OperationSearchResult](docs/workflows/OperationSearchResult.md)
+ - [OperationStatus](docs/workflows/OperationStatus.md)
+ - [SearchResultData](docs/workflows/SearchResultData.md)
+
+
+
+## Documentation For Authorization
+
+Endpoints do not require authorization.
+
+
+## Author
+
+cloudharness@metacell.us
+
+
+# cloudharness-cli.samples
+CloudHarness Sample api
+
+This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
+
+- API version: 0.1.0
+- Package version: 1.0.0
+- Generator version: 7.7.0
+- Build package: org.openapitools.codegen.languages.PythonClientCodegen
+
+## Requirements.
+
+Python 3.7+
+
+## Installation & Usage
+### pip install
+
+If the Python package is hosted on a repository, you can install it directly using:
+
+```sh
+pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git
+```
+(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`)
+
+Then import the package:
+```python
+import cloudharness_cli.samples
+```
+
+### Setuptools
+
+Install via [Setuptools](http://pypi.python.org/pypi/setuptools).
+
+```sh
+python setup.py install --user
+```
+(or `sudo python setup.py install` to install the package for all users)
+
+Then import the package:
+```python
+import cloudharness_cli.samples
+```
+
+### Tests
+
+Execute `pytest` to run the tests.
+
+## Getting Started
+
+Please follow the [installation procedure](#installation--usage) and then run the following:
+
+```python
+
+import os
+
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below; use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: cookieAuth
+configuration.api_key['cookieAuth'] = os.environ["API_KEY"]
+
+# Uncomment the line below to set up a prefix (e.g. Bearer) for the API key, if needed
+# configuration.api_key_prefix['cookieAuth'] = 'Bearer'
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.AuthApi(api_client)
+
+ try:
+ # Check if the token is valid. Get a token by logging into the base url
+ api_response = api_instance.valid_cookie()
+ print("The response of AuthApi->valid_cookie:\n")
+ pprint(api_response)
+ except ApiException as e:
+ print("Exception when calling AuthApi->valid_cookie: %s\n" % e)
+
+```
+
+## Documentation for API Endpoints
+
+All URIs are relative to */api*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*AuthApi* | [**valid_cookie**](docs/samples/AuthApi.md#valid_cookie) | **GET** /valid-cookie | Check if the token is valid. Get a token by logging into the base url
+*AuthApi* | [**valid_token**](docs/samples/AuthApi.md#valid_token) | **GET** /valid | Check if the token is valid. Get a token by logging into the base url
+*ResourceApi* | [**create_sample_resource**](docs/samples/ResourceApi.md#create_sample_resource) | **POST** /sampleresources | Create a SampleResource
+*ResourceApi* | [**delete_sample_resource**](docs/samples/ResourceApi.md#delete_sample_resource) | **DELETE** /sampleresources/{sampleresourceId} | Delete a SampleResource
+*ResourceApi* | [**get_sample_resource**](docs/samples/ResourceApi.md#get_sample_resource) | **GET** /sampleresources/{sampleresourceId} | Get a SampleResource
+*ResourceApi* | [**get_sample_resources**](docs/samples/ResourceApi.md#get_sample_resources) | **GET** /sampleresources | List All SampleResources
+*ResourceApi* | [**update_sample_resource**](docs/samples/ResourceApi.md#update_sample_resource) | **PUT** /sampleresources/{sampleresourceId} | Update a SampleResource
+*TestApi* | [**error**](docs/samples/TestApi.md#error) | **GET** /error | test sentry is working
+*TestApi* | [**ping**](docs/samples/TestApi.md#ping) | **GET** /ping | test the application is up
+*WorkflowsApi* | [**submit_async**](docs/samples/WorkflowsApi.md#submit_async) | **GET** /operation_async | Send an asynchronous operation
+*WorkflowsApi* | [**submit_sync**](docs/samples/WorkflowsApi.md#submit_sync) | **GET** /operation_sync | Send a synchronous operation
+*WorkflowsApi* | [**submit_sync_with_results**](docs/samples/WorkflowsApi.md#submit_sync_with_results) | **GET** /operation_sync_results | Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
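+Beyond the cookie check above, here is a minimal sketch of creating a resource
+through `ResourceApi`; the field names follow the `SampleResource` model
+documented below (only `a` is required), and the import path assumes the same
+`models` layout as the other clients:
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+from cloudharness_cli.samples.rest import ApiException
+
+configuration = cloudharness_cli.samples.Configuration(host="/api")
+
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+    resource_api = cloudharness_cli.samples.ResourceApi(api_client)
+    try:
+        # POST /sampleresources with a SampleResource body
+        resource_api.create_sample_resource(SampleResource(a=1.0, b=2.0))
+    except ApiException as e:
+        print("Exception when calling ResourceApi->create_sample_resource: %s" % e)
+```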
+
+## Documentation For Models
+
+ - [InlineResponse202](docs/samples/InlineResponse202.md)
+ - [InlineResponse202Task](docs/samples/InlineResponse202Task.md)
+ - [SampleResource](docs/samples/SampleResource.md)
+
+
+
+## Documentation For Authorization
+
+
+Authentication schemes defined for the API:
+
+### bearerAuth
+
+- **Type**: Bearer authentication (JWT)
+
+
+### cookieAuth
+
+- **Type**: API key
+- **API key parameter name**: kc-access
+- **Location**:
+
+
+## Author
+
+cloudharness@metacell.us
+
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/__init__.py
index 06ad679d7..cd51d191e 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/__init__.py
@@ -5,24 +5,33 @@
"""
CH common service API
- Cloud Harness Platform - Reference CH service API # noqa: E501
+ Cloud Harness Platform - Reference CH service API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
__version__ = "1.0.0"
+# import apis into sdk package
+from cloudharness_cli.common.api.accounts_api import AccountsApi
+from cloudharness_cli.common.api.sentry_api import SentryApi
+from cloudharness_cli.common.api.config_api import ConfigApi
+
# import ApiClient
+from cloudharness_cli.common.api_response import ApiResponse
from cloudharness_cli.common.api_client import ApiClient
-
-# import Configuration
from cloudharness_cli.common.configuration import Configuration
-
-# import exceptions
from cloudharness_cli.common.exceptions import OpenApiException
-from cloudharness_cli.common.exceptions import ApiAttributeError
from cloudharness_cli.common.exceptions import ApiTypeError
from cloudharness_cli.common.exceptions import ApiValueError
from cloudharness_cli.common.exceptions import ApiKeyError
+from cloudharness_cli.common.exceptions import ApiAttributeError
from cloudharness_cli.common.exceptions import ApiException
+
+# import models into sdk package
+from cloudharness_cli.common.models.app_version import AppVersion
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/__init__.py
new file mode 100644
index 000000000..893803909
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/__init__.py
@@ -0,0 +1,7 @@
+# flake8: noqa
+
+# import apis into api package
+from cloudharness_cli.common.api.accounts_api import AccountsApi
+from cloudharness_cli.common.api.sentry_api import SentryApi
+from cloudharness_cli.common.api.config_api import ConfigApi
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api/accounts_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/accounts_api.py
new file mode 100644
index 000000000..ac12f862e
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/accounts_api.py
@@ -0,0 +1,279 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
+
+from cloudharness_cli.common.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.common.api_response import ApiResponse
+from cloudharness_cli.common.rest import RESTResponseType
+
+
+class AccountsApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def get_config(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> GetConfig200Response:
+ """Gets the config for logging in into accounts
+
+ Gets the config for logging in into accounts
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_config_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "GetConfig200Response",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def get_config_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[GetConfig200Response]:
+ """Gets the config for logging in into accounts
+
+ Gets the config for logging in into accounts
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_config_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "GetConfig200Response",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def get_config_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Gets the config for logging in into accounts
+
+ Gets the config for logging in into accounts
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_config_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "GetConfig200Response",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _get_config_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/accounts/config',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api/config_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/config_api.py
new file mode 100644
index 000000000..a59269680
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/config_api.py
@@ -0,0 +1,276 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from cloudharness_cli.common.models.app_version import AppVersion
+
+from cloudharness_cli.common.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.common.api_response import ApiResponse
+from cloudharness_cli.common.rest import RESTResponseType
+
+
+class ConfigApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def get_version(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> AppVersion:
+ """get_version
+
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "AppVersion",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def get_version_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[AppVersion]:
+ """get_version
+
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "AppVersion",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def get_version_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """get_version
+
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_version_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "AppVersion",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _get_version_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/version',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
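
Reviewer note: a minimal usage sketch for the generated ConfigApi above; the
host URL is a placeholder and the snippet assumes the cloudharness_cli package
is installed:

    from cloudharness_cli.common.api_client import ApiClient
    from cloudharness_cli.common.api.config_api import ConfigApi
    from cloudharness_cli.common.configuration import Configuration

    configuration = Configuration(host="http://localhost:5000/api")  # placeholder host
    with ApiClient(configuration) as api_client:
        config_api = ConfigApi(api_client)
        version = config_api.get_version()              # deserialized AppVersion model
        full = config_api.get_version_with_http_info()  # ApiResponse with status/headers
        print(version, full.status_code)
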
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api/sentry_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/sentry_api.py
new file mode 100644
index 000000000..bad110e61
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api/sentry_api.py
@@ -0,0 +1,303 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import StrictStr
+from typing import Any, Dict
+
+from cloudharness_cli.common.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.common.api_response import ApiResponse
+from cloudharness_cli.common.rest import RESTResponseType
+
+
+class SentryApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def getdsn(
+ self,
+ appname: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> object:
+ """Gets the Sentry DSN for a given application
+
+ Gets the Sentry DSN for a given application
+
+ :param appname: (required)
+ :type appname: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._getdsn_serialize(
+ appname=appname,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ '400': "object",
+ '404': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def getdsn_with_http_info(
+ self,
+ appname: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """Gets the Sentry DSN for a given application
+
+ Gets the Sentry DSN for a given application
+
+ :param appname: (required)
+ :type appname: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._getdsn_serialize(
+ appname=appname,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ '400': "object",
+ '404': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def getdsn_without_preload_content(
+ self,
+ appname: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Gets the Sentry DSN for a given application
+
+ Gets the Sentry DSN for a given application
+
+ :param appname: (required)
+ :type appname: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._getdsn_serialize(
+ appname=appname,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ '400': "object",
+ '404': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _getdsn_serialize(
+ self,
+ appname,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if appname is not None:
+ _path_params['appname'] = appname
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json',
+ 'text/html',
+ 'application/problem+json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/sentry/getdsn/{appname}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
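
Reviewer note: the generated SentryApi follows the same pattern; a hedged
sketch (host and app name below are placeholders):

    from cloudharness_cli.common.api_client import ApiClient
    from cloudharness_cli.common.api.sentry_api import SentryApi
    from cloudharness_cli.common.configuration import Configuration

    configuration = Configuration(host="http://localhost:5000/api")  # placeholder host
    with ApiClient(configuration) as api_client:
        sentry_api = SentryApi(api_client)
        # resolves GET /sentry/getdsn/{appname}; non-2xx raises an ApiException subclass
        dsn = sentry_api.getdsn(appname="workflows")  # placeholder app name
        print(dsn)
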
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api_client.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api_client.py
index dc2f32ff1..d0a683dc6 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/api_client.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api_client.py
@@ -1,1499 +1,781 @@
# coding: utf-8
+
"""
CH common service API
- Cloud Harness Platform - Reference CH service API # noqa: E501
+ Cloud Harness Platform - Reference CH service API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-from dataclasses import dataclass
-from decimal import Decimal
-import enum
-import email
+import datetime
+from dateutil.parser import parse
+from enum import Enum
import json
+import mimetypes
import os
-import io
-import atexit
-from multiprocessing.pool import ThreadPool
import re
import tempfile
-import typing
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-from urllib.parse import urlparse, quote
-from urllib3.fields import RequestField as RequestFieldBase
-import frozendict
+from urllib.parse import quote
+from typing import Tuple, Optional, List, Dict, Union
+from pydantic import SecretStr
-from cloudharness_cli.common import rest
from cloudharness_cli.common.configuration import Configuration
-from cloudharness_cli.common.exceptions import ApiTypeError, ApiValueError
-from cloudharness_cli.common.schemas import (
- NoneClass,
- BoolClass,
- Schema,
- FileIO,
- BinarySchema,
- date,
- datetime,
- none_type,
- Unset,
- unset,
+from cloudharness_cli.common.api_response import ApiResponse, T as ApiResponseT
+import cloudharness_cli.common.models
+from cloudharness_cli.common import rest
+from cloudharness_cli.common.exceptions import (
+ ApiValueError,
+ ApiException,
+ BadRequestException,
+ UnauthorizedException,
+ ForbiddenException,
+ NotFoundException,
+ ServiceException
)
+RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]]
-class RequestField(RequestFieldBase):
- def __eq__(self, other):
- if not isinstance(other, RequestField):
- return False
- return self.__dict__ == other.__dict__
-
-
-class JSONEncoder(json.JSONEncoder):
- compact_separators = (',', ':')
-
- def default(self, obj):
- if isinstance(obj, str):
- return str(obj)
- elif isinstance(obj, float):
- return float(obj)
- elif isinstance(obj, int):
- return int(obj)
- elif isinstance(obj, Decimal):
- if obj.as_tuple().exponent >= 0:
- return int(obj)
- return float(obj)
- elif isinstance(obj, NoneClass):
- return None
- elif isinstance(obj, BoolClass):
- return bool(obj)
- elif isinstance(obj, (dict, frozendict.frozendict)):
- return {key: self.default(val) for key, val in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self.default(item) for item in obj]
- raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
-
-
-class ParameterInType(enum.Enum):
- QUERY = 'query'
- HEADER = 'header'
- PATH = 'path'
- COOKIE = 'cookie'
-
-
-class ParameterStyle(enum.Enum):
- MATRIX = 'matrix'
- LABEL = 'label'
- FORM = 'form'
- SIMPLE = 'simple'
- SPACE_DELIMITED = 'spaceDelimited'
- PIPE_DELIMITED = 'pipeDelimited'
- DEEP_OBJECT = 'deepObject'
-
-
-class PrefixSeparatorIterator:
- # A class to store prefixes and separators for rfc6570 expansions
-
- def __init__(self, prefix: str, separator: str):
- self.prefix = prefix
- self.separator = separator
- self.first = True
- if separator in {'.', '|', '%20'}:
- item_separator = separator
- else:
- item_separator = ','
- self.item_separator = item_separator
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self.first:
- self.first = False
- return self.prefix
- return self.separator
-
-
-class ParameterSerializerBase:
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- return False
-
- @staticmethod
- def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
- """
- Get representation if str/float/int/None/items in list/ values in dict
- None is returned if an item is undefined, use cases are value=
- - None
- - []
- - {}
- - [None, None None]
- - {'a': None, 'b': None}
- """
- if type(in_data) in {str, float, int}:
- if percent_encode:
- return quote(str(in_data))
- return str(in_data)
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, list) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, dict) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- raise ApiValueError('Unable to generate a ref6570 item representation of {}'.format(in_data))
-
- @staticmethod
- def _to_dict(name: str, value: str):
- return {name: value}
-
- @classmethod
- def __ref6570_str_float_int_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_value = cls.__ref6570_item_value(in_data, percent_encode)
- if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
- return next(prefix_separator_iterator) + var_name_piece
- value_pair_equals = '=' if named_parameter_expansion else ''
- return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
- @classmethod
- def __ref6570_list_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
- item_values = [v for v in item_values if v is not None]
- if not item_values:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece +
- value_pair_equals +
- prefix_separator_iterator.item_separator.join(item_values)
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [var_name_piece + value_pair_equals + val for val in item_values]
- )
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
- @classmethod
- def __ref6570_dict_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
- in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
- if not in_data_transformed:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece + value_pair_equals +
- prefix_separator_iterator.item_separator.join(
- prefix_separator_iterator.item_separator.join(
- item_pair
- ) for item_pair in in_data_transformed.items()
- )
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [key + '=' + val for key, val in in_data_transformed.items()]
- )
+ :param configuration: .Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ """
- @classmethod
- def _ref6570_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator
- ) -> str:
- """
- Separator is for separate variables like dict with explode true, not for array item separation
- """
- named_parameter_expansion = prefix_separator_iterator.separator in {'&', ';'}
- var_name_piece = variable_name if named_parameter_expansion else ''
- if type(in_data) in {str, float, int}:
- return cls.__ref6570_str_float_int_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- elif isinstance(in_data, list):
- return cls.__ref6570_list_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, dict):
- return cls.__ref6570_dict_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- # bool, bytes, etc
- raise ApiValueError('Unable to generate a ref6570 representation of {}'.format(in_data))
+ PRIMITIVE_TYPES = (float, bool, bytes, str, int)
+ NATIVE_TYPES_MAPPING = {
+ 'int': int,
+ 'long': int, # TODO remove as only py3 is supported?
+ 'float': float,
+ 'str': str,
+ 'bool': bool,
+ 'date': datetime.date,
+ 'datetime': datetime.datetime,
+ 'object': object,
+ }
+ _pool = None
+ def __init__(
+ self,
+ configuration=None,
+ header_name=None,
+ header_value=None,
+ cookie=None
+ ) -> None:
+ # use default configuration if none is provided
+ if configuration is None:
+ configuration = Configuration.get_default()
+ self.configuration = configuration
-class StyleFormSerializer(ParameterSerializerBase):
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- if style is ParameterStyle.FORM:
- return True
- return super()._get_default_explode(style)
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = {}
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ self.client_side_validation = configuration.client_side_validation
- def _serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> str:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = PrefixSeparatorIterator('', '&')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
-class StyleSimpleSerializer(ParameterSerializerBase):
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers['User-Agent']
- def _serialize_simple(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool
- ) -> str:
- prefix_separator_iterator = PrefixSeparatorIterator('', ',')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers['User-Agent'] = value
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
-class JSONDetector:
- """
- Works for:
- application/json
- application/json; charset=UTF-8
- application/json-patch+json
- application/geo+json
- """
- __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
- @classmethod
- def _content_type_is_json(cls, content_type: str) -> bool:
- if cls.__json_content_type_pattern.match(content_type):
- return True
- return False
-
-
-@dataclass
-class ParameterBase(JSONDetector):
- name: str
- in_type: ParameterInType
- required: bool
- style: typing.Optional[ParameterStyle]
- explode: typing.Optional[bool]
- allow_reserved: typing.Optional[bool]
- schema: typing.Optional[typing.Type[Schema]]
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
-
- __style_to_in_type = {
- ParameterStyle.MATRIX: {ParameterInType.PATH},
- ParameterStyle.LABEL: {ParameterInType.PATH},
- ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
- ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
- ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
- }
- __in_type_to_default_style = {
- ParameterInType.QUERY: ParameterStyle.FORM,
- ParameterInType.PATH: ParameterStyle.SIMPLE,
- ParameterInType.HEADER: ParameterStyle.SIMPLE,
- ParameterInType.COOKIE: ParameterStyle.FORM,
- }
- __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
- _json_encoder = JSONEncoder()
+ _default = None
@classmethod
- def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
- if style is None:
- return
- in_type_set = cls.__style_to_in_type[style]
- if in_type not in in_type_set:
- raise ValueError(
- 'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
- style, in_type_set
- )
- )
-
- def __init__(
- self,
- name: str,
- in_type: ParameterInType,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- if schema is None and content is None:
- raise ValueError('Value missing; Pass in either schema or content')
- if schema and content:
- raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
- if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
- raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
- self.__verify_style_to_in_type(style, in_type)
- if content is None and style is None:
- style = self.__in_type_to_default_style[in_type]
- if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
- raise ValueError('Invalid content length, content length must equal 1')
- self.in_type = in_type
- self.name = name
- self.required = required
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
- self.schema = schema
- self.content = content
-
- def _serialize_json(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- eliminate_whitespace: bool = False
- ) -> str:
- if eliminate_whitespace:
- return json.dumps(in_data, separators=self._json_encoder.compact_separators)
- return json.dumps(in_data)
+ def get_default(cls):
+ """Return new instance of ApiClient.
+ This method returns newly created, based on default constructor,
+ object of ApiClient class or returns a copy of default
+ ApiClient.
-class PathParameter(ParameterBase, StyleSimpleSerializer):
+ :return: The ApiClient object.
+ """
+ if cls._default is None:
+ cls._default = ApiClient()
+ return cls._default
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.PATH,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of ApiClient.
- def __serialize_label(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator('.', '.')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+        It stores the given object as the default ApiClient.
- def __serialize_matrix(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator(';', ';')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+        :param default: the ApiClient object to use as the default.
+ """
+ cls._default = default
- def __serialize_simple(
+ def param_serialize(
self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- ) -> typing.Dict[str, str]:
- value = self._serialize_simple(
- in_data=in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True
- )
- return self._to_dict(self.name, value)
+ method,
+ resource_path,
+ path_params=None,
+ query_params=None,
+ header_params=None,
+ body=None,
+ post_params=None,
+        files=None,
+        auth_settings=None,
+ collection_formats=None,
+ _host=None,
+ _request_auth=None
+ ) -> RequestSerialized:
+
+ """Builds the HTTP request params needed by the request.
+ :param method: Method to call.
+ :param resource_path: Path to method endpoint.
+ :param path_params: Path parameters in the url.
+ :param query_params: Query parameters in the url.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param dict post_params: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param list auth_settings: Auth Settings names for the request.
+        :param dict files: key -> filename, value -> filepath,
+            for `multipart/form-data`.
+        :param collection_formats: dict of collection formats for path, query,
+            header, and post parameters.
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the authentication
+            in the spec for a single request.
+        :return: tuple of form (method, url, header_params,
+            body, post_params)
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> path
- path:
- returns path_params: dict
- label -> path
- returns path_params
- matrix -> path
- returns path_params
- """
- if self.style:
- if self.style is ParameterStyle.SIMPLE:
- return self.__serialize_simple(cast_in_data)
- elif self.style is ParameterStyle.LABEL:
- return self.__serialize_label(cast_in_data)
- elif self.style is ParameterStyle.MATRIX:
- return self.__serialize_matrix(cast_in_data)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class QueryParameter(ParameterBase, StyleFormSerializer):
+ config = self.configuration
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.QUERY,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ # header parameters
+ header_params = header_params or {}
+ header_params.update(self.default_headers)
+ if self.cookie:
+ header_params['Cookie'] = self.cookie
+ if header_params:
+ header_params = self.sanitize_for_serialization(header_params)
+ header_params = dict(
+                self.parameters_to_tuples(header_params, collection_formats)
+ )
- def __serialize_space_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # path parameters
+ if path_params:
+ path_params = self.sanitize_for_serialization(path_params)
+ path_params = self.parameters_to_tuples(
+ path_params,
+ collection_formats
+ )
+ for k, v in path_params:
+ # specified safe chars, encode everything
+ resource_path = resource_path.replace(
+ '{%s}' % k,
+ quote(str(v), safe=config.safe_chars_for_path_param)
+ )
- def __serialize_pipe_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # post parameters
+ if post_params or files:
+ post_params = post_params if post_params else []
+ post_params = self.sanitize_for_serialization(post_params)
+ post_params = self.parameters_to_tuples(
+ post_params,
+ collection_formats
+ )
+ if files:
+ post_params.extend(self.files_parameters(files))
- def __serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._serialize_form(
- in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
+ # auth setting
+ self.update_params_for_auth(
+ header_params,
+ query_params,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=_request_auth
)
- return self._to_dict(self.name, value)
- def get_prefix_separator_iterator(self) -> typing.Optional[PrefixSeparatorIterator]:
- if self.style is ParameterStyle.FORM:
- return PrefixSeparatorIterator('?', '&')
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return PrefixSeparatorIterator('', '%20')
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return PrefixSeparatorIterator('', '|')
-
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> query
- query:
- - GET/HEAD/DELETE: could use fields
- - PUT/POST: must use urlencode to send parameters
- returns fields: tuple
- spaceDelimited -> query
- returns fields
- pipeDelimited -> query
- returns fields
- deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
- returns fields
- """
- if self.style:
- # TODO update query ones to omit setting values when [] {} or None is input
- if self.style is ParameterStyle.FORM:
- return self.__serialize_form(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
- # self.content will be length one
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
- return self._to_dict(
- self.name,
- next(prefix_separator_iterator) + self.name + '=' + quote(value)
- )
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+ # body
+ if body:
+ body = self.sanitize_for_serialization(body)
+ # request url
+ if _host is None or self.configuration.ignore_operation_servers:
+ url = self.configuration.host + resource_path
+ else:
+ # use server/host defined in path or operation instead
+ url = _host + resource_path
+
+ # query parameters
+ if query_params:
+ query_params = self.sanitize_for_serialization(query_params)
+ url_query = self.parameters_to_url_query(
+ query_params,
+ collection_formats
+ )
+ url += "?" + url_query
-class CookieParameter(ParameterBase, StyleFormSerializer):
+ return method, url, header_params, body, post_params
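+
+    # Illustrative note (reviewer aid, not generated output): for the
+    # ConfigApi.get_version call shown earlier, the RequestSerialized tuple
+    # is roughly ('GET', 'http://localhost:5000/api/version',
+    # {'Accept': 'application/json', 'User-Agent': '...'}, None, []).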
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None and content is None and schema else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.COOKIE,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> cookie
- returns fields: tuple
- """
- if self.style:
- """
- TODO add escaping of comma, space, equals
- or turn encoding on
- """
- value = self._serialize_form(
- cast_in_data,
- explode=self.explode,
- name=self.name,
- percent_encode=False,
- prefix_separator_iterator=PrefixSeparatorIterator('', '&')
- )
- return self._to_dict(self.name, value)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class HeaderParameter(ParameterBase, StyleSimpleSerializer):
- def __init__(
+ def call_api(
self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.HEADER,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
-
- @staticmethod
- def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
- data = tuple(t for t in in_data if t)
- headers = HTTPHeaderDict()
- if not data:
- return headers
- headers.extend(data)
- return headers
+ method,
+ url,
+ header_params=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ) -> rest.RESTResponse:
+ """Makes the HTTP request (synchronous)
+ :param method: Method to call.
+ :param url: Path to method endpoint.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param dict post_params: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param _request_timeout: timeout setting for this request.
+ :return: RESTResponse
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> HTTPHeaderDict:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> header
- headers: PoolManager needs a mapping, tuple is close
- returns headers: dict
- """
- if self.style:
- value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
- return self.__to_headers(((self.name, value),))
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self.__to_headers(((self.name, value),))
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class Encoding:
- def __init__(
- self,
- content_type: str,
- headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: bool = False,
- ):
- self.content_type = content_type
- self.headers = headers
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
-
-
-@dataclass
-class MediaType:
- """
- Used to store request and response body schema information
- encoding:
- A map between a property name and its encoding information.
- The key, being the property name, MUST exist in the schema as a property.
- The encoding object SHALL only apply to requestBody objects when the media type is
- multipart or application/x-www-form-urlencoded.
- """
- schema: typing.Optional[typing.Type[Schema]] = None
- encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+ try:
+ # perform request and return response
+ response_data = self.rest_client.request(
+ method, url,
+ headers=header_params,
+ body=body, post_params=post_params,
+ _request_timeout=_request_timeout
+ )
+ except ApiException as e:
+ raise e
-@dataclass
-class ApiResponse:
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, Schema]
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
+ return response_data
- def __init__(
+ def response_deserialize(
self,
- response: urllib3.HTTPResponse,
- body: typing.Union[Unset, typing.Type[Schema]],
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
- ):
- """
- pycharm needs this to prevent 'Unexpected argument' warnings
+ response_data: rest.RESTResponse,
+ response_types_map: Optional[Dict[str, ApiResponseT]]=None
+ ) -> ApiResponse[ApiResponseT]:
+ """Deserializes response into an object.
+ :param response_data: RESTResponse object to be deserialized.
+ :param response_types_map: dict of response types.
+ :return: ApiResponse
"""
- self.response = response
- self.body = body
- self.headers = headers
+ msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+ assert response_data.data is not None, msg
-@dataclass
-class ApiResponseWithoutDeserialization(ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, typing.Type[Schema]] = unset
- headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
+ response_type = response_types_map.get(str(response_data.status), None)
+ if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599:
+ # if not found, look for '1XX', '2XX', etc.
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None)
+ # deserialize response data
+ response_text = None
+ return_data = None
+ try:
+ if response_type == "bytearray":
+ return_data = response_data.data
+ elif response_type == "file":
+ return_data = self.__deserialize_file(response_data)
+ elif response_type is not None:
+ match = None
+ content_type = response_data.getheader('content-type')
+ if content_type is not None:
+ match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+ encoding = match.group(1) if match else "utf-8"
+ response_text = response_data.data.decode(encoding)
+ return_data = self.deserialize(response_text, response_type, content_type)
+ finally:
+ if not 200 <= response_data.status <= 299:
+ raise ApiException.from_response(
+ http_resp=response_data,
+ body=response_text,
+ data=return_data,
+ )
-class OpenApiResponse(JSONDetector):
- __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+ return ApiResponse(
+ status_code = response_data.status,
+ data = return_data,
+ headers = response_data.getheaders(),
+ raw_data = response_data.data
+ )
- def __init__(
- self,
- response_cls: typing.Type[ApiResponse] = ApiResponse,
- content: typing.Optional[typing.Dict[str, MediaType]] = None,
- headers: typing.Optional[typing.List[HeaderParameter]] = None,
- ):
- self.headers = headers
- if content is not None and len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
- self.response_cls = response_cls
-
- @staticmethod
- def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
- # python must be >= 3.9 so we can pass in bytes into json.loads
- return json.loads(response.data)
-
- @staticmethod
- def __file_name_from_response_url(response_url: typing.Optional[str]) -> typing.Optional[str]:
- if response_url is None:
- return None
- url_path = urlparse(response_url).path
- if url_path:
- path_basename = os.path.basename(url_path)
- if path_basename:
- _filename, ext = os.path.splitext(path_basename)
- if ext:
- return path_basename
- return None
+ def sanitize_for_serialization(self, obj):
+ """Builds a JSON POST object.
- @classmethod
- def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
- if content_disposition is None:
- return None
- match = cls.__filename_content_disposition_pattern.search(content_disposition)
- if not match:
- return None
- return match.group(1)
+ If obj is None, return None.
+ If obj is SecretStr, return obj.get_secret_value()
+ If obj is str, int, long, float, bool, return directly.
+ If obj is datetime.datetime, datetime.date
+ convert to string in iso8601 format.
+ If obj is list, sanitize each element in the list.
+        If obj is dict, sanitize each value in the dict.
+ If obj is OpenAPI model, return the properties dict.
- def __deserialize_application_octet_stream(
- self, response: urllib3.HTTPResponse
- ) -> typing.Union[bytes, io.BufferedReader]:
- """
- urllib3 use cases:
- 1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
- 2. when preload_content=False (stream=True) then supports_chunked_reads is True and
- a file will be written and returned
+ :param obj: The data to serialize.
+ :return: The serialized form of data.
"""
- if response.supports_chunked_reads():
- file_name = (
- self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
- or self.__file_name_from_response_url(response.geturl())
+ if obj is None:
+ return None
+ elif isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, SecretStr):
+ return obj.get_secret_value()
+ elif isinstance(obj, self.PRIMITIVE_TYPES):
+ return obj
+ elif isinstance(obj, list):
+ return [
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
+ ]
+ elif isinstance(obj, tuple):
+ return tuple(
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
)
+ elif isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
- if file_name is None:
- _fd, path = tempfile.mkstemp()
- else:
- path = os.path.join(tempfile.gettempdir(), file_name)
-
- with open(path, 'wb') as new_file:
- chunk_size = 1024
- while True:
- data = response.read(chunk_size)
- if not data:
- break
- new_file.write(data)
- # release_conn is needed for streaming connections only
- response.release_conn()
- new_file = open(path, 'rb')
- return new_file
+ elif isinstance(obj, dict):
+ obj_dict = obj
else:
- return response.data
+ # Convert model obj to dict except
+ # attributes `openapi_types`, `attribute_map`
+ # and attributes which value is not None.
+ # Convert attribute name to json key in
+ # model definition for request.
+ if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')):
+ obj_dict = obj.to_dict()
+ else:
+ obj_dict = obj.__dict__
- @staticmethod
- def __deserialize_multipart_form_data(
- response: urllib3.HTTPResponse
- ) -> typing.Dict[str, typing.Any]:
- msg = email.message_from_bytes(response.data)
return {
- part.get_param("name", header="Content-Disposition"): part.get_payload(
- decode=True
- ).decode(part.get_content_charset())
- if part.get_content_charset()
- else part.get_payload()
- for part in msg.get_payload()
+ key: self.sanitize_for_serialization(val)
+ for key, val in obj_dict.items()
}
- def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
- content_type = response.getheader('content-type')
- deserialized_body = unset
- streamed = response.supports_chunked_reads()
-
- deserialized_headers = unset
- if self.headers is not None:
- # TODO add header deserialiation here
- pass
-
- if self.content is not None:
- if content_type not in self.content:
- raise ApiValueError(
- f"Invalid content_type returned. Content_type='{content_type}' was returned "
- f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
- )
- body_schema = self.content[content_type].schema
- if body_schema is None:
- # some specs do not define response content media type schemas
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=unset
- )
+ def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]):
+ """Deserializes response into an object.
+
+        :param response_text: response text to be deserialized.
+ :param response_type: class literal for
+ deserialized object, or string of class name.
+ :param content_type: content type of response.
+
+ :return: deserialized object.
+ """
- if self._content_type_is_json(content_type):
- body_data = self.__deserialize_json(response)
- elif content_type == 'application/octet-stream':
- body_data = self.__deserialize_application_octet_stream(response)
- elif content_type.startswith('multipart/form-data'):
- body_data = self.__deserialize_multipart_form_data(response)
- content_type = 'multipart/form-data'
+ # fetch data from response object
+ if content_type is None:
+ try:
+ data = json.loads(response_text)
+ except ValueError:
+ data = response_text
+ elif content_type.startswith("application/json"):
+ if response_text == "":
+ data = ""
else:
- raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
- deserialized_body = body_schema.from_openapi_data_oapg(
- body_data, _configuration=configuration)
- elif streamed:
- response.release_conn()
-
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=deserialized_body
- )
+ data = json.loads(response_text)
+ elif content_type.startswith("text/plain"):
+ data = response_text
+ else:
+ raise ApiException(
+ status=0,
+ reason="Unsupported content type: {0}".format(content_type)
+ )
+ return self.__deserialize(data, response_type)
-class ApiClient:
- """Generic API client for OpenAPI client library builds.
+ def __deserialize(self, data, klass):
+ """Deserializes dict, list, str into an object.
- OpenAPI generic API client. This client handles the client-
- server communication, and is invariant across implementations. Specifics of
- the methods and models for each application are generated from the OpenAPI
- templates.
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
- NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
+ :return: object.
+ """
+ if data is None:
+ return None
- :param configuration: .Configuration object for this client
- :param header_name: a header to pass when making calls to the API.
- :param header_value: a header value to pass when making calls to
- the API.
- :param cookie: a cookie to include in the header when making calls
- to the API
- :param pool_threads: The number of threads to use for async requests
- to the API. More threads means more concurrent API requests.
- """
+ if isinstance(klass, str):
+ if klass.startswith('List['):
+ m = re.match(r'List\[(.*)]', klass)
+ assert m is not None, "Malformed List type definition"
+ sub_kls = m.group(1)
+ return [self.__deserialize(sub_data, sub_kls)
+ for sub_data in data]
+
+ if klass.startswith('Dict['):
+ m = re.match(r'Dict\[([^,]*), (.*)]', klass)
+ assert m is not None, "Malformed Dict type definition"
+ sub_kls = m.group(2)
+ return {k: self.__deserialize(v, sub_kls)
+ for k, v in data.items()}
+
+ # convert str to class
+ if klass in self.NATIVE_TYPES_MAPPING:
+ klass = self.NATIVE_TYPES_MAPPING[klass]
+ else:
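+ # not a native type name: resolve it as a model class from cloudharness_cli.common.models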
+ klass = getattr(cloudharness_cli.common.models, klass)
+
+ if klass in self.PRIMITIVE_TYPES:
+ return self.__deserialize_primitive(data, klass)
+ elif klass == object:
+ return self.__deserialize_object(data)
+ elif klass == datetime.date:
+ return self.__deserialize_date(data)
+ elif klass == datetime.datetime:
+ return self.__deserialize_datetime(data)
+ elif issubclass(klass, Enum):
+ return self.__deserialize_enum(data, klass)
+ else:
+ return self.__deserialize_model(data, klass)
- _pool = None
+ def parameters_to_tuples(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __init__(
- self,
- configuration: typing.Optional[Configuration] = None,
- header_name: typing.Optional[str] = None,
- header_value: typing.Optional[str] = None,
- cookie: typing.Optional[str] = None,
- pool_threads: int = 1
- ):
- if configuration is None:
- configuration = Configuration()
- self.configuration = configuration
- self.pool_threads = pool_threads
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: Parameters as list of tuples, collections formatted
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if k in collection_formats:
+ collection_format = collection_formats[k]
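+ # 'multi' repeats the key for each value; ssv/tsv/pipes/csv join values with a delimiter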
+ if collection_format == 'multi':
+ new_params.extend((k, value) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(str(value) for value in v)))
+ else:
+ new_params.append((k, v))
+ return new_params
- self.rest_client = rest.RESTClientObject(configuration)
- self.default_headers = HTTPHeaderDict()
- if header_name is not None:
- self.default_headers[header_name] = header_value
- self.cookie = cookie
- # Set default User-Agent.
- self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ def parameters_to_url_query(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __enter__(self):
- return self
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: URL query string (e.g. a=Hello%20World&b=123)
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
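+ # normalize values: booleans lowercased, numbers stringified, dicts JSON-encoded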
+ if isinstance(v, bool):
+ v = str(v).lower()
+ if isinstance(v, (int, float)):
+ v = str(v)
+ if isinstance(v, dict):
+ v = json.dumps(v)
+
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, str(value)) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(quote(str(value)) for value in v))
+ )
+ else:
+ new_params.append((k, quote(str(v))))
- def __exit__(self, exc_type, exc_value, traceback):
- self.close()
+ return "&".join(["=".join(map(str, item)) for item in new_params])
- def close(self):
- if self._pool:
- self._pool.close()
- self._pool.join()
- self._pool = None
- if hasattr(atexit, 'unregister'):
- atexit.unregister(self.close)
+ def files_parameters(self, files: Dict[str, Union[str, bytes]]):
+ """Builds form parameters.
- @property
- def pool(self):
- """Create thread pool on first request
- avoids instantiating unused threadpool for blocking clients.
+ :param files: File parameters.
+ :return: Form parameters with files.
"""
- if self._pool is None:
- atexit.register(self.close)
- self._pool = ThreadPool(self.pool_threads)
- return self._pool
-
- @property
- def user_agent(self):
- """User agent for this API client"""
- return self.default_headers['User-Agent']
+ params = []
+ for k, v in files.items():
+ if isinstance(v, str):
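+ # a str value is treated as a file path; its basename becomes the upload filename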
+ with open(v, 'rb') as f:
+ filename = os.path.basename(f.name)
+ filedata = f.read()
+ elif isinstance(v, bytes):
+ filename = k
+ filedata = v
+ else:
+ raise ValueError("Unsupported file value")
+ mimetype = (
+ mimetypes.guess_type(filename)[0]
+ or 'application/octet-stream'
+ )
+ params.append((k, (filename, filedata, mimetype)))
+ return params
- @user_agent.setter
- def user_agent(self, value):
- self.default_headers['User-Agent'] = value
+ def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+ """Returns `Accept` based on an array of accepts provided.
- def set_default_header(self, header_name, header_value):
- self.default_headers[header_name] = header_value
+ :param accepts: List of headers.
+ :return: Accept (e.g. application/json).
+ """
+ if not accepts:
+ return None
- def __call_api(
- self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
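+ # prefer a JSON media type when one is offered, else fall back to the first entry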
+ for accept in accepts:
+ if re.search('json', accept, re.IGNORECASE):
+ return accept
- # header parameters
- used_headers = HTTPHeaderDict(self.default_headers)
- if self.cookie:
- headers['Cookie'] = self.cookie
+ return accepts[0]
- # auth setting
- self.update_params_for_auth(used_headers,
- auth_settings, resource_path, method, body)
+ def select_header_content_type(self, content_types):
+ """Returns `Content-Type` based on an array of content_types provided.
- # must happen after cookie setting and auth setting in case user is overriding those
- if headers:
- used_headers.update(headers)
+ :param content_types: List of content-types.
+ :return: Content-Type (e.g. application/json).
+ """
+ if not content_types:
+ return None
- # request url
- if host is None:
- url = self.configuration.host + resource_path
- else:
- # use server/host defined in path or operation instead
- url = host + resource_path
+ for content_type in content_types:
+ if re.search('json', content_type, re.IGNORECASE):
+ return content_type
- # perform request and return response
- response = self.request(
- method,
- url,
- headers=used_headers,
- fields=fields,
- body=body,
- stream=stream,
- timeout=timeout,
- )
- return response
+ return content_types[0]
- def call_api(
+ def update_params_for_auth(
self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- async_req: typing.Optional[bool] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request (synchronous) and returns deserialized data.
-
- To make an async_req request, set the async_req parameter.
+ headers,
+ queries,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=None
+ ) -> None:
+ """Updates header and query params based on authentication setting.
- :param resource_path: Path to method endpoint.
- :param method: Method to call.
- :param headers: Header parameters to be
- placed in the request header.
- :param body: Request body.
- :param fields: Request post form parameters,
- for `application/x-www-form-urlencoded`, `multipart/form-data`.
- :param auth_settings: Auth Settings names for the request.
- :param async_req: execute request asynchronously
- :type async_req: bool, optional TODO remove, unused
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Also when True, if the openapi spec describes a file download,
- the data will be written to a local filesystme file and the BinarySchema
- instance will also inherit from FileSchema and FileIO
- Default is False.
- :type stream: bool, optional
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
- :param host: api endpoint host
- :return:
- If async_req parameter is True,
- the request will be called asynchronously.
- The method will return the request thread.
- If parameter async_req is False or missing,
- then the method will return the response directly.
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param request_auth: if set, the provided settings will
+ override the token in the configuration.
"""
+ if not auth_settings:
+ return
- if not async_req:
- return self.__call_api(
- resource_path,
- method,
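+ # an explicit request_auth overrides the auth settings from the configuration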
+ if request_auth:
+ self._apply_auth_params(
headers,
- body,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
- )
-
- return self.pool.apply_async(
- self.__call_api,
- (
+ queries,
resource_path,
method,
- headers,
body,
- json,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
+ request_auth
)
- )
-
- def request(
+ else:
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if auth_setting:
+ self._apply_auth_params(
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ )
+
+ def _apply_auth_params(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request using RESTClient."""
- if method == "GET":
- return self.rest_client.GET(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "HEAD":
- return self.rest_client.HEAD(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "OPTIONS":
- return self.rest_client.OPTIONS(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "POST":
- return self.rest_client.POST(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PUT":
- return self.rest_client.PUT(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PATCH":
- return self.rest_client.PATCH(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "DELETE":
- return self.rest_client.DELETE(url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body)
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ ) -> None:
+ """Updates the request parameters based on a single auth_setting
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param auth_setting: auth settings for the endpoint
+ """
+ if auth_setting['in'] == 'cookie':
+ headers['Cookie'] = auth_setting['value']
+ elif auth_setting['in'] == 'header':
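+ # 'http-signature' auth is not a plain key/value header, so no header is set here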
+ if auth_setting['type'] != 'http-signature':
+ headers[auth_setting['key']] = auth_setting['value']
+ elif auth_setting['in'] == 'query':
+ queries.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
- "http method must be `GET`, `HEAD`, `OPTIONS`,"
- " `POST`, `PATCH`, `PUT` or `DELETE`."
+ 'Authentication token must be in `cookie`, `header` or `query`'
)
- def update_params_for_auth(self, headers, auth_settings,
- resource_path, method, body):
- """Updates header and query params based on authentication setting.
+ def __deserialize_file(self, response):
+ """Deserializes body to file
- :param headers: Header parameters dict to be updated.
- :param auth_settings: Authentication setting identifiers list.
- :param resource_path: A string representation of the HTTP request resource path.
- :param method: A string representation of the HTTP request method.
- :param body: A object representing the body of the HTTP request.
- The object type is the return value of _encoder.default().
- """
- if not auth_settings:
- return
+ Saves response body into a file in a temporary folder,
+ using the filename from the `Content-Disposition` header if provided.
- for auth in auth_settings:
- auth_setting = self.configuration.auth_settings().get(auth)
- if not auth_setting:
- continue
- if auth_setting['in'] == 'cookie':
- headers.add('Cookie', auth_setting['value'])
- elif auth_setting['in'] == 'header':
- if auth_setting['type'] != 'http-signature':
- headers.add(auth_setting['key'], auth_setting['value'])
- elif auth_setting['in'] == 'query':
- """ TODO implement auth in query
- need to pass in prefix_separator_iterator
- and need to output resource_path with query params added
- """
- raise ApiValueError("Auth in query not yet implemented")
- else:
- raise ApiValueError(
- 'Authentication token must be in `query` or `header`'
- )
+ :param response: RESTResponse.
+ :return: file path.
+ """
+ fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
+ os.close(fd)
+ os.remove(path)
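+ # mkstemp only reserves a unique path here; the file is recreated below,
+ # possibly renamed using the Content-Disposition filename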
+
+ content_disposition = response.getheader("Content-Disposition")
+ if content_disposition:
+ m = re.search(
+ r'filename=[\'"]?([^\'"\s]+)[\'"]?',
+ content_disposition
+ )
+ assert m is not None, "Unexpected 'content-disposition' header value"
+ filename = m.group(1)
+ path = os.path.join(os.path.dirname(path), filename)
-class Api:
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
+ with open(path, "wb") as f:
+ f.write(response.data)
- Do not edit the class manually.
- """
+ return path
+
+ def __deserialize_primitive(self, data, klass):
+ """Deserializes string to primitive type.
- def __init__(self, api_client: typing.Optional[ApiClient] = None):
- if api_client is None:
- api_client = ApiClient()
- self.api_client = api_client
+ :param data: str.
+ :param klass: class literal.
- @staticmethod
- def _verify_typed_dict_inputs_oapg(cls: typing.Type[typing_extensions.TypedDict], data: typing.Dict[str, typing.Any]):
+ :return: int, float, str, bool.
"""
- Ensures that:
- - required keys are present
- - additional properties are not input
- - value stored under required keys do not have the value unset
- Note: detailed value checking is done in schema classes
+ try:
+ return klass(data)
+ except UnicodeEncodeError:
+ return str(data)
+ except TypeError:
+ return data
+
+ def __deserialize_object(self, value):
+ """Return an original value.
+
+ :return: object.
"""
- missing_required_keys = []
- required_keys_with_unset_values = []
- for required_key in cls.__required_keys__:
- if required_key not in data:
- missing_required_keys.append(required_key)
- continue
- value = data[required_key]
- if value is unset:
- required_keys_with_unset_values.append(required_key)
- if missing_required_keys:
- raise ApiTypeError(
- '{} missing {} required arguments: {}'.format(
- cls.__name__, len(missing_required_keys), missing_required_keys
- )
- )
- if required_keys_with_unset_values:
- raise ApiValueError(
- '{} contains invalid unset values for {} required keys: {}'.format(
- cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
- )
- )
+ return value
- disallowed_additional_keys = []
- for key in data:
- if key in cls.__required_keys__ or key in cls.__optional_keys__:
- continue
- disallowed_additional_keys.append(key)
- if disallowed_additional_keys:
- raise ApiTypeError(
- '{} got {} unexpected keyword arguments: {}'.format(
- cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
- )
- )
+ def __deserialize_date(self, string):
+ """Deserializes string to date.
- def _get_host_oapg(
- self,
- operation_id: str,
- servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
- host_index: typing.Optional[int] = None
- ) -> typing.Optional[str]:
- configuration = self.api_client.configuration
+ :param string: str.
+ :return: date.
+ """
try:
- if host_index is None:
- index = configuration.server_operation_index.get(
- operation_id, configuration.server_index
- )
- else:
- index = host_index
- server_variables = configuration.server_operation_variables.get(
- operation_id, configuration.server_variables
- )
- host = configuration.get_host_from_settings(
- index, variables=server_variables, servers=servers
+ return parse(string).date()
+ except ImportError:
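+ # if dateutil's parse is unavailable, the raw string is returned unparsed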
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason="Failed to parse `{0}` as date object".format(string)
)
- except IndexError:
- if servers:
- raise ApiValueError(
- "Invalid host index. Must be 0 <= index < %s" %
- len(servers)
- )
- host = None
- return host
-
-
-class SerializedRequestBody(typing_extensions.TypedDict, total=False):
- body: typing.Union[str, bytes]
- fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+ def __deserialize_datetime(self, string):
+ """Deserializes string to datetime.
-class RequestBody(StyleFormSerializer, JSONDetector):
- """
- A request body parameter
- content: content_type to MediaType Schema info
- """
- __json_encoder = JSONEncoder()
+ The string should be in iso8601 datetime format.
- def __init__(
- self,
- content: typing.Dict[str, MediaType],
- required: bool = False,
- ):
- self.required = required
- if len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
-
- def __serialize_json(
- self,
- in_data: typing.Any
- ) -> typing.Dict[str, bytes]:
- in_data = self.__json_encoder.default(in_data)
- json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
- "utf-8"
- )
- return dict(body=json_str)
-
- @staticmethod
- def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
- if isinstance(in_data, frozendict.frozendict):
- raise ValueError('Unable to serialize type frozendict.frozendict to text/plain')
- elif isinstance(in_data, tuple):
- raise ValueError('Unable to serialize type tuple to text/plain')
- elif isinstance(in_data, NoneClass):
- raise ValueError('Unable to serialize type NoneClass to text/plain')
- elif isinstance(in_data, BoolClass):
- raise ValueError('Unable to serialize type BoolClass to text/plain')
- return dict(body=str(in_data))
-
- def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
- json_value = self.__json_encoder.default(value)
- return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
-
- def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
- if isinstance(value, str):
- return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
- elif isinstance(value, bytes):
- return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
- elif isinstance(value, FileIO):
- request_field = RequestField(
- name=key,
- data=value.read(),
- filename=os.path.basename(value.name),
- headers={'Content-Type': 'application/octet-stream'}
+ :param string: str.
+ :return: datetime.
+ """
+ try:
+ return parse(string)
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as datetime object"
+ .format(string)
+ )
)
- value.close()
- return request_field
- else:
- return self.__multipart_json_item(key=key, value=value)
- def __serialize_multipart_form_data(
- self, in_data: Schema
- ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data')
+ def __deserialize_enum(self, data, klass):
+ """Deserializes primitive type to enum.
+
+ :param data: primitive type.
+ :param klass: class literal.
+ :return: enum value.
"""
- In a multipart/form-data request body, each schema property, or each element of a schema array property,
- takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
- for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+ try:
+ return klass(data)
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as `{1}`"
+ .format(data, klass)
+ )
+ )
- When passing in multipart types, boundaries MAY be used to separate sections of the content being
- transferred – thus, the following default Content-Types are defined for multipart:
+ def __deserialize_model(self, data, klass):
+ """Deserializes list or dict to model.
- If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
- If the property is complex, or an array of complex values, the default Content-Type is application/json
- Question: how is the array of primitives encoded?
- If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
- """
- fields = []
- for key, value in in_data.items():
- if isinstance(value, tuple):
- if value:
- # values use explode = True, so the code makes a RequestField for each item with name=key
- for item in value:
- request_field = self.__multipart_form_item(key=key, value=item)
- fields.append(request_field)
- else:
- # send an empty array as json because exploding will not send it
- request_field = self.__multipart_json_item(key=key, value=value)
- fields.append(request_field)
- else:
- request_field = self.__multipart_form_item(key=key, value=value)
- fields.append(request_field)
-
- return dict(fields=tuple(fields))
-
- def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
- if isinstance(in_data, bytes):
- return dict(body=in_data)
- # FileIO type
- result = dict(body=in_data.read())
- in_data.close()
- return result
-
- def __serialize_application_x_www_form_data(
- self, in_data: typing.Any
- ) -> SerializedRequestBody:
+ :param data: dict, list.
+ :param klass: class literal.
+ :return: model object.
"""
- POST submission of form data in body
- """
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(
- f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
- cast_in_data = self.__json_encoder.default(in_data)
- value = self._serialize_form(cast_in_data, name='', explode=True, percent_encode=True)
- return dict(body=value)
-
- def serialize(
- self, in_data: typing.Any, content_type: str
- ) -> SerializedRequestBody:
- """
- If a str is returned then the result will be assigned to data when making the request
- If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
- Return a tuple of
- The key of the return dict is
- - body for application/json
- - encode_multipart and fields for multipart/form-data
- """
- media_type = self.content[content_type]
- if isinstance(in_data, media_type.schema):
- cast_in_data = in_data
- elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
- cast_in_data = media_type.schema(**in_data)
- else:
- cast_in_data = media_type.schema(in_data)
- # TODO check for and use encoding if it exists
- # and content_type is multipart or application/x-www-form-urlencoded
- if self._content_type_is_json(content_type):
- return self.__serialize_json(cast_in_data)
- elif content_type == 'text/plain':
- return self.__serialize_text_plain(cast_in_data)
- elif content_type == 'multipart/form-data':
- return self.__serialize_multipart_form_data(cast_in_data)
- elif content_type == 'application/x-www-form-urlencoded':
- return self.__serialize_application_x_www_form_data(cast_in_data)
- elif content_type == 'application/octet-stream':
- return self.__serialize_application_octet_stream(cast_in_data)
- raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
+ return klass.from_dict(data)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/api_response.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/api_response.py
new file mode 100644
index 000000000..9bc7c11f6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/api_response.py
@@ -0,0 +1,21 @@
+"""API response object."""
+
+from __future__ import annotations
+from typing import Optional, Generic, Mapping, TypeVar
+from pydantic import Field, StrictInt, StrictBytes, BaseModel
+
+T = TypeVar("T")
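+# T is the type of the deserialized response body for a given endpoint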
+
+class ApiResponse(BaseModel, Generic[T]):
+ """
+ API response object
+ """
+
+ status_code: StrictInt = Field(description="HTTP status code")
+ headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers")
+ data: T = Field(description="Deserialized data given the data type")
+ raw_data: StrictBytes = Field(description="Raw data (HTTP response body)")
+
+ model_config = {
+ "arbitrary_types_allowed": True
+ }
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/__init__.py
deleted file mode 100644
index 7840f7726..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints then import them from
-# tags, paths, or path_to_api, or tag_to_api
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/path_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/path_to_api.py
deleted file mode 100644
index 7488ae065..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/path_to_api.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.common.paths import PathValues
-from cloudharness_cli.common.apis.paths.sentry_getdsn_appname import SentryGetdsnAppname
-from cloudharness_cli.common.apis.paths.accounts_config import AccountsConfig
-
-PathToApi = typing_extensions.TypedDict(
- 'PathToApi',
- {
- PathValues.SENTRY_GETDSN_APPNAME: SentryGetdsnAppname,
- PathValues.ACCOUNTS_CONFIG: AccountsConfig,
- }
-)
-
-path_to_api = PathToApi(
- {
- PathValues.SENTRY_GETDSN_APPNAME: SentryGetdsnAppname,
- PathValues.ACCOUNTS_CONFIG: AccountsConfig,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/__init__.py
deleted file mode 100644
index 585cfa24d..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.common.apis.path_to_api import path_to_api
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/accounts_config.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/accounts_config.py
deleted file mode 100644
index b70c3dbda..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/accounts_config.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.common.paths.accounts_config.get import ApiForget
-
-
-class AccountsConfig(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/sentry_getdsn_appname.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/sentry_getdsn_appname.py
deleted file mode 100644
index 857659e20..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/paths/sentry_getdsn_appname.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.common.paths.sentry_getdsn_appname.get import ApiForget
-
-
-class SentryGetdsnAppname(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tag_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tag_to_api.py
deleted file mode 100644
index 89eb78ba9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tag_to_api.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.common.apis.tags import TagValues
-from cloudharness_cli.common.apis.tags.sentry_api import SentryApi
-from cloudharness_cli.common.apis.tags.accounts_api import AccountsApi
-
-TagToApi = typing_extensions.TypedDict(
- 'TagToApi',
- {
- TagValues.SENTRY: SentryApi,
- TagValues.ACCOUNTS: AccountsApi,
- }
-)
-
-tag_to_api = TagToApi(
- {
- TagValues.SENTRY: SentryApi,
- TagValues.ACCOUNTS: AccountsApi,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/__init__.py
deleted file mode 100644
index 402054b73..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.common.apis.tag_to_api import tag_to_api
-
-import enum
-
-
-class TagValues(str, enum.Enum):
- SENTRY = "Sentry"
- ACCOUNTS = "Accounts"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/accounts_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/accounts_api.py
deleted file mode 100644
index e7a2a8148..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/accounts_api.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# coding: utf-8
-
-"""
- CH common service API
-
- Cloud Harness Platform - Reference CH service API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.common.paths.accounts_config.get import GetConfig
-
-
-class AccountsApi(
- GetConfig,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/sentry_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/sentry_api.py
deleted file mode 100644
index 06d9a982b..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/apis/tags/sentry_api.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# coding: utf-8
-
-"""
- CH common service API
-
- Cloud Harness Platform - Reference CH service API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.common.paths.sentry_getdsn_appname.get import Getdsn
-
-
-class SentryApi(
- Getdsn,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/configuration.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/configuration.py
index 663a0f53d..e95302f84 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/configuration.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/configuration.py
@@ -3,69 +3,48 @@
"""
CH common service API
- Cloud Harness Platform - Reference CH service API # noqa: E501
+ Cloud Harness Platform - Reference CH service API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
import copy
import logging
+from logging import FileHandler
import multiprocessing
import sys
+from typing import Optional
import urllib3
-from http import client as http_client
-from cloudharness_cli.common.exceptions import ApiValueError
-
+import http.client as httplib
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
- 'minLength', 'pattern', 'maxItems', 'minItems',
- 'uniqueItems', 'maxProperties', 'minProperties',
+ 'minLength', 'pattern', 'maxItems', 'minItems'
}
-class Configuration(object):
- """NOTE: This class is auto generated by OpenAPI Generator
+class Configuration:
+ """This class contains various settings of the API client.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
-
- :param host: Base url
+ :param host: Base url.
+ :param ignore_operation_servers:
+ Boolean to ignore operation servers for the API client.
+ Config will use `host` as the base url regardless of the operation servers.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
- :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
- :param username: Username for HTTP basic authentication
- :param password: Password for HTTP basic authentication
- :param discard_unknown_keys: Boolean value indicating whether to discard
- unknown properties. A server may send a response that includes additional
- properties that are not known by the client in the following scenarios:
- 1. The OpenAPI document is incomplete, i.e. it does not match the server
- implementation.
- 2. The client was generated using an older version of the OpenAPI document
- and the server has been upgraded since then.
- If a schema in the OpenAPI document defines the additionalProperties attribute,
- then all undeclared properties received by the server are injected into the
- additional properties map. In that case, there are undeclared properties, and
- nothing to discard.
- :param disabled_client_side_validations (string): Comma-separated list of
- JSON schema validation keywords to disable JSON schema structural validation
- rules. The following keywords may be specified: multipleOf, maximum,
- exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
- maxItems, minItems.
- By default, the validation is performed for data generated locally by the client
- and data received from the server, independent of any validation performed by
- the server side. If the input data does not satisfy the JSON schema validation
- rules specified in the OpenAPI document, an exception is raised.
- If disabled_client_side_validations is set, structural validation is
- disabled. This can be useful to troubleshoot data validation problem, such as
- when the OpenAPI document validation rules do not match the actual API data
- received by the server.
+ :param username: Username for HTTP basic authentication.
+ :param password: Password for HTTP basic authentication.
+ :param access_token: Access token.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
@@ -74,7 +53,11 @@ class Configuration(object):
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
- The validation of enums is performed for variables with defined enum values before.
+ The validation of enums is performed for variables with defined enum
+ values before substitution.
+ :param ssl_ca_cert: str - the path to a file of concatenated CA certificates
+ in PEM format.
+ :param retries: Number of retries for API requests.
"""
@@ -83,11 +66,15 @@ class Configuration(object):
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
username=None, password=None,
- discard_unknown_keys=False,
- disabled_client_side_validations="",
+ access_token=None,
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
- ):
+ ignore_operation_servers=False,
+ ssl_ca_cert=None,
+ retries=None,
+ *,
+ debug: Optional[bool] = None
+ ) -> None:
"""Constructor
"""
self._base_path = "/api" if host is None else host
@@ -101,6 +88,9 @@ def __init__(self, host=None,
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
+ self.ignore_operation_servers = ignore_operation_servers
+ """Ignore operation servers
+ """
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
@@ -124,8 +114,9 @@ def __init__(self, host=None,
self.password = password
"""Password for HTTP basic authentication
"""
- self.discard_unknown_keys = discard_unknown_keys
- self.disabled_client_side_validations = disabled_client_side_validations
+ self.access_token = access_token
+ """Access token
+ """
self.logger = {}
"""Logging Settings
"""
@@ -137,13 +128,16 @@ def __init__(self, host=None,
self.logger_stream_handler = None
"""Log stream handler
"""
- self.logger_file_handler = None
+ self.logger_file_handler: Optional[FileHandler] = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
- self.debug = False
+ if debug is not None:
+ self.debug = debug
+ else:
+ self.__debug = False
"""Debug switch
"""
@@ -152,7 +146,7 @@ def __init__(self, host=None,
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
- self.ssl_ca_cert = None
+ self.ssl_ca_cert = ssl_ca_cert
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
@@ -164,6 +158,10 @@ def __init__(self, host=None,
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
+ self.tls_server_name = None
+ """SSL/TLS Server Name Indication (SNI)
+ Set this to the SNI value expected by the server.
+ """
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
@@ -173,7 +171,7 @@ def __init__(self, host=None,
cpu_count * 5 is used as default value to increase performance.
"""
- self.proxy = None
+ self.proxy: Optional[str] = None
"""Proxy URL
"""
self.proxy_headers = None
@@ -182,14 +180,23 @@ def __init__(self, host=None,
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
- self.retries = None
+ self.retries = retries
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
- # Options to pass down to the underlying urllib3 socket
self.socket_options = None
+ """Options to pass down to the underlying urllib3 socket
+ """
+
+ self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+ """datetime format
+ """
+
+ self.date_format = "%Y-%m-%d"
+ """date format
+ """
def __deepcopy__(self, memo):
cls = self.__class__
@@ -207,13 +214,6 @@ def __deepcopy__(self, memo):
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
- if name == 'disabled_client_side_validations':
- s = set(filter(None, value.split(',')))
- for v in s:
- if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
- raise ApiValueError(
- "Invalid keyword: '{0}''".format(v))
- self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
@@ -224,21 +224,31 @@ def set_default(cls, default):
:param default: object of Configuration
"""
- cls._default = copy.deepcopy(default)
+ cls._default = default
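+ # the default configuration is now stored by reference instead of deep-copied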
@classmethod
def get_default_copy(cls):
- """Return new instance of configuration.
+ """Deprecated. Please use `get_default` instead.
+
+ :return: The configuration object.
+ """
+ return cls.get_default()
+
+ @classmethod
+ def get_default(cls):
+ """Return the default configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
- configuration passed by the set_default method.
+ configuration.
:return: The configuration object.
"""
- if cls._default is not None:
- return copy.deepcopy(cls._default)
- return Configuration()
+ if cls._default is None:
+ cls._default = Configuration()
+ return cls._default
@property
def logger_file(self):
@@ -292,15 +302,15 @@ def debug(self, value):
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
- # turn on http_client debug
- http_client.HTTPConnection.debuglevel = 1
+ # turn on httplib debug
+ httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
- # turn off http_client debug
- http_client.HTTPConnection.debuglevel = 0
+ # turn off httplib debug
+ httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/exceptions.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/exceptions.py
index e0d9c24ad..921d7be4b 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/exceptions.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/exceptions.py
@@ -3,12 +3,16 @@
"""
CH common service API
- Cloud Harness Platform - Reference CH service API # noqa: E501
+ Cloud Harness Platform - Reference CH service API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+from typing import Any, Optional
+from typing_extensions import Self
class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
@@ -16,7 +20,7 @@ class OpenApiException(Exception):
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
- key_type=None):
+ key_type=None) -> None:
""" Raises an exception for TypeErrors
Args:
@@ -44,7 +48,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None,
class ApiValueError(OpenApiException, ValueError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -62,7 +66,7 @@ def __init__(self, msg, path_to_item=None):
class ApiAttributeError(OpenApiException, AttributeError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Raised when an attribute reference or assignment fails.
@@ -81,7 +85,7 @@ def __init__(self, msg, path_to_item=None):
class ApiKeyError(OpenApiException, KeyError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -99,17 +103,56 @@ def __init__(self, msg, path_to_item=None):
class ApiException(OpenApiException):
- def __init__(self, status=None, reason=None, api_response: 'cloudharness_cli.common.api_client.ApiResponse' = None):
- if api_response:
- self.status = api_response.response.status
- self.reason = api_response.response.reason
- self.body = api_response.response.data
- self.headers = api_response.response.getheaders()
- else:
- self.status = status
- self.reason = reason
- self.body = None
- self.headers = None
+ def __init__(
+ self,
+ status=None,
+ reason=None,
+ http_resp=None,
+ *,
+ body: Optional[str] = None,
+ data: Optional[Any] = None,
+ ) -> None:
+ self.status = status
+ self.reason = reason
+ self.body = body
+ self.data = data
+ self.headers = None
+
+ if http_resp:
+ if self.status is None:
+ self.status = http_resp.status
+ if self.reason is None:
+ self.reason = http_resp.reason
+ if self.body is None:
+ try:
+ self.body = http_resp.data.decode('utf-8')
+ except Exception:
+ pass
+ self.headers = http_resp.getheaders()
+
+ @classmethod
+ def from_response(
+ cls,
+ *,
+ http_resp,
+ body: Optional[str],
+ data: Optional[Any],
+ ) -> Self:
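+ # map well-known HTTP status codes to dedicated exception subclasses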
+ if http_resp.status == 400:
+ raise BadRequestException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 401:
+ raise UnauthorizedException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 403:
+ raise ForbiddenException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 404:
+ raise NotFoundException(http_resp=http_resp, body=body, data=data)
+
+ if 500 <= http_resp.status <= 599:
+ raise ServiceException(http_resp=http_resp, body=body, data=data)
+ raise ApiException(http_resp=http_resp, body=body, data=data)
def __str__(self):
"""Custom error messages for exception"""
@@ -119,12 +162,32 @@ def __str__(self):
error_message += "HTTP response headers: {0}\n".format(
self.headers)
- if self.body:
- error_message += "HTTP response body: {0}\n".format(self.body)
+ if self.data or self.body:
+ error_message += "HTTP response body: {0}\n".format(self.data or self.body)
return error_message
+class BadRequestException(ApiException):
+ pass
+
+
+class NotFoundException(ApiException):
+ pass
+
+
+class UnauthorizedException(ApiException):
+ pass
+
+
+class ForbiddenException(ApiException):
+ pass
+
+
+class ServiceException(ApiException):
+ pass
+
+
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/model/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/model/__init__.py
deleted file mode 100644
index 43d3a937d..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/model/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we can not import model classes here because that would create a circular
-# reference which would not work in python2
-# do not import all models into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all models from one package, import them with
-# from cloudharness_cli.common.models import ModelA, ModelB
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/models/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/__init__.py
index 181123a3f..c7a30be14 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/models/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/__init__.py
@@ -1,13 +1,18 @@
# coding: utf-8
# flake8: noqa
+"""
+ CH common service API
-# import all models into this package
-# if you have many models here with many references from one model to another this may
-# raise a RecursionError
-# to avoid this, import only the models that you directly need like:
-# from from cloudharness_cli.common.model.pet import Pet
-# or import this package, but before doing it, use:
-# import sys
-# sys.setrecursionlimit(n)
+ Cloud Harness Platform - Reference CH service API
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+# import models into model package
+from cloudharness_cli.common.models.app_version import AppVersion
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/models/app_version.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/app_version.py
new file mode 100644
index 000000000..1ab39844c
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/app_version.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional, Set
+from typing_extensions import Self
+
+class AppVersion(BaseModel):
+ """
+
+ """ # noqa: E501
+ build: Optional[StrictStr] = None
+ tag: Optional[StrictStr] = None
+ __properties: ClassVar[List[str]] = ["build", "tag"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of AppVersion from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set()
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of AppVersion from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "build": obj.get("build"),
+ "tag": obj.get("tag")
+ })
+ return _obj
+
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/models/get_config200_response.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/get_config200_response.py
new file mode 100644
index 000000000..25277ad23
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/models/get_config200_response.py
@@ -0,0 +1,91 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional, Set
+from typing_extensions import Self
+
+class GetConfig200Response(BaseModel):
+ """
+ GetConfig200Response
+ """ # noqa: E501
+ url: Optional[StrictStr] = Field(default=None, description="The auth URL.")
+ realm: Optional[StrictStr] = Field(default=None, description="The realm.")
+ client_id: Optional[StrictStr] = Field(default=None, description="The clientID.", alias="clientId")
+ __properties: ClassVar[List[str]] = ["url", "realm", "clientId"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of GetConfig200Response from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set()
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of GetConfig200Response from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "url": obj.get("url"),
+ "realm": obj.get("realm"),
+ "clientId": obj.get("clientId")
+ })
+ return _obj
+
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/__init__.py
deleted file mode 100644
index 4efcd37ea..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.common.apis.path_to_api import path_to_api
-
-import enum
-
-
-class PathValues(str, enum.Enum):
- SENTRY_GETDSN_APPNAME = "/sentry/getdsn/{appname}"
- ACCOUNTS_CONFIG = "/accounts/config"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/__init__.py
deleted file mode 100644
index 62417d82c..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.common.paths.accounts_config import Api
-
-from cloudharness_cli.common.paths import PathValues
-
-path = PathValues.ACCOUNTS_CONFIG
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.py
deleted file mode 100644
index 65b8b3bb6..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.common import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.common import schemas # noqa: F401
-
-from . import path
-
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.DictSchema
-):
-
-
- class MetaOapg:
-
- class properties:
- url = schemas.StrSchema
- realm = schemas.StrSchema
- clientId = schemas.StrSchema
- __annotations__ = {
- "url": url,
- "realm": realm,
- "clientId": clientId,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["url"]) -> MetaOapg.properties.url: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["realm"]) -> MetaOapg.properties.realm: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["clientId"]) -> MetaOapg.properties.clientId: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["url", "realm", "clientId", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["url"]) -> typing.Union[MetaOapg.properties.url, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["realm"]) -> typing.Union[MetaOapg.properties.realm, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["clientId"]) -> typing.Union[MetaOapg.properties.clientId, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["url", "realm", "clientId", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- url: typing.Union[MetaOapg.properties.url, str, schemas.Unset] = schemas.unset,
- realm: typing.Union[MetaOapg.properties.realm, str, schemas.Unset] = schemas.unset,
- clientId: typing.Union[MetaOapg.properties.clientId, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- *args,
- url=url,
- realm=realm,
- clientId=clientId,
- _configuration=_configuration,
- **kwargs,
- )
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '200': _response_for_200,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_config_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Gets the config for logging in into accounts
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetConfig(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_config(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_config_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_config_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
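For contrast with the flat client that replaces it, a hypothetical invocation of the path-based module deleted above (ApiClient and Configuration constructor signatures assumed from the standard generated layout):

    from cloudharness_cli.common import api_client
    from cloudharness_cli.common.configuration import Configuration
    from cloudharness_cli.common.paths.accounts_config.get import ApiForget

    client = api_client.ApiClient(configuration=Configuration(host="https://api.example.com"))
    api_response = ApiForget(client).get()        # ApiResponseFor200 on 2xx, else ApiException
    print(api_response.body["url"], api_response.body["realm"])
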
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.pyi
deleted file mode 100644
index 6ebea5ea9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/accounts_config/get.pyi
+++ /dev/null
@@ -1,297 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.common import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.common import schemas # noqa: F401
-
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.DictSchema
-):
-
-
- class MetaOapg:
-
- class properties:
- url = schemas.StrSchema
- realm = schemas.StrSchema
- clientId = schemas.StrSchema
- __annotations__ = {
- "url": url,
- "realm": realm,
- "clientId": clientId,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["url"]) -> MetaOapg.properties.url: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["realm"]) -> MetaOapg.properties.realm: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["clientId"]) -> MetaOapg.properties.clientId: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["url", "realm", "clientId", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["url"]) -> typing.Union[MetaOapg.properties.url, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["realm"]) -> typing.Union[MetaOapg.properties.realm, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["clientId"]) -> typing.Union[MetaOapg.properties.clientId, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["url", "realm", "clientId", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- url: typing.Union[MetaOapg.properties.url, str, schemas.Unset] = schemas.unset,
- realm: typing.Union[MetaOapg.properties.realm, str, schemas.Unset] = schemas.unset,
- clientId: typing.Union[MetaOapg.properties.clientId, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- *args,
- url=url,
- realm=realm,
- clientId=clientId,
- _configuration=_configuration,
- **kwargs,
- )
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_config_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_config_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Gets the config for logging in into accounts
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetConfig(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_config(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_config(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_config_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_config_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/__init__.py
deleted file mode 100644
index e4d361b93..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.common.paths.sentry_getdsn_appname import Api
-
-from cloudharness_cli.common.paths import PathValues
-
-path = PathValues.SENTRY_GETDSN_APPNAME
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.py
deleted file mode 100644
index b063200ea..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.py
+++ /dev/null
@@ -1,287 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.common import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.common import schemas # noqa: F401
-
-from . import path
-
-# Path params
-AppnameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'appname': typing.Union[AppnameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_appname = api_client.PathParameter(
- name="appname",
- style=api_client.ParameterStyle.SIMPLE,
- schema=AppnameSchema,
- required=True,
-)
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '200': _response_for_200,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _getdsn_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Gets the Sentry DSN for a given application
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_appname,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Getdsn(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def getdsn(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._getdsn_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._getdsn_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
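The sentry endpoint layers the path-parameter machinery (RequestPathParams, request_path_appname) on top of the same pattern; a sketch of the removed flow, reusing the hypothetical client above with an illustrative appname:

    from cloudharness_cli.common.paths.sentry_getdsn_appname.get import Getdsn

    dsn = Getdsn(client).getdsn(path_params={"appname": "workflows"})
    print(dsn.body)   # a bare string: SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
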
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.pyi
deleted file mode 100644
index e7655b5cc..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/paths/sentry_getdsn_appname/get.pyi
+++ /dev/null
@@ -1,282 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.common import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.common import schemas # noqa: F401
-
-# Path params
-AppnameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'appname': typing.Union[AppnameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_appname = api_client.PathParameter(
- name="appname",
- style=api_client.ParameterStyle.SIMPLE,
- schema=AppnameSchema,
- required=True,
-)
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _getdsn_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _getdsn_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Gets the Sentry DSN for a given application
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_appname,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Getdsn(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def getdsn(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def getdsn(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._getdsn_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._getdsn_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/test/common/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/py.typed
similarity index 100%
rename from libraries/client/cloudharness_cli/test/common/__init__.py
rename to libraries/client/cloudharness_cli/cloudharness_cli/common/py.typed
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/rest.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/rest.py
index 198f23c67..13170ca47 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/rest.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/common/rest.py
@@ -3,34 +3,66 @@
"""
CH common service API
- Cloud Harness Platform - Reference CH service API # noqa: E501
+ Cloud Harness Platform - Reference CH service API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-import logging
+import io
+import json
+import re
import ssl
-from urllib.parse import urlencode
-import typing
-import certifi
import urllib3
-from urllib3._collections import HTTPHeaderDict
from cloudharness_cli.common.exceptions import ApiException, ApiValueError
+SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
+RESTResponseType = urllib3.HTTPResponse
+
+
+def is_socks_proxy_url(url):
+ if url is None:
+ return False
+ split_section = url.split("://")
+ if len(split_section) < 2:
+ return False
+ else:
+ return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
+
+
+class RESTResponse(io.IOBase):
+
+ def __init__(self, resp) -> None:
+ self.response = resp
+ self.status = resp.status
+ self.reason = resp.reason
+ self.data = None
+
+ def read(self):
+ if self.data is None:
+ self.data = self.response.data
+ return self.data
+
+ def getheaders(self):
+ """Returns a dictionary of the response headers."""
+ return self.response.headers
-logger = logging.getLogger(__name__)
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.response.headers.get(name, default)
-class RESTClientObject(object):
+class RESTClientObject:
- def __init__(self, configuration, pools_size=4, maxsize=None):
+ def __init__(self, configuration) -> None:
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
- # maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
@@ -39,140 +71,167 @@ def __init__(self, configuration, pools_size=4, maxsize=None):
else:
cert_reqs = ssl.CERT_NONE
- # ca_certs
- if configuration.ssl_ca_cert:
- ca_certs = configuration.ssl_ca_cert
- else:
- # if not set certificate file, use Mozilla's root certificates.
- ca_certs = certifi.where()
-
- addition_pool_args = {}
+ pool_args = {
+ "cert_reqs": cert_reqs,
+ "ca_certs": configuration.ssl_ca_cert,
+ "cert_file": configuration.cert_file,
+ "key_file": configuration.key_file,
+ }
if configuration.assert_hostname is not None:
- addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
+ pool_args['assert_hostname'] = (
+ configuration.assert_hostname
+ )
if configuration.retries is not None:
- addition_pool_args['retries'] = configuration.retries
+ pool_args['retries'] = configuration.retries
+
+ if configuration.tls_server_name:
+ pool_args['server_hostname'] = configuration.tls_server_name
+
if configuration.socket_options is not None:
- addition_pool_args['socket_options'] = configuration.socket_options
+ pool_args['socket_options'] = configuration.socket_options
- if maxsize is None:
- if configuration.connection_pool_maxsize is not None:
- maxsize = configuration.connection_pool_maxsize
- else:
- maxsize = 4
+ if configuration.connection_pool_maxsize is not None:
+ pool_args['maxsize'] = configuration.connection_pool_maxsize
# https pool manager
+ self.pool_manager: urllib3.PoolManager
+
if configuration.proxy:
- self.pool_manager = urllib3.ProxyManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- proxy_url=configuration.proxy,
- proxy_headers=configuration.proxy_headers,
- **addition_pool_args
- )
+ if is_socks_proxy_url(configuration.proxy):
+ from urllib3.contrib.socks import SOCKSProxyManager
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["headers"] = configuration.proxy_headers
+ self.pool_manager = SOCKSProxyManager(**pool_args)
+ else:
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["proxy_headers"] = configuration.proxy_headers
+ self.pool_manager = urllib3.ProxyManager(**pool_args)
else:
- self.pool_manager = urllib3.PoolManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- **addition_pool_args
- )
+ self.pool_manager = urllib3.PoolManager(**pool_args)
def request(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
+ method,
+ url,
+ headers=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ):
"""Perform requests.
:param method: http request method
:param url: http request url
:param headers: http request headers
- :param body: request body, for other types
- :param fields: request parameters for
- `application/x-www-form-urlencoded`
- or `multipart/form-data`
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Default is False.
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
+ :param body: request json body, for `application/json`
+ :param post_params: request post parameters,
+ `application/x-www-form-urlencoded`
+ and `multipart/form-data`
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
"""
method = method.upper()
- assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
- 'PATCH', 'OPTIONS']
-
- if fields and body:
+ assert method in [
+ 'GET',
+ 'HEAD',
+ 'DELETE',
+ 'POST',
+ 'PUT',
+ 'PATCH',
+ 'OPTIONS'
+ ]
+
+ if post_params and body:
raise ApiValueError(
- "body parameter cannot be used with fields parameter."
+ "body parameter cannot be used with post_params parameter."
)
- fields = fields or {}
+ post_params = post_params or {}
headers = headers or {}
- if timeout:
- if isinstance(timeout, (int, float)): # noqa: E501,F821
- timeout = urllib3.Timeout(total=timeout)
- elif (isinstance(timeout, tuple) and
- len(timeout) == 2):
- timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1])
+ timeout = None
+ if _request_timeout:
+ if isinstance(_request_timeout, (int, float)):
+ timeout = urllib3.Timeout(total=_request_timeout)
+ elif (
+ isinstance(_request_timeout, tuple)
+ and len(_request_timeout) == 2
+ ):
+ timeout = urllib3.Timeout(
+ connect=_request_timeout[0],
+ read=_request_timeout[1]
+ )
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
- if 'Content-Type' not in headers and body is None:
+
+ # no content type provided or payload is json
+ content_type = headers.get('Content-Type')
+ if (
+ not content_type
+ or re.search('json', content_type, re.IGNORECASE)
+ ):
+ request_body = None
+ if body is not None:
+ request_body = json.dumps(body)
r = self.pool_manager.request(
method,
url,
- preload_content=not stream,
+ body=request_body,
timeout=timeout,
- headers=headers
+ headers=headers,
+ preload_content=False
)
- elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
+ elif content_type == 'application/x-www-form-urlencoded':
r = self.pool_manager.request(
- method, url,
- body=body,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=False,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
- elif headers['Content-Type'] == 'multipart/form-data':
+ headers=headers,
+ preload_content=False
+ )
+ elif content_type == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
+ # Ensures that dict objects are serialized
+ post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params]
r = self.pool_manager.request(
- method, url,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=True,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
+ headers=headers,
+ preload_content=False
+ )
# Pass a `string` parameter directly in the body to support
- # other content types than Json when `body` argument is
- # provided in serialized form
+ # other content types than JSON when `body` argument is
+ # provided in serialized form.
elif isinstance(body, str) or isinstance(body, bytes):
- request_body = body
r = self.pool_manager.request(
- method, url,
+ method,
+ url,
+ body=body,
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
+ elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool):
+ request_body = "true" if body else "false"
+ r = self.pool_manager.request(
+ method,
+ url,
body=request_body,
- preload_content=not stream,
+ preload_content=False,
timeout=timeout,
headers=headers)
else:
@@ -183,72 +242,16 @@ def request(
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
- r = self.pool_manager.request(method, url,
- preload_content=not stream,
- timeout=timeout,
- headers=headers)
+ r = self.pool_manager.request(
+ method,
+ url,
+ fields={},
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
except urllib3.exceptions.SSLError as e:
- msg = "{0}\n{1}".format(type(e).__name__, str(e))
+ msg = "\n".join([type(e).__name__, str(e)])
raise ApiException(status=0, reason=msg)
- if not stream:
- # log response body
- logger.debug("response body: %s", r.data)
-
- return r
-
- def GET(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("GET", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def HEAD(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("HEAD", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def OPTIONS(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("OPTIONS", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def DELETE(self, url, headers=None, body=None,
- stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("DELETE", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def POST(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("POST", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PUT(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PUT", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PATCH(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PATCH", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
+ return RESTResponse(r)
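The rewritten REST layer trades the per-verb helpers for a single request() entry point and a lazy RESTResponse wrapper. A minimal sketch of driving it directly (Configuration assumed to expose the TLS/proxy attributes referenced in __init__):

    from cloudharness_cli.common.configuration import Configuration
    from cloudharness_cli.common.rest import RESTClientObject, is_socks_proxy_url

    print(is_socks_proxy_url("socks5h://127.0.0.1:1080"))   # True -> SOCKSProxyManager branch

    rest = RESTClientObject(Configuration(host="https://api.example.com"))
    resp = rest.request(
        "GET",
        "https://api.example.com/accounts/config",
        headers={"Accept": "application/json"},
        _request_timeout=(3, 10),   # (connect, read) pair, per the tuple handling above
    )
    print(resp.status, resp.getheader("Content-Type"))
    print(resp.read())              # bytes, cached on first read by RESTResponse
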
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/common/schemas.py b/libraries/client/cloudharness_cli/cloudharness_cli/common/schemas.py
deleted file mode 100644
index 18c6f8f23..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/common/schemas.py
+++ /dev/null
@@ -1,2462 +0,0 @@
-# coding: utf-8
-
-"""
- CH common service API
-
- Cloud Harness Platform - Reference CH service API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from collections import defaultdict
-from datetime import date, datetime, timedelta # noqa: F401
-import functools
-import decimal
-import io
-import re
-import types
-import typing
-import uuid
-
-from dateutil.parser.isoparser import isoparser, _takes_ascii
-import frozendict
-
-from cloudharness_cli.common.exceptions import (
- ApiTypeError,
- ApiValueError,
-)
-from cloudharness_cli.common.configuration import (
- Configuration,
-)
-
-
-class Unset(object):
- """
- An instance of this class is set as the default value for object type(dict) properties that are optional
- When a property has an unset value, that property will not be assigned in the dict
- """
- pass
-
-unset = Unset()
-
-none_type = type(None)
-file_type = io.IOBase
-
-
-class FileIO(io.FileIO):
- """
- A class for storing files
- Note: this class is not immutable
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]):
- if isinstance(arg, (io.FileIO, io.BufferedReader)):
- if arg.closed:
- raise ApiValueError('Invalid file state; file is closed and must be open')
- arg.close()
- inst = super(FileIO, cls).__new__(cls, arg.name)
- super(FileIO, inst).__init__(arg.name)
- return inst
- raise ApiValueError('FileIO must be passed arg which contains the open file')
-
- def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]):
- pass
-
-
-def update(d: dict, u: dict):
- """
- Adds u to d
- Where each dict is defaultdict(set)
- """
- if not u:
- return d
- for k, v in u.items():
- if k not in d:
- d[k] = v
- else:
- d[k] = d[k] | v
-
-
-class ValidationMetadata(frozendict.frozendict):
- """
- A class storing metadata that is needed to validate OpenApi Schema payloads
- """
- def __new__(
- cls,
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
- from_server: bool = False,
- configuration: typing.Optional[Configuration] = None,
- seen_classes: typing.FrozenSet[typing.Type] = frozenset(),
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]] = frozendict.frozendict()
- ):
- """
- Args:
- path_to_item: the path to the current data being instantiated.
- For {'a': [1]} if the code is handling, 1, then the path is ('args[0]', 'a', 0)
- This changes from location to location
- from_server: whether or not this data came form the server
- True when receiving server data
- False when instantiating model with client side data not form the server
- This does not change from location to location
- configuration: the Configuration instance to use
- This is needed because in Configuration:
- - one can disable validation checking
- This does not change from location to location
- seen_classes: when deserializing data that matches multiple schemas, this is used to store
- the schemas that have been traversed. This is used to stop processing when a cycle is seen.
- This changes from location to location
- validated_path_to_schemas: stores the already validated schema classes for a given path location
- This does not change from location to location
- """
- return super().__new__(
- cls,
- path_to_item=path_to_item,
- from_server=from_server,
- configuration=configuration,
- seen_classes=seen_classes,
- validated_path_to_schemas=validated_path_to_schemas
- )
-
- def validation_ran_earlier(self, cls: type) -> bool:
- validated_schemas = self.validated_path_to_schemas.get(self.path_to_item, set())
- validation_ran_earlier = validated_schemas and cls in validated_schemas
- if validation_ran_earlier:
- return True
- if cls in self.seen_classes:
- return True
- return False
-
- @property
- def path_to_item(self) -> typing.Tuple[typing.Union[str, int], ...]:
- return self.get('path_to_item')
-
- @property
- def from_server(self) -> bool:
- return self.get('from_server')
-
- @property
- def configuration(self) -> typing.Optional[Configuration]:
- return self.get('configuration')
-
- @property
- def seen_classes(self) -> typing.FrozenSet[typing.Type]:
- return self.get('seen_classes')
-
- @property
- def validated_path_to_schemas(self) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]]:
- return self.get('validated_path_to_schemas')
-
-
-class Singleton:
- """
- Enums and singletons are the same
- The same instance is returned for a given key of (cls, arg)
- """
- _instances = {}
-
- def __new__(cls, arg: typing.Any, **kwargs):
- """
- cls base classes: BoolClass, NoneClass, str, decimal.Decimal
- The 3rd key is used in the tuple below for a corner case where an enum contains integer 1
- However 1.0 can also be ingested into that enum schema because 1.0 == 1 and
- Decimal('1.0') == Decimal('1')
- But if we omitted the 3rd value in the key, then Decimal('1.0') would be stored as Decimal('1')
- and json serializing that instance would be '1' rather than the expected '1.0'
- Adding the 3rd value, the str of arg ensures that 1.0 -> Decimal('1.0') which is serialized as 1.0
- """
- key = (cls, arg, str(arg))
- if key not in cls._instances:
- if isinstance(arg, (none_type, bool, BoolClass, NoneClass)):
- inst = super().__new__(cls)
- cls._instances[key] = inst
- else:
- cls._instances[key] = super().__new__(cls, arg)
- return cls._instances[key]
-
- def __repr__(self):
- if isinstance(self, NoneClass):
- return f'<{self.__class__.__name__}: None>'
- elif isinstance(self, BoolClass):
- if bool(self):
- return f'<{self.__class__.__name__}: True>'
- return f'<{self.__class__.__name__}: False>'
- return f'<{self.__class__.__name__}: {super().__repr__()}>'
-
-
-class classproperty:
-
- def __init__(self, fget):
- self.fget = fget
-
- def __get__(self, owner_self, owner_cls):
- return self.fget(owner_cls)
-
-
-class NoneClass(Singleton):
- @classproperty
- def NONE(cls):
- return cls(None)
-
- def __bool__(self) -> bool:
- return False
-
-
-class BoolClass(Singleton):
- @classproperty
- def TRUE(cls):
- return cls(True)
-
- @classproperty
- def FALSE(cls):
- return cls(False)
-
- @functools.lru_cache()
- def __bool__(self) -> bool:
- for key, instance in self._instances.items():
- if self is instance:
- return bool(key[1])
- raise ValueError('Unable to find the boolean value of this instance')
-
-
-class MetaOapgTyped:
- exclusive_maximum: typing.Union[int, float]
- inclusive_maximum: typing.Union[int, float]
- exclusive_minimum: typing.Union[int, float]
- inclusive_minimum: typing.Union[int, float]
- max_items: int
- min_items: int
- discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]]
-
-
- class properties:
- # to hold object properties
- pass
-
- additional_properties: typing.Optional[typing.Type['Schema']]
- max_properties: int
- min_properties: int
- all_of: typing.List[typing.Type['Schema']]
- one_of: typing.List[typing.Type['Schema']]
- any_of: typing.List[typing.Type['Schema']]
- not_schema: typing.Type['Schema']
- max_length: int
- min_length: int
- items: typing.Type['Schema']
-
-
-class Schema:
- """
- the base class of all swagger/openapi schemas/models
- """
- __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass}
- _types: typing.Set[typing.Type]
- MetaOapg = MetaOapgTyped
-
- @staticmethod
- def __get_valid_classes_phrase(input_classes):
- """Returns a string phrase describing what types are allowed"""
- all_classes = list(input_classes)
- all_classes = sorted(all_classes, key=lambda cls: cls.__name__)
- all_class_names = [cls.__name__ for cls in all_classes]
- if len(all_class_names) == 1:
- return "is {0}".format(all_class_names[0])
- return "is one of [{0}]".format(", ".join(all_class_names))
-
- @staticmethod
- def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']:
- if isinstance(item_cls, types.FunctionType):
- # referenced schema
- return item_cls()
- elif isinstance(item_cls, staticmethod):
- # referenced schema
- return item_cls.__func__()
- return item_cls
-
- @classmethod
- def __type_error_message(
- cls, var_value=None, var_name=None, valid_classes=None, key_type=None
- ):
- """
- Keyword Args:
- var_value (any): the variable which has the type_error
-            var_name (str): the name of the variable which has the type error
- valid_classes (tuple): the accepted classes for current_item's
- value
- key_type (bool): False if our value is a value in a dict
- True if it is a key in a dict
- False if our item is an item in a tuple
- """
- key_or_value = "value"
- if key_type:
- key_or_value = "key"
- valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes)
-        msg = "Invalid type for variable '{0}'. Required {1} type {2} and passed type was {3}".format(
- var_name,
- key_or_value,
- valid_classes_phrase,
- type(var_value).__name__,
- )
- return msg
-
- @classmethod
- def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False):
- error_msg = cls.__type_error_message(
- var_name=path_to_item[-1],
- var_value=var_value,
- valid_classes=valid_classes,
- key_type=key_type,
- )
- return ApiTypeError(
- error_msg,
- path_to_item=path_to_item,
- valid_classes=valid_classes,
- key_type=key_type,
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- Schema _validate_oapg
- All keyword validation except for type checking was done in calling stack frames
- If those validations passed, the validated classes are collected in path_to_schemas
-
- Returns:
- path_to_schemas: a map of path to schemas
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- base_class = type(arg)
- if base_class not in cls._types:
- raise cls.__get_type_error(
- arg,
- validation_metadata.path_to_item,
- cls._types,
- key_type=False,
- )
-
- path_to_schemas = {validation_metadata.path_to_item: set()}
- path_to_schemas[validation_metadata.path_to_item].add(cls)
- path_to_schemas[validation_metadata.path_to_item].add(base_class)
- return path_to_schemas
-
- @staticmethod
- def _process_schema_classes_oapg(
- schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]
- ):
- """
- Processes and mutates schema_classes
- If a SomeSchema is a subclass of DictSchema then remove DictSchema because it is already included
- """
- if len(schema_classes) < 2:
- return
- if len(schema_classes) > 2 and UnsetAnyTypeSchema in schema_classes:
- schema_classes.remove(UnsetAnyTypeSchema)
- x_schema = schema_type_classes & schema_classes
- if not x_schema:
- return
- x_schema = x_schema.pop()
- if any(c is not x_schema and issubclass(c, x_schema) for c in schema_classes):
-            # needed to avoid an MRO error in get_new_class
- schema_classes.remove(x_schema)
-
- @classmethod
- def __get_new_cls(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]:
- """
-        Makes a new dynamic class for each validated path and returns the map
-        Instead of instantiating cls directly, we build a new class, new_cls,
-        whose dynamic bases include cls and the other schema classes that
-        validated at that path; callers then instantiate new_cls
-
- Dict property + List Item Assignment Use cases:
- 1. value is NOT an instance of the required schema class
- the value is validated by _validate_oapg
- _validate_oapg returns a key value pair
- where the key is the path to the item, and the value will be the required manufactured class
- made out of the matching schemas
-        2. value is an instance of the correct schema type
- the value is NOT validated by _validate_oapg, _validate_oapg only checks that the instance is of the correct schema type
- for this value, _validate_oapg does NOT return an entry for it in _path_to_schemas
- and in list/dict _get_items_oapg,_get_properties_oapg the value will be directly assigned
- because value is of the correct type, and validation was run earlier when the instance was created
- """
- _path_to_schemas = {}
- if validation_metadata.validated_path_to_schemas:
- update(_path_to_schemas, validation_metadata.validated_path_to_schemas)
- if not validation_metadata.validation_ran_earlier(cls):
- other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
-        # loop through it and make a new class for each entry
- # do not modify the returned result because it is cached and we would be modifying the cached value
- path_to_schemas = {}
- for path, schema_classes in _path_to_schemas.items():
- """
- Use cases
-            1. N number of schema classes + enum + type != bool/None, classes in path_to_schemas: tuple/frozendict.frozendict/str/Decimal/bytes/FileIO
- needs Singleton added
- 2. N number of schema classes + enum + type == bool/None, classes in path_to_schemas: BoolClass/NoneClass
- Singleton already added
-            3. N number of schema classes, classes in path_to_schemas: BoolClass/NoneClass/tuple/frozendict.frozendict/str/Decimal/bytes/FileIO
- """
- cls._process_schema_classes_oapg(schema_classes)
- enum_schema = any(
- issubclass(this_cls, EnumBase) for this_cls in schema_classes)
- inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set)
- chosen_schema_classes = schema_classes - inheritable_primitive_type
- suffix = tuple(inheritable_primitive_type)
- if enum_schema and suffix[0] not in {NoneClass, BoolClass}:
- suffix = (Singleton,) + suffix
-
- used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix
- mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes)
- path_to_schemas[path] = mfg_cls
-
- return path_to_schemas
-
- @classmethod
- def _get_new_instance_without_conversion_oapg(
- cls,
- arg: typing.Any,
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- # We have a Dynamic class and we are making an instance of it
- if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase):
- properties = cls._get_properties_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, properties)
- elif issubclass(cls, tuple) and issubclass(cls, ListBase):
- items = cls._get_items_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, items)
- """
- str = openapi str, date, and datetime
- decimal.Decimal = openapi int and float
- FileIO = openapi binary type and the user inputs a file
- bytes = openapi binary type and the user inputs bytes
- """
- return super(Schema, cls).__new__(cls, arg)
-
- @classmethod
- def from_openapi_data_oapg(
- cls,
- arg: typing.Union[
- str,
- date,
- datetime,
- int,
- float,
- decimal.Decimal,
- bool,
- None,
- 'Schema',
- dict,
- frozendict.frozendict,
- tuple,
- list,
- io.FileIO,
- io.BufferedReader,
- bytes
- ],
- _configuration: typing.Optional[Configuration]
- ):
- """
- Schema from_openapi_data_oapg
- """
- from_server = True
- validated_path_to_schemas = {}
- arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas)
- validation_metadata = ValidationMetadata(
- from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas)
- path_to_schemas = cls.__get_new_cls(arg, validation_metadata)
- new_cls = path_to_schemas[validation_metadata.path_to_item]
- new_inst = new_cls._get_new_instance_without_conversion_oapg(
- arg,
- validation_metadata.path_to_item,
- path_to_schemas
- )
- return new_inst
-
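-    # A minimal sketch (non-executing) of server-side deserialization;
-    # AnyTypeSchema is the permissive schema referenced later in this module.
-    """
-    deserialized = AnyTypeSchema.from_openapi_data_oapg(
-        {'name': 'pet', 'tags': ['a', 'b']},
-        _configuration=None,
-    )
-    assert isinstance(deserialized, frozendict.frozendict)
-    assert isinstance(deserialized['tags'], tuple)
-    """
-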
- @staticmethod
- def __get_input_dict(*args, **kwargs) -> frozendict.frozendict:
- input_dict = {}
- if args and isinstance(args[0], (dict, frozendict.frozendict)):
- input_dict.update(args[0])
- if kwargs:
- input_dict.update(kwargs)
- return frozendict.frozendict(input_dict)
-
- @staticmethod
- def __remove_unsets(kwargs):
- return {key: val for key, val in kwargs.items() if val is not unset}
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
- """
- Schema __new__
-
- Args:
- args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): the value
- kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): dict values
- _configuration: contains the Configuration that enables json schema validation keywords
- like minItems, minLength etc
-
- Note: double underscores are used here because pycharm thinks that these variables
- are instance properties if they are named normally :(
- """
- __kwargs = cls.__remove_unsets(kwargs)
- if not args and not __kwargs:
- raise TypeError(
- 'No input given. args or kwargs must be given.'
- )
- if not __kwargs and args and not isinstance(args[0], dict):
- __arg = args[0]
- else:
- __arg = cls.__get_input_dict(*args, **__kwargs)
- __from_server = False
- __validated_path_to_schemas = {}
- __arg = cast_to_allowed_types(
- __arg, __from_server, __validated_path_to_schemas)
- __validation_metadata = ValidationMetadata(
- configuration=_configuration, from_server=__from_server, validated_path_to_schemas=__validated_path_to_schemas)
- __path_to_schemas = cls.__get_new_cls(__arg, __validation_metadata)
- __new_cls = __path_to_schemas[__validation_metadata.path_to_item]
- return __new_cls._get_new_instance_without_conversion_oapg(
- __arg,
- __validation_metadata.path_to_item,
- __path_to_schemas
- )
-
- def __init__(
- self,
- *args: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
- _configuration: typing.Optional[Configuration] = None,
- **kwargs: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
- ]
- ):
- """
-        this is needed to fix an 'Unexpected argument' warning in pycharm
-        this code does nothing because all Schema instances are immutable:
-        all input data is passed into and used in __new__, and after the new instance is made
-        no new attributes are assigned, so __init__ is a no-op
- """
- pass
-
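-# A minimal sketch (non-executing) of client-side construction: inputs are cast
-# to the allowed primitive types (AnyTypeSchema as above).
-"""
-payload = AnyTypeSchema(count=1, ratio=0.5, active=True)
-assert payload['count'] == decimal.Decimal('1')    # int -> Decimal
-assert payload['ratio'] == decimal.Decimal('0.5')  # float -> Decimal
-assert bool(payload['active']) is True             # bool -> BoolClass singleton
-"""
-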
-"""
-import itertools
-data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool')
-type_to_cls = {
- 'None': 'NoneClass',
- 'FrozenDict': 'frozendict.frozendict',
- 'Tuple': 'tuple',
- 'Str': 'str',
- 'Decimal': 'decimal.Decimal',
- 'Bool': 'BoolClass'
-}
-cls_tuples = [v for v in itertools.combinations(data_types, 5)]
-typed_classes = [f"class {''.join(cls_tuple)}Mixin({', '.join(type_to_cls[typ] for typ in cls_tuple)}):\n pass" for cls_tuple in cls_tuples]
-for cls in typed_classes:
- print(cls)
-object_classes = [f"{''.join(cls_tuple)}Mixin = object" for cls_tuple in cls_tuples]
-for cls in object_classes:
- print(cls)
-"""
-if typing.TYPE_CHECKING:
- # qty 1
- NoneMixin = NoneClass
- FrozenDictMixin = frozendict.frozendict
- TupleMixin = tuple
- StrMixin = str
- DecimalMixin = decimal.Decimal
- BoolMixin = BoolClass
- BytesMixin = bytes
- FileMixin = FileIO
- # qty 2
- class BinaryMixin(bytes, FileIO):
- pass
- class NoneFrozenDictMixin(NoneClass, frozendict.frozendict):
- pass
- class NoneTupleMixin(NoneClass, tuple):
- pass
- class NoneStrMixin(NoneClass, str):
- pass
- class NoneDecimalMixin(NoneClass, decimal.Decimal):
- pass
- class NoneBoolMixin(NoneClass, BoolClass):
- pass
- class FrozenDictTupleMixin(frozendict.frozendict, tuple):
- pass
- class FrozenDictStrMixin(frozendict.frozendict, str):
- pass
- class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal):
- pass
- class FrozenDictBoolMixin(frozendict.frozendict, BoolClass):
- pass
- class TupleStrMixin(tuple, str):
- pass
- class TupleDecimalMixin(tuple, decimal.Decimal):
- pass
- class TupleBoolMixin(tuple, BoolClass):
- pass
- class StrDecimalMixin(str, decimal.Decimal):
- pass
- class StrBoolMixin(str, BoolClass):
- pass
- class DecimalBoolMixin(decimal.Decimal, BoolClass):
- pass
- # qty 3
- class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple):
- pass
- class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str):
- pass
- class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal):
- pass
- class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass):
- pass
- class NoneTupleStrMixin(NoneClass, tuple, str):
- pass
- class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal):
- pass
- class NoneTupleBoolMixin(NoneClass, tuple, BoolClass):
- pass
- class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal):
- pass
- class NoneStrBoolMixin(NoneClass, str, BoolClass):
- pass
- class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str):
- pass
- class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass):
- pass
- class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal):
- pass
- class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass):
- pass
- class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalMixin(tuple, str, decimal.Decimal):
- pass
- class TupleStrBoolMixin(tuple, str, BoolClass):
- pass
- class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass):
- pass
- class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass):
- pass
- # qty 4
- class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str):
- pass
- class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass):
- pass
- class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal):
- pass
- class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass):
- pass
- class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal):
- pass
- class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass):
- pass
- class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass):
- pass
- class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass):
- pass
- class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes):
- pass
-else:
- # qty 1
- class NoneMixin:
- _types = {NoneClass}
- class FrozenDictMixin:
- _types = {frozendict.frozendict}
- class TupleMixin:
- _types = {tuple}
- class StrMixin:
- _types = {str}
- class DecimalMixin:
- _types = {decimal.Decimal}
- class BoolMixin:
- _types = {BoolClass}
- class BytesMixin:
- _types = {bytes}
- class FileMixin:
- _types = {FileIO}
- # qty 2
- class BinaryMixin:
- _types = {bytes, FileIO}
- class NoneFrozenDictMixin:
- _types = {NoneClass, frozendict.frozendict}
- class NoneTupleMixin:
- _types = {NoneClass, tuple}
- class NoneStrMixin:
- _types = {NoneClass, str}
- class NoneDecimalMixin:
- _types = {NoneClass, decimal.Decimal}
- class NoneBoolMixin:
- _types = {NoneClass, BoolClass}
- class FrozenDictTupleMixin:
- _types = {frozendict.frozendict, tuple}
- class FrozenDictStrMixin:
- _types = {frozendict.frozendict, str}
- class FrozenDictDecimalMixin:
- _types = {frozendict.frozendict, decimal.Decimal}
- class FrozenDictBoolMixin:
- _types = {frozendict.frozendict, BoolClass}
- class TupleStrMixin:
- _types = {tuple, str}
- class TupleDecimalMixin:
- _types = {tuple, decimal.Decimal}
- class TupleBoolMixin:
- _types = {tuple, BoolClass}
- class StrDecimalMixin:
- _types = {str, decimal.Decimal}
- class StrBoolMixin:
- _types = {str, BoolClass}
- class DecimalBoolMixin:
- _types = {decimal.Decimal, BoolClass}
- # qty 3
- class NoneFrozenDictTupleMixin:
- _types = {NoneClass, frozendict.frozendict, tuple}
- class NoneFrozenDictStrMixin:
- _types = {NoneClass, frozendict.frozendict, str}
- class NoneFrozenDictDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal}
- class NoneFrozenDictBoolMixin:
- _types = {NoneClass, frozendict.frozendict, BoolClass}
- class NoneTupleStrMixin:
- _types = {NoneClass, tuple, str}
- class NoneTupleDecimalMixin:
- _types = {NoneClass, tuple, decimal.Decimal}
- class NoneTupleBoolMixin:
- _types = {NoneClass, tuple, BoolClass}
- class NoneStrDecimalMixin:
- _types = {NoneClass, str, decimal.Decimal}
- class NoneStrBoolMixin:
- _types = {NoneClass, str, BoolClass}
- class NoneDecimalBoolMixin:
- _types = {NoneClass, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrMixin:
- _types = {frozendict.frozendict, tuple, str}
- class FrozenDictTupleDecimalMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal}
- class FrozenDictTupleBoolMixin:
- _types = {frozendict.frozendict, tuple, BoolClass}
- class FrozenDictStrDecimalMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal}
- class FrozenDictStrBoolMixin:
- _types = {frozendict.frozendict, str, BoolClass}
- class FrozenDictDecimalBoolMixin:
- _types = {frozendict.frozendict, decimal.Decimal, BoolClass}
- class TupleStrDecimalMixin:
- _types = {tuple, str, decimal.Decimal}
- class TupleStrBoolMixin:
- _types = {tuple, str, BoolClass}
- class TupleDecimalBoolMixin:
- _types = {tuple, decimal.Decimal, BoolClass}
- class StrDecimalBoolMixin:
- _types = {str, decimal.Decimal, BoolClass}
- # qty 4
- class NoneFrozenDictTupleStrMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str}
- class NoneFrozenDictTupleDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal}
- class NoneFrozenDictTupleBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, BoolClass}
- class NoneFrozenDictStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal}
- class NoneFrozenDictStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, BoolClass}
- class NoneFrozenDictDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal}
- class NoneTupleStrBoolMixin:
- _types = {NoneClass, tuple, str, BoolClass}
- class NoneTupleDecimalBoolMixin:
- _types = {NoneClass, tuple, decimal.Decimal, BoolClass}
- class NoneStrDecimalBoolMixin:
- _types = {NoneClass, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal}
- class FrozenDictTupleStrBoolMixin:
- _types = {frozendict.frozendict, tuple, str, BoolClass}
- class FrozenDictTupleDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class FrozenDictStrDecimalBoolMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class TupleStrDecimalBoolMixin:
- _types = {tuple, str, decimal.Decimal, BoolClass}
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal}
- class NoneFrozenDictTupleStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass}
- class NoneFrozenDictTupleDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class NoneFrozenDictStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalBoolMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes}
-
-
-class ValidatorBase:
- @staticmethod
- def _is_json_validation_enabled_oapg(schema_keyword, configuration=None):
- """Returns true if JSON schema validation is enabled for the specified
- validation keyword. This can be used to skip JSON schema structural validation
- as requested in the configuration.
- Note: the suffix _oapg stands for openapi python (experimental) generator and
- it has been added to prevent collisions with other methods and properties
-
- Args:
- schema_keyword (string): the name of a JSON schema validation keyword.
- configuration (Configuration): the configuration class.
- """
-
- return (configuration is None or
- not hasattr(configuration, '_disabled_client_side_validations') or
- schema_keyword not in configuration._disabled_client_side_validations)
-
- @staticmethod
-    def _raise_validation_error_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""):
- raise ApiValueError(
- "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format(
- value=value,
- constraint_msg=constraint_msg,
- constraint_value=constraint_value,
- additional_txt=additional_txt,
- path_to_item=path_to_item,
- )
- )
-
-
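-# A minimal sketch (non-executing); LenientConfiguration is a hypothetical
-# stand-in for a Configuration with some keywords disabled.
-"""
-class LenientConfiguration:
-    _disabled_client_side_validations = {'maxLength', 'minLength'}
-
-assert ValidatorBase._is_json_validation_enabled_oapg('maxLength', LenientConfiguration()) is False
-assert ValidatorBase._is_json_validation_enabled_oapg('pattern', LenientConfiguration()) is True
-"""
-
-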
-class EnumBase:
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- EnumBase _validate_oapg
- Validates that arg is in the enum's allowed values
- """
- try:
- cls.MetaOapg.enum_value_to_name[arg]
- except KeyError:
- raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys()))
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
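-# A minimal sketch (non-executing) of the MetaOapg mapping consulted by
-# EnumBase._validate_oapg; Color is a hypothetical generated enum.
-"""
-class Color(EnumBase):
-    class MetaOapg:
-        enum_value_to_name = {'red': 'RED', 'blue': 'BLUE'}
-"""
-
-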
-class BoolBase:
- def is_true_oapg(self) -> bool:
- """
- A replacement for x is True
- True if the instance is a BoolClass True Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self)
-
- def is_false_oapg(self) -> bool:
- """
- A replacement for x is False
- True if the instance is a BoolClass False Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self) is False
-
-
-class NoneBase:
- def is_none_oapg(self) -> bool:
- """
- A replacement for x is None
- True if the instance is a NoneClass None Singleton
- """
- if issubclass(self.__class__, NoneClass):
- return True
- return False
-
-
-class StrBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_str_oapg(self) -> str:
- return self
-
- @property
- def as_date_oapg(self) -> date:
-        raise NotImplementedError('not implemented')
-
- @property
- def as_datetime_oapg(self) -> datetime:
-        raise NotImplementedError('not implemented')
-
- @property
- def as_decimal_oapg(self) -> decimal.Decimal:
-        raise NotImplementedError('not implemented')
-
- @property
- def as_uuid_oapg(self) -> uuid.UUID:
-        raise NotImplementedError('not implemented')
-
- @classmethod
- def __check_str_validations(
- cls,
- arg: str,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_length') and
- len(arg) > cls.MetaOapg.max_length):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="length must be less than or equal to",
- constraint_value=cls.MetaOapg.max_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_length') and
- len(arg) < cls.MetaOapg.min_length):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="length must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'regex')):
- for regex_dict in cls.MetaOapg.regex:
- flags = regex_dict.get('flags', 0)
- if not re.search(regex_dict['pattern'], arg, flags=flags):
- if flags != 0:
-                        # Include the regex flags in the error message only when
-                        # the OAS document actually specified them.
-                        cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item,
- additional_txt=" with flags=`{}`".format(flags)
- )
-                    cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- StrBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, str):
- cls.__check_str_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
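-# A minimal sketch (non-executing) of the MetaOapg names that drive the string
-# validations above; ShortName and its base StrSchema are hypothetical.
-"""
-class ShortName(StrBase, StrSchema):
-    class MetaOapg:
-        min_length = 1
-        max_length = 8
-        regex = [{'pattern': r'^[a-z]+$'}]
-"""
-
-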
-class UUIDBase:
- @property
- @functools.lru_cache()
- def as_uuid_oapg(self) -> uuid.UUID:
- return uuid.UUID(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- uuid.UUID(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- UUIDBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class CustomIsoparser(isoparser):
-
- @_takes_ascii
- def parse_isodatetime(self, dt_str):
- components, pos = self._parse_isodate(dt_str)
- if len(dt_str) > pos:
- if self._sep is None or dt_str[pos:pos + 1] == self._sep:
- components += self._parse_isotime(dt_str[pos + 1:])
- else:
- raise ValueError('String contains unknown ISO components')
-
- if len(components) > 3 and components[3] == 24:
- components[3] = 0
- return datetime(*components) + timedelta(days=1)
-
- if len(components) <= 3:
- raise ValueError('Value is not a datetime')
-
- return datetime(*components)
-
- @_takes_ascii
- def parse_isodate(self, datestr):
- components, pos = self._parse_isodate(datestr)
-
- if len(datestr) > pos:
- raise ValueError('String contains invalid time components')
-
- if len(components) > 3:
- raise ValueError('String contains invalid time components')
-
- return date(*components)
-
-
-DEFAULT_ISOPARSER = CustomIsoparser()
-
-
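-# A minimal sketch (non-executing): the 24:00 end-of-day form rolls over to the
-# next day, and dates must not carry any time component.
-"""
-assert DEFAULT_ISOPARSER.parse_isodatetime('2024-02-06T24:00:00') == datetime(2024, 2, 7, 0, 0)
-assert DEFAULT_ISOPARSER.parse_isodate('2024-02-06') == date(2024, 2, 6)
-"""
-
-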
-class DateBase:
- @property
- @functools.lru_cache()
- def as_date_oapg(self) -> date:
- return DEFAULT_ISOPARSER.parse_isodate(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodate(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 date format. "
- "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- DateBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DateTimeBase:
- @property
- @functools.lru_cache()
- def as_datetime_oapg(self) -> datetime:
- return DEFAULT_ISOPARSER.parse_isodatetime(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodatetime(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 datetime format. "
- "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DateTimeBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DecimalBase:
- """
- A class for storing decimals that are sent over the wire as strings
- These schemas must remain based on StrBase rather than NumberBase
- because picking base classes must be deterministic
- """
-
- @property
- @functools.lru_cache()
- def as_decimal_oapg(self) -> decimal.Decimal:
- return decimal.Decimal(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- decimal.Decimal(arg)
- return True
- except decimal.InvalidOperation:
- raise ApiValueError(
- "Value cannot be converted to a decimal. "
- "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DecimalBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class NumberBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- """
- Note: for some numbers like 9.0 they could be represented as an
- integer but our code chooses to store them as
- >>> Decimal('9.0').as_tuple()
- DecimalTuple(sign=0, digits=(9, 0), exponent=-1)
- so we can tell that the value came from a float and convert it back to a float
- during later serialization
- """
- if self.as_tuple().exponent < 0:
- # this could be represented as an integer but should be represented as a float
- # because that's what it was serialized from
- raise ApiValueError(f'{self} is not an integer')
- self._as_int = int(self)
- return self._as_int
-
- @property
- def as_float_oapg(self) -> float:
- try:
- return self._as_float
- except AttributeError:
- if self.as_tuple().exponent >= 0:
-                raise ApiValueError(f'{self} is not a float')
- self._as_float = float(self)
- return self._as_float
-
- @classmethod
- def __check_numeric_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if cls._is_json_validation_enabled_oapg('multipleOf',
- validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
- multiple_of_value = cls.MetaOapg.multiple_of
-            if not (float(arg) / multiple_of_value).is_integer():
-                # Note: the 'multipleOf' check is only as precise as floating
-                # point arithmetic.
-                cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="value must be a multiple of",
- constraint_value=multiple_of_value,
- path_to_item=validation_metadata.path_to_item
- )
-
- checking_max_or_min_values = any(
- hasattr(cls.MetaOapg, validation_key) for validation_key in {
- 'exclusive_maximum',
- 'inclusive_maximum',
- 'exclusive_minimum',
- 'inclusive_minimum',
- }
- )
- if not checking_max_or_min_values:
- return
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_maximum') and
- arg >= cls.MetaOapg.exclusive_maximum):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="must be a value less than",
- constraint_value=cls.MetaOapg.exclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_maximum') and
- arg > cls.MetaOapg.inclusive_maximum):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="must be a value less than or equal to",
- constraint_value=cls.MetaOapg.inclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_minimum') and
- arg <= cls.MetaOapg.exclusive_minimum):
-            cls._raise_validation_error_message_oapg(
-                value=arg,
-                constraint_msg="must be a value greater than",
-                constraint_value=cls.MetaOapg.exclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_minimum') and
- arg < cls.MetaOapg.inclusive_minimum):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than or equal to",
- constraint_value=cls.MetaOapg.inclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- NumberBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, decimal.Decimal):
- cls.__check_numeric_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
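-# A minimal sketch (non-executing) of the exponent rule used by
-# as_int_oapg/as_float_oapg above.
-"""
-assert decimal.Decimal('9.0').as_tuple().exponent == -1  # float-sourced value
-assert decimal.Decimal('9').as_tuple().exponent == 0     # int-sourced value
-"""
-
-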
-class ListBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def __validate_items(cls, list_items, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for items are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- list_items: the input list of items
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
-
- # if we have definitions for an items schema, use it
- # otherwise accept anything
- item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema)
- item_cls = cls._get_class_oapg(item_cls)
- path_to_schemas = {}
- for i, value in enumerate(list_items):
- item_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=validation_metadata.path_to_item+(i,),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if item_validation_metadata.validation_ran_earlier(item_cls):
- continue
- other_path_to_schemas = item_cls._validate_oapg(
- value, validation_metadata=item_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_tuple_validations(
- cls, arg,
- validation_metadata: ValidationMetadata):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_items') and
- len(arg) > cls.MetaOapg.max_items):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="number of items must be less than or equal to",
- constraint_value=cls.MetaOapg.max_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_items') and
- len(arg) < cls.MetaOapg.min_items):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="number of items must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
- unique_items = set(arg)
- if len(arg) > len(unique_items):
-                cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="duplicate items were found, and the tuple must not contain duplicates because",
- constraint_value='unique_items==True',
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- ListBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
-        - the returned instance is a serializable type (None, True, and False are represented by enum singletons)
-
-        Returns:
-            path_to_schemas: a map of path to validated schema classes
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, tuple):
- cls.__check_tuple_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, tuple):
- return _path_to_schemas
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_items_oapg(
- cls: 'Schema',
- arg: typing.List[typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- '''
- ListBase _get_items_oapg
- '''
- cast_items = []
-
- for i, value in enumerate(arg):
- item_path_to_item = path_to_item + (i,)
- item_cls = path_to_schemas[item_path_to_item]
- new_value = item_cls._get_new_instance_without_conversion_oapg(
- value,
- item_path_to_item,
- path_to_schemas
- )
- cast_items.append(new_value)
-
- return cast_items
-
-
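-# A minimal sketch (non-executing) of the MetaOapg names read by the list
-# validations above; TagList, TupleSchema and StrSchema are hypothetical.
-"""
-class TagList(ListBase, TupleSchema):
-    class MetaOapg:
-        items = StrSchema
-        min_items = 1
-        unique_items = True
-"""
-
-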
-class Discriminable:
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args):
-        if not args or disc_property_name not in args[0]:
- # The input data does not contain the discriminator property
- raise ApiValueError(
- "Cannot deserialize input data due to missing discriminator. "
- "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item)
- )
-
- @classmethod
- def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str):
- """
- Used in schemas with discriminators
- """
- if not hasattr(cls.MetaOapg, 'discriminator'):
- return None
- disc = cls.MetaOapg.discriminator()
- if disc_property_name not in disc:
- return None
- discriminated_cls = disc[disc_property_name].get(disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if not hasattr(cls, 'MetaOapg'):
- return None
- elif not (
- hasattr(cls.MetaOapg, 'all_of') or
- hasattr(cls.MetaOapg, 'one_of') or
- hasattr(cls.MetaOapg, 'any_of')
- ):
- return None
-        # TODO stop traversing if a cycle is hit
- if hasattr(cls.MetaOapg, 'all_of'):
- for allof_cls in cls.MetaOapg.all_of():
- discriminated_cls = allof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'one_of'):
- for oneof_cls in cls.MetaOapg.one_of():
- discriminated_cls = oneof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'any_of'):
- for anyof_cls in cls.MetaOapg.any_of():
- discriminated_cls = anyof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- return None
-
-
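-# A minimal sketch (non-executing) of the mapping shape walked by
-# get_discriminated_class_oapg; Pet, Cat and Dog are hypothetical.
-"""
-class Pet(Discriminable):
-    class MetaOapg:
-        @staticmethod
-        def discriminator():
-            return {'petType': {'cat': Cat, 'dog': Dog}}
-
-assert Pet.get_discriminated_class_oapg('petType', 'dog') is Dog
-"""
-
-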
-class DictBase(Discriminable, ValidatorBase):
-
- @classmethod
- def __validate_arg_presence(cls, arg):
- """
- Ensures that:
- - all required arguments are passed in
- - the input variable names are valid
- - present in properties or
- - accepted because additionalProperties exists
- Exceptions will be raised if:
- - invalid arguments were passed in
- - a var_name is invalid if additional_properties == NotAnyTypeSchema
- and var_name not in properties.__annotations__
- - required properties were not passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- seen_required_properties = set()
- invalid_arguments = []
- required_property_names = getattr(cls.MetaOapg, 'required', set())
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name in arg:
- if property_name in required_property_names:
- seen_required_properties.add(property_name)
- elif property_name in property_annotations:
- continue
- elif additional_properties is not NotAnyTypeSchema:
- continue
- else:
- invalid_arguments.append(property_name)
- missing_required_arguments = list(required_property_names - seen_required_properties)
- if missing_required_arguments:
- missing_required_arguments.sort()
- raise ApiTypeError(
- "{} is missing {} required argument{}: {}".format(
- cls.__name__,
- len(missing_required_arguments),
- "s" if len(missing_required_arguments) > 1 else "",
- missing_required_arguments
- )
- )
- if invalid_arguments:
- invalid_arguments.sort()
- raise ApiTypeError(
- "{} was passed {} invalid argument{}: {}".format(
- cls.__name__,
- len(invalid_arguments),
- "s" if len(invalid_arguments) > 1 else "",
- invalid_arguments
- )
- )
-
- @classmethod
- def __validate_args(cls, arg, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for properties are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- path_to_schemas = {}
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name, value in arg.items():
- path_to_item = validation_metadata.path_to_item+(property_name,)
- if property_name in property_annotations:
- schema = property_annotations[property_name]
- elif additional_properties is not NotAnyTypeSchema:
- if additional_properties is UnsetAnyTypeSchema:
- """
- If additionalProperties is unset and this path_to_item does not yet have
- any validations on it, validate it.
- If it already has validations on it, skip this validation.
- """
- if path_to_item in path_to_schemas:
- continue
- schema = additional_properties
- else:
- raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format(
- value, cls, validation_metadata.path_to_item+(property_name,)
- ))
- schema = cls._get_class_oapg(schema)
- arg_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=path_to_item,
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if arg_validation_metadata.validation_ran_earlier(schema):
- continue
- other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_dict_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_properties') and
- len(arg) > cls.MetaOapg.max_properties):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="number of properties must be less than or equal to",
- constraint_value=cls.MetaOapg.max_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_properties') and
- len(arg) < cls.MetaOapg.min_properties):
-            cls._raise_validation_error_message_oapg(
- value=arg,
- constraint_msg="number of properties must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DictBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
-        - the returned instance is a serializable type (None, True, and False are represented by enum singletons)
-
-        Returns:
-            path_to_schemas: a map of path to validated schema classes
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, frozendict.frozendict):
- cls.__check_dict_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, frozendict.frozendict):
- return _path_to_schemas
- cls.__validate_arg_presence(arg)
- other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- try:
- discriminator = cls.MetaOapg.discriminator()
- except AttributeError:
- return _path_to_schemas
- # discriminator exists
- disc_prop_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg)
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
- cls.__name__,
- disc_prop_name,
- list(discriminator[disc_prop_name].keys()),
- validation_metadata.path_to_item + (disc_prop_name,)
- )
- )
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if updated_vm.validation_ran_earlier(discriminated_cls):
- return _path_to_schemas
- other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_properties_oapg(
- cls,
- arg: typing.Dict[str, typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- """
- DictBase _get_properties_oapg, this is how properties are set
- These values already passed validation
- """
- dict_items = {}
-
- for property_name_js, value in arg.items():
- property_path_to_item = path_to_item + (property_name_js,)
- property_cls = path_to_schemas[property_path_to_item]
- new_value = property_cls._get_new_instance_without_conversion_oapg(
- value,
- property_path_to_item,
- path_to_schemas
- )
- dict_items[property_name_js] = new_value
-
- return dict_items
-
- def __setattr__(self, name: str, value: typing.Any):
- if not isinstance(self, FileIO):
- raise AttributeError('property setting not supported on immutable instances')
-
- def __getattr__(self, name: str):
- """
- for instance.name access
- Properties are only type hinted for required properties
- so that hasattr(instance, 'optionalProp') is False when that key is not present
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- if name not in self.__class__.__annotations__:
- raise AttributeError(f"{self} has no attribute '{name}'")
- try:
- value = self[name]
- return value
- except KeyError as ex:
- raise AttributeError(str(ex))
-
- def __getitem__(self, name: str):
- """
- dict_instance[name] accessor
-        a missing key raises KeyError
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- return super().__getitem__(name)
-
- def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]:
- # dict_instance[name] accessor
- if not isinstance(self, frozendict.frozendict):
- raise NotImplementedError()
- try:
- return super().__getitem__(name)
- except KeyError:
- return unset
-
-
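-# A minimal sketch (non-executing) of the three access styles above; pet is a
-# hypothetical validated dict-based instance with an optional 'nickname'.
-"""
-pet['name']                    # raises KeyError when the key is missing
-pet.name                       # raises AttributeError when not type hinted
-pet.get_item_oapg('nickname')  # returns unset instead of raising
-"""
-
-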
-def cast_to_allowed_types(
- arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader],
- from_server: bool,
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]],
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
-) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]:
- """
- Casts the input payload arg into the allowed types
- The input validated_path_to_schemas is mutated by running this function
-
- When from_server is False then
- - date/datetime is cast to str
- - int/float is cast to Decimal
-
-    If a Schema instance is passed in, it is converted back to a primitive instance, because
-    one may need to validate that data against the original Schema class AND additional different classes;
-    those additional classes will need to be added to the new manufactured class for that payload
-    If the code kept the payload as a Schema instance, it would fail to validate against other
-    Schema classes and couldn't manufacture a new class that includes all valid schemas
- TODO: store the validated schema classes in validation_metadata
-
- Args:
- arg: the payload
- from_server: whether this payload came from the server or not
- validated_path_to_schemas: a dict that stores the validated classes at any path location in the payload
- """
- if isinstance(arg, Schema):
- # store the already run validations
- schema_classes = set()
- source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__
- if not source_schema_was_unset:
- """
- Do not include UnsetAnyTypeSchema and its base class because
- it did not exist in the original spec schema definition
- It was added to ensure that all instances are of type Schema and the allowed base types
- """
- for cls in arg.__class__.__bases__:
- if cls is Singleton:
- # Skip Singleton
- continue
- schema_classes.add(cls)
- validated_path_to_schemas[path_to_item] = schema_classes
-
- type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}")
- if isinstance(arg, str):
- return str(arg)
- elif isinstance(arg, (dict, frozendict.frozendict)):
- return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()})
- elif isinstance(arg, (bool, BoolClass)):
- """
- this check must come before isinstance(arg, (int, float))
- because isinstance(True, int) is True
- """
- if arg:
- return BoolClass.TRUE
- return BoolClass.FALSE
- elif isinstance(arg, int):
- return decimal.Decimal(arg)
- elif isinstance(arg, float):
- decimal_from_float = decimal.Decimal(arg)
- if decimal_from_float.as_integer_ratio()[1] == 1:
- # 9.0 -> Decimal('9.0')
- # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
- return decimal.Decimal(str(decimal_from_float)+'.0')
- return decimal_from_float
- elif isinstance(arg, (tuple, list)):
- return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)])
- elif isinstance(arg, (none_type, NoneClass)):
- return NoneClass.NONE
- elif isinstance(arg, (date, datetime)):
- if not from_server:
- return arg.isoformat()
- raise type_error
- elif isinstance(arg, uuid.UUID):
- if not from_server:
- return str(arg)
- raise type_error
- elif isinstance(arg, decimal.Decimal):
- return decimal.Decimal(arg)
- elif isinstance(arg, bytes):
- return bytes(arg)
- elif isinstance(arg, (io.FileIO, io.BufferedReader)):
- return FileIO(arg)
-    raise ValueError('Invalid type passed in: input={} type={}'.format(arg, type(arg)))
-
-
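-# A minimal sketch (non-executing) of the casting rules above with
-# from_server=False (client-side input).
-"""
-assert cast_to_allowed_types(True, False, {}) is BoolClass.TRUE  # bool before int
-assert cast_to_allowed_types(9.0, False, {}) == decimal.Decimal('9.0')
-assert cast_to_allowed_types(date(2024, 2, 6), False, {}) == '2024-02-06'
-"""
-
-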
-class ComposedBase(Discriminable):
-
- @classmethod
- def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
- path_to_schemas = defaultdict(set)
- for allof_cls in cls.MetaOapg.all_of():
- if validation_metadata.validation_ran_earlier(allof_cls):
- continue
- other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __get_oneof_class(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata,
- ):
- oneof_classes = []
- path_to_schemas = defaultdict(set)
- for oneof_cls in cls.MetaOapg.one_of():
- if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
- oneof_classes.append(oneof_cls)
- continue
- if validation_metadata.validation_ran_earlier(oneof_cls):
- oneof_classes.append(oneof_cls)
- continue
- try:
- path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and oneof_cls is discriminated_cls:
- raise ex
- continue
- oneof_classes.append(oneof_cls)
- if not oneof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the oneOf schemas matched the input data.".format(cls)
- )
- elif len(oneof_classes) > 1:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. Multiple "
- "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
- )
- # exactly one class matches
- return path_to_schemas
-
- @classmethod
- def __get_anyof_classes(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata
- ):
- anyof_classes = []
- path_to_schemas = defaultdict(set)
- for anyof_cls in cls.MetaOapg.any_of():
- if validation_metadata.validation_ran_earlier(anyof_cls):
- anyof_classes.append(anyof_cls)
- continue
-
- try:
- other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and anyof_cls is discriminated_cls:
- raise ex
- continue
- anyof_classes.append(anyof_cls)
- update(path_to_schemas, other_path_to_schemas)
- if not anyof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the anyOf schemas matched the input data.".format(cls)
- )
- return path_to_schemas
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- ComposedBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
-        - the returned instance is a serializable type, except for None, True, and False, which are represented as enum values
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- # validation checking on types, validations, and enums
- path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
-
- # process composed schema
- discriminator = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'):
- discriminator = cls.MetaOapg.discriminator()
- discriminated_cls = None
- if discriminator and arg and isinstance(arg, frozendict.frozendict):
- disc_property_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg)
- # get discriminated_cls by looking at the dict in the current class
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
- arg[disc_property_name],
- cls.__name__,
- disc_property_name,
- list(discriminator[disc_property_name].keys()),
- updated_vm.path_to_item + (disc_property_name,)
- )
- )
-
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
- other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
- other_path_to_schemas = cls.__get_oneof_class(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
- other_path_to_schemas = cls.__get_anyof_classes(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- not_cls = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'):
- not_cls = cls.MetaOapg.not_schema
- not_cls = cls._get_class_oapg(not_cls)
- if not_cls:
- other_path_to_schemas = None
- not_exception = ApiValueError(
- "Invalid value '{}' was passed in to {}. Value is invalid because it is disallowed by {}".format(
- arg,
- cls.__name__,
- not_cls.__name__,
- )
- )
- if updated_vm.validation_ran_earlier(not_cls):
- raise not_exception
-
- try:
- other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm)
- except (ApiValueError, ApiTypeError):
- pass
- if other_path_to_schemas:
- raise not_exception
-
- if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls):
- # TODO use an exception from this package here
- assert discriminated_cls in path_to_schemas[updated_vm.path_to_item]
- return path_to_schemas
-
-
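The oneOf resolution above enforces an exactly-one-match rule: zero matching schemas raises ApiValueError, and so does more than one. The same rule in isolation, with plain predicates standing in for schema classes (a sketch, not the generated code):

    def one_of(validators, value):
        matched = [name for name, accepts in validators if accepts(value)]
        if not matched:
            raise ValueError('no oneOf schema matched the input')
        if len(matched) > 1:
            raise ValueError('multiple oneOf schemas matched: {}'.format(matched))
        return matched[0]

    validators = [('int-like', lambda v: isinstance(v, int)),
                  ('str-like', lambda v: isinstance(v, str))]
    print(one_of(validators, 3))      # 'int-like'
    # one_of(validators, [])          # would raise: no oneOf schema matched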
-# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
-class ComposedSchema(
- ComposedBase,
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs):
- if not args:
- if not kwargs:
- raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
- args = (kwargs, )
- return super().from_openapi_data_oapg(args[0], _configuration=_configuration)
-
-
-class ListSchema(
- ListBase,
- Schema,
- TupleMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NoneSchema(
- NoneBase,
- Schema,
- NoneMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: None, **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NumberSchema(
- NumberBase,
- Schema,
- DecimalMixin
-):
- """
- This is used for type: number with no format
- Both integers AND floats are accepted
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class IntBase:
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- self._as_int = int(self)
- return self._as_int
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
-
- denominator = arg.as_integer_ratio()[-1]
- if denominator != 1:
- raise ApiValueError(
- "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- IntBase _validate_oapg
- TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class IntSchema(IntBase, NumberSchema):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class Int32Base:
- __inclusive_minimum = decimal.Decimal(-2147483648)
- __inclusive_maximum = decimal.Decimal(2147483647)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int32Schema(
- Int32Base,
- IntSchema
-):
- pass
-
-
-class Int64Base:
- __inclusive_minimum = decimal.Decimal(-9223372036854775808)
- __inclusive_maximum = decimal.Decimal(9223372036854775807)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int64Schema(
- Int64Base,
- IntSchema
-):
- pass
-
-
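The integer bases above stack two independent format checks: IntBase rejects any Decimal with a fractional part (as_integer_ratio()[1] != 1), and Int32Base/Int64Base add inclusive range bounds. Condensed into one standalone function for the int32 case (a sketch using the same bounds):

    import decimal

    INT32_MIN = decimal.Decimal(-2147483648)
    INT32_MAX = decimal.Decimal(2147483647)

    def validate_int32(arg: decimal.Decimal) -> decimal.Decimal:
        # fractional values are not integers at all
        if arg.as_integer_ratio()[1] != 1:
            raise ValueError('{} is not an integer'.format(arg))
        # inclusive bounds, mirroring __inclusive_minimum/__inclusive_maximum above
        if not INT32_MIN <= arg <= INT32_MAX:
            raise ValueError('{} is out of int32 range'.format(arg))
        return arg

    validate_int32(decimal.Decimal('2147483647'))    # ok
    # validate_int32(decimal.Decimal('2.5'))         # raises: not an integer
    # validate_int32(decimal.Decimal('2147483648'))  # raises: out of range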
-class Float32Base:
- __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
- __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Float32Schema(
- Float32Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class Float64Base:
- __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
- __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-class Float64Schema(
- Float64Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- # todo check format
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class StrSchema(
- StrBase,
- Schema,
- StrMixin
-):
- """
- date + datetime string types must inherit from this class
- That is because one can validate a str payload as both:
- - type: string (format unset)
- - type: string, format: date
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema':
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class UUIDSchema(UUIDBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateSchema(DateBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, date], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateTimeSchema(DateTimeBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, datetime], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DecimalSchema(DecimalBase, StrSchema):
-
- def __new__(cls, arg: str, **kwargs: Configuration):
- """
- Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads
- which can be simple (str) or complex (dicts or lists with nested values)
-        Because casting is done only once, recursively, on all values prior to validation,
-        accepting a Decimal input here would be ambiguous: one could not tell whether it
-        was meant for a StrSchema (where it should be cast to str) or for a NumberSchema
-        (where it should stay a Decimal).
- """
- return super().__new__(cls, arg, **kwargs)
-
-
-class BytesSchema(
- Schema,
- BytesMixin
-):
- """
- this class will subclass bytes and is immutable
- """
- def __new__(cls, arg: bytes, **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class FileSchema(
- Schema,
- FileMixin
-):
- """
- This class is NOT immutable
- Dynamic classes are built using it for example when AnyType allows in binary data
-    All other schema classes ARE immutable
- If one wanted to make this immutable one could make this a DictSchema with required properties:
- - data = BytesSchema (which would be an immutable bytes based schema)
- - file_name = StrSchema
- and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
- The downside would be that data would be stored in memory which one may not want to do for very large files
-
- The developer is responsible for closing this file and deleting it
-
- This class was kept as mutable:
- - to allow file reading and writing to disk
- - to be able to preserve file name info
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class BinaryBase:
- pass
-
-
-class BinarySchema(
- ComposedBase,
- BinaryBase,
- Schema,
- BinaryMixin
-):
- class MetaOapg:
- @staticmethod
- def one_of():
- return [
- BytesSchema,
- FileSchema,
- ]
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration):
- return super().__new__(cls, arg)
-
-
-class BoolSchema(
- BoolBase,
- Schema,
- BoolMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: bool, **kwargs: ValidationMetadata):
- return super().__new__(cls, arg, **kwargs)
-
-
-class AnyTypeSchema(
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolFileBytesMixin
-):
- # Python representation of a schema defined as true or {}
- pass
-
-
-class UnsetAnyTypeSchema(AnyTypeSchema):
- # Used when additionalProperties/items was not explicitly defined and a defining schema is needed
- pass
-
-
-class NotAnyTypeSchema(
- ComposedSchema,
-):
- """
- Python representation of a schema defined as false or {'not': {}}
-    Does not allow inputs of AnyType
- Note: validations on this class are never run because the code knows that no inputs will ever validate
- """
-
- class MetaOapg:
- not_schema = AnyTypeSchema
-
- def __new__(
- cls,
- *args,
- _configuration: typing.Optional[Configuration] = None,
- ) -> 'NotAnyTypeSchema':
- return super().__new__(
- cls,
- *args,
- _configuration=_configuration,
- )
-
-
-class DictSchema(
- DictBase,
- Schema,
- FrozenDictMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]):
- return super().__new__(cls, *args, **kwargs)
-
-
-schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema}
-
-
-@functools.lru_cache()
-def get_new_class(
- class_name: str,
- bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]
-) -> typing.Type[Schema]:
- """
- Returns a new class that is made with the subclass bases
- """
- new_cls: typing.Type[Schema] = type(class_name, bases, {})
- return new_cls
-
-
-LOG_CACHE_USAGE = False
-
-
-def log_cache_usage(cache_fn):
- if LOG_CACHE_USAGE:
- print(cache_fn.__name__, cache_fn.cache_info())
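The deleted get_new_class relied on functools.lru_cache so that repeated requests for the same (name, bases) pair returned the identical dynamically built class, keeping isinstance/issubclass checks stable across payloads. The mechanism in isolation:

    import functools

    @functools.lru_cache()
    def get_new_class(class_name, bases):
        # type() builds a class object; the cache deduplicates identical requests
        return type(class_name, bases, {})

    A = get_new_class('Dyn', (dict,))
    B = get_new_class('Dyn', (dict,))
    assert A is B                      # second call is a cache hit
    print(get_new_class.cache_info())  # CacheInfo(hits=1, misses=1, ...)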
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/__init__.py
index d6e814b4a..e3d40f9ee 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/__init__.py
@@ -5,25 +5,36 @@
"""
CloudHarness Sample API
- CloudHarness Sample api # noqa: E501
+ CloudHarness Sample api
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
__version__ = "1.0.0"
+# import apis into sdk package
+from cloudharness_cli.samples.api.auth_api import AuthApi
+from cloudharness_cli.samples.api.resource_api import ResourceApi
+from cloudharness_cli.samples.api.test_api import TestApi
+from cloudharness_cli.samples.api.workflows_api import WorkflowsApi
+
# import ApiClient
+from cloudharness_cli.samples.api_response import ApiResponse
from cloudharness_cli.samples.api_client import ApiClient
-
-# import Configuration
from cloudharness_cli.samples.configuration import Configuration
-
-# import exceptions
from cloudharness_cli.samples.exceptions import OpenApiException
-from cloudharness_cli.samples.exceptions import ApiAttributeError
from cloudharness_cli.samples.exceptions import ApiTypeError
from cloudharness_cli.samples.exceptions import ApiValueError
from cloudharness_cli.samples.exceptions import ApiKeyError
+from cloudharness_cli.samples.exceptions import ApiAttributeError
from cloudharness_cli.samples.exceptions import ApiException
+
+# import models into sdk package
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task
+from cloudharness_cli.samples.models.sample_resource import SampleResource
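After this reorganization, the APIs, ApiClient, Configuration, and models are all re-exported from the package root, so client code no longer needs deep module paths. A minimal usage sketch (the host URL is a placeholder, not a real endpoint):

    from cloudharness_cli.samples import ApiClient, AuthApi, Configuration

    configuration = Configuration(host='https://samples.example.com/api')  # placeholder host
    api_client = ApiClient(configuration)
    auth_api = AuthApi(api_client)
    print(auth_api.valid_token())  # deserialized str body on HTTP 200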
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py
new file mode 100644
index 000000000..bfa2acc19
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py
@@ -0,0 +1,8 @@
+# flake8: noqa
+
+# import apis into api package
+from cloudharness_cli.samples.api.auth_api import AuthApi
+from cloudharness_cli.samples.api.resource_api import ResourceApi
+from cloudharness_cli.samples.api.test_api import TestApi
+from cloudharness_cli.samples.api.workflows_api import WorkflowsApi
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py
new file mode 100644
index 000000000..a0cdfc34c
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py
@@ -0,0 +1,531 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import StrictStr
+
+from cloudharness_cli.samples.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.samples.api_response import ApiResponse
+from cloudharness_cli.samples.rest import RESTResponseType
+
+
+class AuthApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def valid_cookie(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_cookie_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def valid_cookie_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_cookie_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def valid_cookie_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_cookie_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _valid_cookie_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ 'cookieAuth'
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/valid-cookie',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def valid_token(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_token_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def valid_token_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_token_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def valid_token_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Check if the token is valid. Get a token by logging into the base url
+
+ Check if the token is valid
+
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._valid_token_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '401': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _valid_token_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ 'bearerAuth'
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/valid',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
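Each generated operation now comes in the three flavors defined above: the plain method returns only the deserialized body, *_with_http_info wraps it in an ApiResponse, and *_without_preload_content hands back the raw REST response without reading it. Roughly (a sketch that assumes a reachable server and valid credentials):

    from cloudharness_cli.samples import ApiClient, AuthApi

    auth_api = AuthApi(ApiClient.get_default())

    body = auth_api.valid_token()                         # -> str
    response = auth_api.valid_token_with_http_info()      # -> ApiResponse[str]
    print(response.status_code, response.data)
    raw = auth_api.valid_token_without_preload_content()  # -> raw response, not yet read
    print(raw.read())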
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/resource_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/resource_api.py
new file mode 100644
index 000000000..247b0b58b
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/resource_api.py
@@ -0,0 +1,1356 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import Field, StrictStr
+from typing import List
+from typing_extensions import Annotated
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+
+from cloudharness_cli.samples.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.samples.api_response import ApiResponse
+from cloudharness_cli.samples.rest import RESTResponseType
+
+
+class ResourceApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def create_sample_resource(
+ self,
+ sample_resource: Annotated[SampleResource, Field(description="A new `SampleResource` to be created.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Create a SampleResource
+
+ Creates a new instance of a `SampleResource`.
+
+ :param sample_resource: A new `SampleResource` to be created. (required)
+ :type sample_resource: SampleResource
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_sample_resource_serialize(
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': None,
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def create_sample_resource_with_http_info(
+ self,
+ sample_resource: Annotated[SampleResource, Field(description="A new `SampleResource` to be created.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Create a SampleResource
+
+ Creates a new instance of a `SampleResource`.
+
+ :param sample_resource: A new `SampleResource` to be created. (required)
+ :type sample_resource: SampleResource
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_sample_resource_serialize(
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': None,
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def create_sample_resource_without_preload_content(
+ self,
+ sample_resource: Annotated[SampleResource, Field(description="A new `SampleResource` to be created.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create a SampleResource
+
+ Creates a new instance of a `SampleResource`.
+
+ :param sample_resource: A new `SampleResource` to be created. (required)
+ :type sample_resource: SampleResource
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._create_sample_resource_serialize(
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': None,
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _create_sample_resource_serialize(
+ self,
+ sample_resource,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if sample_resource is not None:
+ _body_params = sample_resource
+
+
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params['Content-Type'] = _content_type
+ else:
+ _default_content_type = (
+ self.api_client.select_header_content_type(
+ [
+ 'application/json'
+ ]
+ )
+ )
+ if _default_content_type is not None:
+ _header_params['Content-Type'] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='POST',
+ resource_path='/sampleresources',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
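ResourceApi follows the same three-flavor pattern; create_sample_resource serializes the SampleResource model as the JSON body of a POST to /sampleresources and returns None on a 201. A sketch (the 'name' field on SampleResource is a hypothetical placeholder; check the generated model for the real fields):

    from cloudharness_cli.samples import ApiClient, ResourceApi
    from cloudharness_cli.samples.models.sample_resource import SampleResource

    resource_api = ResourceApi(ApiClient.get_default())

    new_resource = SampleResource(name='demo')          # 'name' is hypothetical
    resource_api.create_sample_resource(new_resource)   # POST /sampleresources, 201 on success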
+
+ @validate_call
+ def delete_sample_resource(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Delete a SampleResource
+
+ Deletes an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '204': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def delete_sample_resource_with_http_info(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Delete a SampleResource
+
+ Deletes an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '204': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def delete_sample_resource_without_preload_content(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Delete a SampleResource
+
+ Deletes an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '204': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _delete_sample_resource_serialize(
+ self,
+ sampleresource_id,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if sampleresource_id is not None:
+ _path_params['sampleresourceId'] = sampleresource_id
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='DELETE',
+ resource_path='/sampleresources/{sampleresourceId}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def get_sample_resource(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> SampleResource:
+ """Get a SampleResource
+
+ Gets the details of a single instance of a `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "SampleResource",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def get_sample_resource_with_http_info(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[SampleResource]:
+ """Get a SampleResource
+
+ Gets the details of a single instance of a `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "SampleResource",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def get_sample_resource_without_preload_content(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Get a SampleResource
+
+ Gets the details of a single instance of a `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "SampleResource",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _get_sample_resource_serialize(
+ self,
+ sampleresource_id,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if sampleresource_id is not None:
+ _path_params['sampleresourceId'] = sampleresource_id
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/sampleresources/{sampleresourceId}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def get_sample_resources(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> List[SampleResource]:
+ """List All SampleResources
+
+ Gets a list of all `SampleResource` entities.
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resources_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "List[SampleResource]",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def get_sample_resources_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[List[SampleResource]]:
+ """List All SampleResources
+
+ Gets a list of all `SampleResource` entities.
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resources_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "List[SampleResource]",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def get_sample_resources_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """List All SampleResources
+
+ Gets a list of all `SampleResource` entities.
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_sample_resources_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "List[SampleResource]",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _get_sample_resources_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/sampleresources',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def update_sample_resource(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ sample_resource: Annotated[SampleResource, Field(description="Updated `SampleResource` information.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """Update a SampleResource
+
+ Updates an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param sample_resource: Updated `SampleResource` information. (required)
+ :type sample_resource: SampleResource
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._update_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def update_sample_resource_with_http_info(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ sample_resource: Annotated[SampleResource, Field(description="Updated `SampleResource` information.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """Update a SampleResource
+
+ Updates an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param sample_resource: Updated `SampleResource` information. (required)
+ :type sample_resource: SampleResource
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._update_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def update_sample_resource_without_preload_content(
+ self,
+ sampleresource_id: Annotated[StrictStr, Field(description="A unique identifier for a `SampleResource`.")],
+ sample_resource: Annotated[SampleResource, Field(description="Updated `SampleResource` information.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Update a SampleResource
+
+ Updates an existing `SampleResource`.
+
+ :param sampleresource_id: A unique identifier for a `SampleResource`. (required)
+ :type sampleresource_id: str
+ :param sample_resource: Updated `SampleResource` information. (required)
+ :type sample_resource: SampleResource
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._update_sample_resource_serialize(
+ sampleresource_id=sampleresource_id,
+ sample_resource=sample_resource,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': None,
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _update_sample_resource_serialize(
+ self,
+ sampleresource_id,
+ sample_resource,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if sampleresource_id is not None:
+ _path_params['sampleresourceId'] = sampleresource_id
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if sample_resource is not None:
+ _body_params = sample_resource
+
+
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params['Content-Type'] = _content_type
+ else:
+ _default_content_type = (
+ self.api_client.select_header_content_type(
+ [
+ 'application/json'
+ ]
+ )
+ )
+ if _default_content_type is not None:
+ _header_params['Content-Type'] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='PUT',
+ resource_path='/sampleresources/{sampleresourceId}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
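Each generated operation above comes in three flavors: a plain call that reads the body and returns the deserialized model, a `*_with_http_info` variant that wraps the result in an `ApiResponse`, and a `*_without_preload_content` variant that hands back the raw REST response without reading it. A minimal usage sketch for the resource client, not part of the patch: the import path and class name for the sample-resources API are assumptions (mirroring the test_api.py and workflows_api.py paths below), and the identifier is illustrative; only the method names and `ApiClient` come from the generated code.

    from cloudharness_cli.samples.api_client import ApiClient  # from this patch

    # Hypothetical module/class name, following the generator's convention:
    from cloudharness_cli.samples.api.sampleresource_api import SampleresourceApi

    api = SampleresourceApi(ApiClient.get_default())

    # Plain call: reads the body and returns the deserialized SampleResource.
    resource = api.get_sample_resource("my-resource-id")

    # ApiResponse variant: status code and headers travel with the data
    # (field names per the generated ApiResponse model).
    info = api.get_sample_resource_with_http_info("my-resource-id")
    print(info.status_code, info.data)

    # Raw variant: the caller reads and deserializes the body itself
    # (urllib3-style response object, assumed).
    raw = api.get_sample_resource_without_preload_content("my-resource-id")
    print(raw.status)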
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/test_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/test_api.py
new file mode 100644
index 000000000..6b10cae5a
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/test_api.py
@@ -0,0 +1,527 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+    CloudHarness Sample API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import StrictFloat, StrictInt, StrictStr
+from typing import Union
+
+from cloudharness_cli.samples.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.samples.api_response import ApiResponse
+from cloudharness_cli.samples.rest import RESTResponseType
+
+
+class TestApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def error(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """(Deprecated) test sentry is working
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /error is deprecated.", DeprecationWarning)
+
+ _param = self._error_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def error_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """(Deprecated) test sentry is working
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /error is deprecated.", DeprecationWarning)
+
+ _param = self._error_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def error_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """(Deprecated) test sentry is working
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /error is deprecated.", DeprecationWarning)
+
+ _param = self._error_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _error_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/error',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def ping(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> float:
+ """test the application is up
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._ping_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "float",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def ping_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[float]:
+ """test the application is up
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._ping_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "float",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def ping_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """test the application is up
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._ping_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "float",
+ '500': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _ping_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/ping',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
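`TestApi` follows the same three-method pattern. Note that `error` is declared deprecated, so the generated wrapper emits a `DeprecationWarning` before issuing `GET /error`, while `ping` deserializes its `200` response as a float. A short sketch of both behaviors, assuming a reachable deployment; the timeout values and exception handling are illustrative, and the exact exception class raised on a 500 depends on the generated rest module:

    import warnings

    from cloudharness_cli.samples.api_client import ApiClient
    from cloudharness_cli.samples.api.test_api import TestApi  # path per this patch

    api = TestApi(ApiClient.get_default())

    # ping() maps the '200' response to float (see _response_types_map);
    # a (connect, read) timeout pair is also accepted, per the docstring.
    value = api.ping(_request_timeout=(3.05, 10.0))

    # error() is deprecated: the wrapper warns before calling GET /error.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        try:
            api.error()
        except Exception:
            pass  # a 500 surfaces as a client-side exception (class assumed)
        assert any(issubclass(w.category, DeprecationWarning) for w in caught)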
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py
new file mode 100644
index 000000000..4256614f8
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py
@@ -0,0 +1,800 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+    CloudHarness Sample API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import Field, StrictFloat, StrictInt, StrictStr
+from typing import Any, Dict, Union
+from typing_extensions import Annotated
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+
+from cloudharness_cli.samples.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.samples.api_response import ApiResponse
+from cloudharness_cli.samples.rest import RESTResponseType
+
+
+class WorkflowsApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def submit_async(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> InlineResponse202:
+ """Send an asynchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._submit_async_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': "InlineResponse202",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def submit_async_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[InlineResponse202]:
+ """Send an asynchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._submit_async_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': "InlineResponse202",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def submit_async_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Send an asynchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._submit_async_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '202': "InlineResponse202",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _submit_async_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operation_async',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def submit_sync(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> object:
+ """(Deprecated) Send a synchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def submit_sync_with_http_info(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[object]:
+ """(Deprecated) Send a synchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def submit_sync_without_preload_content(
+ self,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """(Deprecated) Send a synchronous operation
+
+
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_serialize(
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "object",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _submit_sync_serialize(
+ self,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operation_sync',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def submit_sync_with_results(
+ self,
+ a: Annotated[Union[StrictFloat, StrictInt], Field(description="first number to sum")],
+ b: Annotated[Union[StrictFloat, StrictInt], Field(description="second number to sum")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """(Deprecated) Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+
+ :param a: first number to sum (required)
+ :type a: float
+ :param b: second number to sum (required)
+ :type b: float
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync_results is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_with_results_serialize(
+ a=a,
+ b=b,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def submit_sync_with_results_with_http_info(
+ self,
+ a: Annotated[Union[StrictFloat, StrictInt], Field(description="first number to sum")],
+ b: Annotated[Union[StrictFloat, StrictInt], Field(description="second number to sum")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """(Deprecated) Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+
+ :param a: first number to sum (required)
+ :type a: float
+ :param b: second number to sum (required)
+ :type b: float
+        :param _request_timeout: timeout setting for this request. If a
+                                 single number is provided, it is the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync_results is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_with_results_serialize(
+ a=a,
+ b=b,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def submit_sync_with_results_without_preload_content(
+ self,
+ a: Annotated[Union[StrictFloat, StrictInt], Field(description="first number to sum")],
+ b: Annotated[Union[StrictFloat, StrictInt], Field(description="second number to sum")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """(Deprecated) Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+
+ :param a: first number to sum (required)
+ :type a: float
+ :param b: second number to sum (required)
+ :type b: float
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+ warnings.warn("GET /operation_sync_results is deprecated.", DeprecationWarning)
+
+ _param = self._submit_sync_with_results_serialize(
+ a=a,
+ b=b,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _submit_sync_with_results_serialize(
+ self,
+ a,
+ b,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+        if a is not None:
+            _query_params.append(('a', a))
+        if b is not None:
+            _query_params.append(('b', b))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operation_sync_results',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
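+# Illustrative usage of the three generated call variants (a minimal sketch;
+# `DefaultApi` and the argument values are assumptions, not part of this module):
+#
+#   import cloudharness_cli.samples
+#
+#   with cloudharness_cli.samples.ApiClient() as api_client:
+#       api = cloudharness_cli.samples.DefaultApi(api_client)
+#       result = api.submit_sync_with_results(a=1, b=2)                       # -> str
+#       resp = api.submit_sync_with_results_with_http_info(a=1, b=2)          # -> ApiResponse[str]
+#       raw = api.submit_sync_with_results_without_preload_content(a=1, b=2)  # -> RESTResponseType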
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_client.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_client.py
index d2ecfb6c3..3485088c9 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_client.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_client.py
@@ -1,1500 +1,782 @@
# coding: utf-8
+
"""
CloudHarness Sample API
- CloudHarness Sample api # noqa: E501
+ CloudHarness Sample api
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-from dataclasses import dataclass
-from decimal import Decimal
-import enum
-import email
+import datetime
+from dateutil.parser import parse
+from enum import Enum
import json
+import mimetypes
import os
-import io
-import atexit
-from multiprocessing.pool import ThreadPool
import re
import tempfile
-import typing
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-from urllib.parse import urlparse, quote
-from urllib3.fields import RequestField as RequestFieldBase
-import frozendict
+from urllib.parse import quote
+from typing import Tuple, Optional, List, Dict, Union
+from pydantic import SecretStr
-from cloudharness_cli.samples import rest
from cloudharness_cli.samples.configuration import Configuration
-from cloudharness_cli.samples.exceptions import ApiTypeError, ApiValueError
-from cloudharness_cli.samples.schemas import (
- NoneClass,
- BoolClass,
- Schema,
- FileIO,
- BinarySchema,
- date,
- datetime,
- none_type,
- Unset,
- unset,
+from cloudharness_cli.samples.api_response import ApiResponse, T as ApiResponseT
+import cloudharness_cli.samples.models
+from cloudharness_cli.samples import rest
+from cloudharness_cli.samples.exceptions import (
+ ApiValueError,
+ ApiException,
+ BadRequestException,
+ UnauthorizedException,
+ ForbiddenException,
+ NotFoundException,
+ ServiceException
)
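+# A serialized request as produced by ApiClient.param_serialize:
+# (method, url, header_params, body, post_params)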
+RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]]
-class RequestField(RequestFieldBase):
- def __eq__(self, other):
- if not isinstance(other, RequestField):
- return False
- return self.__dict__ == other.__dict__
-
-
-class JSONEncoder(json.JSONEncoder):
- compact_separators = (',', ':')
-
- def default(self, obj):
- if isinstance(obj, str):
- return str(obj)
- elif isinstance(obj, float):
- return float(obj)
- elif isinstance(obj, int):
- return int(obj)
- elif isinstance(obj, Decimal):
- if obj.as_tuple().exponent >= 0:
- return int(obj)
- return float(obj)
- elif isinstance(obj, NoneClass):
- return None
- elif isinstance(obj, BoolClass):
- return bool(obj)
- elif isinstance(obj, (dict, frozendict.frozendict)):
- return {key: self.default(val) for key, val in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self.default(item) for item in obj]
- raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
-
-
-class ParameterInType(enum.Enum):
- QUERY = 'query'
- HEADER = 'header'
- PATH = 'path'
- COOKIE = 'cookie'
-
-
-class ParameterStyle(enum.Enum):
- MATRIX = 'matrix'
- LABEL = 'label'
- FORM = 'form'
- SIMPLE = 'simple'
- SPACE_DELIMITED = 'spaceDelimited'
- PIPE_DELIMITED = 'pipeDelimited'
- DEEP_OBJECT = 'deepObject'
-
-
-class PrefixSeparatorIterator:
- # A class to store prefixes and separators for rfc6570 expansions
-
- def __init__(self, prefix: str, separator: str):
- self.prefix = prefix
- self.separator = separator
- self.first = True
- if separator in {'.', '|', '%20'}:
- item_separator = separator
- else:
- item_separator = ','
- self.item_separator = item_separator
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self.first:
- self.first = False
- return self.prefix
- return self.separator
-
-
-class ParameterSerializerBase:
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- return False
-
- @staticmethod
- def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
- """
- Get representation if str/float/int/None/items in list/ values in dict
- None is returned if an item is undefined, use cases are value=
- - None
- - []
- - {}
- - [None, None None]
- - {'a': None, 'b': None}
- """
- if type(in_data) in {str, float, int}:
- if percent_encode:
- return quote(str(in_data))
- return str(in_data)
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, list) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, dict) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- raise ApiValueError('Unable to generate a ref6570 item representation of {}'.format(in_data))
-
- @staticmethod
- def _to_dict(name: str, value: str):
- return {name: value}
-
- @classmethod
- def __ref6570_str_float_int_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_value = cls.__ref6570_item_value(in_data, percent_encode)
- if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
- return next(prefix_separator_iterator) + var_name_piece
- value_pair_equals = '=' if named_parameter_expansion else ''
- return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
- @classmethod
- def __ref6570_list_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
- item_values = [v for v in item_values if v is not None]
- if not item_values:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece +
- value_pair_equals +
- prefix_separator_iterator.item_separator.join(item_values)
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [var_name_piece + value_pair_equals + val for val in item_values]
- )
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
- @classmethod
- def __ref6570_dict_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
- in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
- if not in_data_transformed:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece + value_pair_equals +
- prefix_separator_iterator.item_separator.join(
- prefix_separator_iterator.item_separator.join(
- item_pair
- ) for item_pair in in_data_transformed.items()
- )
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [key + '=' + val for key, val in in_data_transformed.items()]
- )
+ :param configuration: .Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ """
- @classmethod
- def _ref6570_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator
- ) -> str:
- """
- Separator is for separate variables like dict with explode true, not for array item separation
- """
- named_parameter_expansion = prefix_separator_iterator.separator in {'&', ';'}
- var_name_piece = variable_name if named_parameter_expansion else ''
- if type(in_data) in {str, float, int}:
- return cls.__ref6570_str_float_int_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- elif isinstance(in_data, list):
- return cls.__ref6570_list_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, dict):
- return cls.__ref6570_dict_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- # bool, bytes, etc
- raise ApiValueError('Unable to generate a ref6570 representation of {}'.format(in_data))
+ PRIMITIVE_TYPES = (float, bool, bytes, str, int)
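+    # NATIVE_TYPES_MAPPING translates OpenAPI type names (e.g. 'str', 'date',
+    # 'datetime') into the Python types instantiated by __deserialize.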
+ NATIVE_TYPES_MAPPING = {
+ 'int': int,
+ 'long': int, # TODO remove as only py3 is supported?
+ 'float': float,
+ 'str': str,
+ 'bool': bool,
+ 'date': datetime.date,
+ 'datetime': datetime.datetime,
+ 'object': object,
+ }
+ _pool = None
+ def __init__(
+ self,
+ configuration=None,
+ header_name=None,
+ header_value=None,
+ cookie=None
+ ) -> None:
+ # use default configuration if none is provided
+ if configuration is None:
+ configuration = Configuration.get_default()
+ self.configuration = configuration
-class StyleFormSerializer(ParameterSerializerBase):
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- if style is ParameterStyle.FORM:
- return True
- return super()._get_default_explode(style)
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = {}
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ self.client_side_validation = configuration.client_side_validation
- def _serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> str:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = PrefixSeparatorIterator('', '&')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
-class StyleSimpleSerializer(ParameterSerializerBase):
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers['User-Agent']
- def _serialize_simple(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool
- ) -> str:
- prefix_separator_iterator = PrefixSeparatorIterator('', ',')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers['User-Agent'] = value
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
-class JSONDetector:
- """
- Works for:
- application/json
- application/json; charset=UTF-8
- application/json-patch+json
- application/geo+json
- """
- __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
- @classmethod
- def _content_type_is_json(cls, content_type: str) -> bool:
- if cls.__json_content_type_pattern.match(content_type):
- return True
- return False
-
-
-@dataclass
-class ParameterBase(JSONDetector):
- name: str
- in_type: ParameterInType
- required: bool
- style: typing.Optional[ParameterStyle]
- explode: typing.Optional[bool]
- allow_reserved: typing.Optional[bool]
- schema: typing.Optional[typing.Type[Schema]]
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
-
- __style_to_in_type = {
- ParameterStyle.MATRIX: {ParameterInType.PATH},
- ParameterStyle.LABEL: {ParameterInType.PATH},
- ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
- ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
- ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
- }
- __in_type_to_default_style = {
- ParameterInType.QUERY: ParameterStyle.FORM,
- ParameterInType.PATH: ParameterStyle.SIMPLE,
- ParameterInType.HEADER: ParameterStyle.SIMPLE,
- ParameterInType.COOKIE: ParameterStyle.FORM,
- }
- __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
- _json_encoder = JSONEncoder()
+ _default = None
@classmethod
- def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
- if style is None:
- return
- in_type_set = cls.__style_to_in_type[style]
- if in_type not in in_type_set:
- raise ValueError(
- 'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
- style, in_type_set
- )
- )
-
- def __init__(
- self,
- name: str,
- in_type: ParameterInType,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- if schema is None and content is None:
- raise ValueError('Value missing; Pass in either schema or content')
- if schema and content:
- raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
- if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
- raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
- self.__verify_style_to_in_type(style, in_type)
- if content is None and style is None:
- style = self.__in_type_to_default_style[in_type]
- if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
- raise ValueError('Invalid content length, content length must equal 1')
- self.in_type = in_type
- self.name = name
- self.required = required
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
- self.schema = schema
- self.content = content
-
- def _serialize_json(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- eliminate_whitespace: bool = False
- ) -> str:
- if eliminate_whitespace:
- return json.dumps(in_data, separators=self._json_encoder.compact_separators)
- return json.dumps(in_data)
+ def get_default(cls):
+ """Return new instance of ApiClient.
+ This method returns newly created, based on default constructor,
+ object of ApiClient class or returns a copy of default
+ ApiClient.
-class PathParameter(ParameterBase, StyleSimpleSerializer):
+ :return: The ApiClient object.
+ """
+ if cls._default is None:
+ cls._default = ApiClient()
+ return cls._default
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.PATH,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of ApiClient.
- def __serialize_label(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator('.', '.')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+        It stores the given object as the default ApiClient.
- def __serialize_matrix(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator(';', ';')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ :param default: object of ApiClient.
+ """
+ cls._default = default
- def __serialize_simple(
+ def param_serialize(
self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- ) -> typing.Dict[str, str]:
- value = self._serialize_simple(
- in_data=in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True
- )
- return self._to_dict(self.name, value)
+ method,
+ resource_path,
+ path_params=None,
+ query_params=None,
+ header_params=None,
+ body=None,
+ post_params=None,
+            files=None,
+            auth_settings=None,
+ collection_formats=None,
+ _host=None,
+ _request_auth=None
+ ) -> RequestSerialized:
+
+ """Builds the HTTP request params needed by the request.
+ :param method: Method to call.
+ :param resource_path: Path to method endpoint.
+ :param path_params: Path parameters in the url.
+ :param query_params: Query parameters in the url.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param post_params: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param auth_settings: Auth Settings names for the request.
+        :param files: key -> filename, value -> filepath,
+            for `multipart/form-data`.
+        :param collection_formats: dict of collection formats for path, query,
+            header, and post parameters.
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the authentication
+            in the spec for a single request.
+        :return: tuple of form (method, url, header_params,
+            body, post_params)
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> path
- path:
- returns path_params: dict
- label -> path
- returns path_params
- matrix -> path
- returns path_params
- """
- if self.style:
- if self.style is ParameterStyle.SIMPLE:
- return self.__serialize_simple(cast_in_data)
- elif self.style is ParameterStyle.LABEL:
- return self.__serialize_label(cast_in_data)
- elif self.style is ParameterStyle.MATRIX:
- return self.__serialize_matrix(cast_in_data)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class QueryParameter(ParameterBase, StyleFormSerializer):
+ config = self.configuration
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.QUERY,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ # header parameters
+ header_params = header_params or {}
+ header_params.update(self.default_headers)
+ if self.cookie:
+ header_params['Cookie'] = self.cookie
+ if header_params:
+ header_params = self.sanitize_for_serialization(header_params)
+ header_params = dict(
+                self.parameters_to_tuples(header_params, collection_formats)
+ )
- def __serialize_space_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # path parameters
+ if path_params:
+ path_params = self.sanitize_for_serialization(path_params)
+ path_params = self.parameters_to_tuples(
+ path_params,
+ collection_formats
+ )
+ for k, v in path_params:
+ # specified safe chars, encode everything
+ resource_path = resource_path.replace(
+ '{%s}' % k,
+ quote(str(v), safe=config.safe_chars_for_path_param)
+ )
- def __serialize_pipe_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # post parameters
+ if post_params or files:
+ post_params = post_params if post_params else []
+ post_params = self.sanitize_for_serialization(post_params)
+ post_params = self.parameters_to_tuples(
+ post_params,
+ collection_formats
+ )
+ if files:
+ post_params.extend(self.files_parameters(files))
- def __serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._serialize_form(
- in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
+ # auth setting
+ self.update_params_for_auth(
+ header_params,
+ query_params,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=_request_auth
)
- return self._to_dict(self.name, value)
- def get_prefix_separator_iterator(self) -> typing.Optional[PrefixSeparatorIterator]:
- if self.style is ParameterStyle.FORM:
- return PrefixSeparatorIterator('?', '&')
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return PrefixSeparatorIterator('', '%20')
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return PrefixSeparatorIterator('', '|')
-
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> query
- query:
- - GET/HEAD/DELETE: could use fields
- - PUT/POST: must use urlencode to send parameters
- returns fields: tuple
- spaceDelimited -> query
- returns fields
- pipeDelimited -> query
- returns fields
- deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
- returns fields
- """
- if self.style:
- # TODO update query ones to omit setting values when [] {} or None is input
- if self.style is ParameterStyle.FORM:
- return self.__serialize_form(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
- # self.content will be length one
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
- return self._to_dict(
- self.name,
- next(prefix_separator_iterator) + self.name + '=' + quote(value)
- )
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+ # body
+ if body:
+ body = self.sanitize_for_serialization(body)
+ # request url
+ if _host is None or self.configuration.ignore_operation_servers:
+ url = self.configuration.host + resource_path
+ else:
+ # use server/host defined in path or operation instead
+ url = _host + resource_path
+
+ # query parameters
+ if query_params:
+ query_params = self.sanitize_for_serialization(query_params)
+ url_query = self.parameters_to_url_query(
+ query_params,
+ collection_formats
+ )
+ url += "?" + url_query
-class CookieParameter(ParameterBase, StyleFormSerializer):
+ return method, url, header_params, body, post_params
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None and content is None and schema else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.COOKIE,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> cookie
- returns fields: tuple
- """
- if self.style:
- """
- TODO add escaping of comma, space, equals
- or turn encoding on
- """
- value = self._serialize_form(
- cast_in_data,
- explode=self.explode,
- name=self.name,
- percent_encode=False,
- prefix_separator_iterator=PrefixSeparatorIterator('', '&')
- )
- return self._to_dict(self.name, value)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class HeaderParameter(ParameterBase, StyleSimpleSerializer):
- def __init__(
+ def call_api(
self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.HEADER,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
-
- @staticmethod
- def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
- data = tuple(t for t in in_data if t)
- headers = HTTPHeaderDict()
- if not data:
- return headers
- headers.extend(data)
- return headers
+ method,
+ url,
+ header_params=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ) -> rest.RESTResponse:
+ """Makes the HTTP request (synchronous)
+ :param method: Method to call.
+ :param url: Path to method endpoint.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param post_params: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param _request_timeout: timeout setting for this request.
+ :return: RESTResponse
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> HTTPHeaderDict:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> header
- headers: PoolManager needs a mapping, tuple is close
- returns headers: dict
- """
- if self.style:
- value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
- return self.__to_headers(((self.name, value),))
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self.__to_headers(((self.name, value),))
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class Encoding:
- def __init__(
- self,
- content_type: str,
- headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: bool = False,
- ):
- self.content_type = content_type
- self.headers = headers
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
-
-
-@dataclass
-class MediaType:
- """
- Used to store request and response body schema information
- encoding:
- A map between a property name and its encoding information.
- The key, being the property name, MUST exist in the schema as a property.
- The encoding object SHALL only apply to requestBody objects when the media type is
- multipart or application/x-www-form-urlencoded.
- """
- schema: typing.Optional[typing.Type[Schema]] = None
- encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+ try:
+ # perform request and return response
+ response_data = self.rest_client.request(
+ method, url,
+ headers=header_params,
+ body=body, post_params=post_params,
+ _request_timeout=_request_timeout
+ )
+ except ApiException as e:
+ raise e
-@dataclass
-class ApiResponse:
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, Schema]
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
+ return response_data
- def __init__(
+ def response_deserialize(
self,
- response: urllib3.HTTPResponse,
- body: typing.Union[Unset, typing.Type[Schema]],
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
- ):
- """
- pycharm needs this to prevent 'Unexpected argument' warnings
+ response_data: rest.RESTResponse,
+        response_types_map: Optional[Dict[str, ApiResponseT]] = None
+ ) -> ApiResponse[ApiResponseT]:
+ """Deserializes response into an object.
+ :param response_data: RESTResponse object to be deserialized.
+ :param response_types_map: dict of response types.
+ :return: ApiResponse
"""
- self.response = response
- self.body = body
- self.headers = headers
+ msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+ assert response_data.data is not None, msg
-@dataclass
-class ApiResponseWithoutDeserialization(ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, typing.Type[Schema]] = unset
- headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
+        response_types_map = response_types_map or {}
+        response_type = response_types_map.get(str(response_data.status), None)
+ if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599:
+ # if not found, look for '1XX', '2XX', etc.
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None)
+ # deserialize response data
+ response_text = None
+ return_data = None
+ try:
+ if response_type == "bytearray":
+ return_data = response_data.data
+ elif response_type == "file":
+ return_data = self.__deserialize_file(response_data)
+ elif response_type is not None:
+ match = None
+ content_type = response_data.getheader('content-type')
+ if content_type is not None:
+ match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+ encoding = match.group(1) if match else "utf-8"
+ response_text = response_data.data.decode(encoding)
+ return_data = self.deserialize(response_text, response_type, content_type)
+ finally:
+ if not 200 <= response_data.status <= 299:
+ raise ApiException.from_response(
+ http_resp=response_data,
+ body=response_text,
+ data=return_data,
+ )
-class OpenApiResponse(JSONDetector):
- __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+ return ApiResponse(
+ status_code = response_data.status,
+ data = return_data,
+ headers = response_data.getheaders(),
+ raw_data = response_data.data
+ )
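+    # Sketch of the end-to-end flow used by the generated endpoint methods
+    # (mirrors the code in api.py; `_param` comes from a *_serialize helper):
+    #
+    #   response_data = api_client.call_api(*_param, _request_timeout=None)
+    #   response_data.read()
+    #   result = api_client.response_deserialize(
+    #       response_data=response_data,
+    #       response_types_map={'200': "str"},
+    #   ).data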
- def __init__(
- self,
- response_cls: typing.Type[ApiResponse] = ApiResponse,
- content: typing.Optional[typing.Dict[str, MediaType]] = None,
- headers: typing.Optional[typing.List[HeaderParameter]] = None,
- ):
- self.headers = headers
- if content is not None and len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
- self.response_cls = response_cls
-
- @staticmethod
- def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
- # python must be >= 3.9 so we can pass in bytes into json.loads
- return json.loads(response.data)
-
- @staticmethod
- def __file_name_from_response_url(response_url: typing.Optional[str]) -> typing.Optional[str]:
- if response_url is None:
- return None
- url_path = urlparse(response_url).path
- if url_path:
- path_basename = os.path.basename(url_path)
- if path_basename:
- _filename, ext = os.path.splitext(path_basename)
- if ext:
- return path_basename
- return None
+ def sanitize_for_serialization(self, obj):
+ """Builds a JSON POST object.
- @classmethod
- def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
- if content_disposition is None:
- return None
- match = cls.__filename_content_disposition_pattern.search(content_disposition)
- if not match:
- return None
- return match.group(1)
+ If obj is None, return None.
+ If obj is SecretStr, return obj.get_secret_value()
+ If obj is str, int, long, float, bool, return directly.
+ If obj is datetime.datetime, datetime.date
+ convert to string in iso8601 format.
+ If obj is list, sanitize each element in the list.
+ If obj is dict, return the dict.
+ If obj is OpenAPI model, return the properties dict.
- def __deserialize_application_octet_stream(
- self, response: urllib3.HTTPResponse
- ) -> typing.Union[bytes, io.BufferedReader]:
- """
- urllib3 use cases:
- 1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
- 2. when preload_content=False (stream=True) then supports_chunked_reads is True and
- a file will be written and returned
+ :param obj: The data to serialize.
+ :return: The serialized form of data.
"""
- if response.supports_chunked_reads():
- file_name = (
- self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
- or self.__file_name_from_response_url(response.geturl())
+ if obj is None:
+ return None
+ elif isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, SecretStr):
+ return obj.get_secret_value()
+ elif isinstance(obj, self.PRIMITIVE_TYPES):
+ return obj
+ elif isinstance(obj, list):
+ return [
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
+ ]
+ elif isinstance(obj, tuple):
+ return tuple(
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
)
+ elif isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
- if file_name is None:
- _fd, path = tempfile.mkstemp()
- else:
- path = os.path.join(tempfile.gettempdir(), file_name)
-
- with open(path, 'wb') as new_file:
- chunk_size = 1024
- while True:
- data = response.read(chunk_size)
- if not data:
- break
- new_file.write(data)
- # release_conn is needed for streaming connections only
- response.release_conn()
- new_file = open(path, 'rb')
- return new_file
+ elif isinstance(obj, dict):
+ obj_dict = obj
else:
- return response.data
+ # Convert model obj to dict except
+ # attributes `openapi_types`, `attribute_map`
+ # and attributes which value is not None.
+ # Convert attribute name to json key in
+ # model definition for request.
+ if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')):
+ obj_dict = obj.to_dict()
+ else:
+ obj_dict = obj.__dict__
- @staticmethod
- def __deserialize_multipart_form_data(
- response: urllib3.HTTPResponse
- ) -> typing.Dict[str, typing.Any]:
- msg = email.message_from_bytes(response.data)
return {
- part.get_param("name", header="Content-Disposition"): part.get_payload(
- decode=True
- ).decode(part.get_content_charset())
- if part.get_content_charset()
- else part.get_payload()
- for part in msg.get_payload()
+ key: self.sanitize_for_serialization(val)
+ for key, val in obj_dict.items()
}
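+    # sanitize_for_serialization examples (illustrative values):
+    #   datetime.date(2024, 2, 6)   -> '2024-02-06'
+    #   SomeEnum.VALUE              -> SomeEnum.VALUE.value
+    #   model with to_dict()        -> its dict, sanitized recursively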
- def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
- content_type = response.getheader('content-type')
- deserialized_body = unset
- streamed = response.supports_chunked_reads()
-
- deserialized_headers = unset
- if self.headers is not None:
- # TODO add header deserialiation here
- pass
-
- if self.content is not None:
- if content_type not in self.content:
- raise ApiValueError(
- f"Invalid content_type returned. Content_type='{content_type}' was returned "
- f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
- )
- body_schema = self.content[content_type].schema
- if body_schema is None:
- # some specs do not define response content media type schemas
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=unset
- )
+ def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]):
+ """Deserializes response into an object.
+
+        :param response_text: response body text to be deserialized.
+ :param response_type: class literal for
+ deserialized object, or string of class name.
+ :param content_type: content type of response.
+
+ :return: deserialized object.
+ """
- if self._content_type_is_json(content_type):
- body_data = self.__deserialize_json(response)
- elif content_type == 'application/octet-stream':
- body_data = self.__deserialize_application_octet_stream(response)
- elif content_type.startswith('multipart/form-data'):
- body_data = self.__deserialize_multipart_form_data(response)
- content_type = 'multipart/form-data'
+ # fetch data from response object
+ if content_type is None:
+ try:
+ data = json.loads(response_text)
+ except ValueError:
+ data = response_text
+ elif content_type.startswith("application/json"):
+ if response_text == "":
+ data = ""
else:
- raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
- deserialized_body = body_schema.from_openapi_data_oapg(
- body_data, _configuration=configuration)
- elif streamed:
- response.release_conn()
-
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=deserialized_body
- )
+ data = json.loads(response_text)
+ elif content_type.startswith("text/plain"):
+ data = response_text
+ else:
+ raise ApiException(
+ status=0,
+ reason="Unsupported content type: {0}".format(content_type)
+ )
+ return self.__deserialize(data, response_type)
-class ApiClient:
- """Generic API client for OpenAPI client library builds.
+ def __deserialize(self, data, klass):
+ """Deserializes dict, list, str into an object.
- OpenAPI generic API client. This client handles the client-
- server communication, and is invariant across implementations. Specifics of
- the methods and models for each application are generated from the OpenAPI
- templates.
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
- NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
+ :return: object.
+ """
+ if data is None:
+ return None
- :param configuration: .Configuration object for this client
- :param header_name: a header to pass when making calls to the API.
- :param header_value: a header value to pass when making calls to
- the API.
- :param cookie: a cookie to include in the header when making calls
- to the API
- :param pool_threads: The number of threads to use for async requests
- to the API. More threads means more concurrent API requests.
- """
+ if isinstance(klass, str):
+ if klass.startswith('List['):
+ m = re.match(r'List\[(.*)]', klass)
+ assert m is not None, "Malformed List type definition"
+ sub_kls = m.group(1)
+ return [self.__deserialize(sub_data, sub_kls)
+ for sub_data in data]
+
+ if klass.startswith('Dict['):
+ m = re.match(r'Dict\[([^,]*), (.*)]', klass)
+ assert m is not None, "Malformed Dict type definition"
+ sub_kls = m.group(2)
+ return {k: self.__deserialize(v, sub_kls)
+ for k, v in data.items()}
+
+ # convert str to class
+ if klass in self.NATIVE_TYPES_MAPPING:
+ klass = self.NATIVE_TYPES_MAPPING[klass]
+ else:
+ klass = getattr(cloudharness_cli.samples.models, klass)
+
+ if klass in self.PRIMITIVE_TYPES:
+ return self.__deserialize_primitive(data, klass)
+ elif klass == object:
+ return self.__deserialize_object(data)
+ elif klass == datetime.date:
+ return self.__deserialize_date(data)
+ elif klass == datetime.datetime:
+ return self.__deserialize_datetime(data)
+ elif issubclass(klass, Enum):
+ return self.__deserialize_enum(data, klass)
+ else:
+ return self.__deserialize_model(data, klass)
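+    # __deserialize type-string examples (illustrative values):
+    #   data=['a', 'b'],   klass='List[str]'      -> ['a', 'b']
+    #   data={'n': 1},     klass='Dict[str, int]' -> {'n': 1}
+    #   data='2024-02-06', klass='date'           -> datetime.date(2024, 2, 6)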
- _pool = None
+ def parameters_to_tuples(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __init__(
- self,
- configuration: typing.Optional[Configuration] = None,
- header_name: typing.Optional[str] = None,
- header_value: typing.Optional[str] = None,
- cookie: typing.Optional[str] = None,
- pool_threads: int = 1
- ):
- if configuration is None:
- configuration = Configuration()
- self.configuration = configuration
- self.pool_threads = pool_threads
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: Parameters as list of tuples, collections formatted
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, value) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(str(value) for value in v)))
+ else:
+ new_params.append((k, v))
+ return new_params
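+    # Example: {'id': [1, 2, 3]} with collection_formats={'id': 'csv'} yields
+    # [('id', '1,2,3')]; with 'multi' it yields [('id', 1), ('id', 2), ('id', 3)].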
- self.rest_client = rest.RESTClientObject(configuration)
- self.default_headers = HTTPHeaderDict()
- if header_name is not None:
- self.default_headers[header_name] = header_value
- self.cookie = cookie
- # Set default User-Agent.
- self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ def parameters_to_url_query(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __enter__(self):
- return self
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: URL query string (e.g. a=Hello%20World&b=123)
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if isinstance(v, bool):
+ v = str(v).lower()
+ if isinstance(v, (int, float)):
+ v = str(v)
+ if isinstance(v, dict):
+ v = json.dumps(v)
+
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, str(value)) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(quote(str(value)) for value in v))
+ )
+ else:
+ new_params.append((k, quote(str(v))))
- def __exit__(self, exc_type, exc_value, traceback):
- self.close()
+ return "&".join(["=".join(map(str, item)) for item in new_params])
- def close(self):
- if self._pool:
- self._pool.close()
- self._pool.join()
- self._pool = None
- if hasattr(atexit, 'unregister'):
- atexit.unregister(self.close)
+ def files_parameters(self, files: Dict[str, Union[str, bytes]]):
+ """Builds form parameters.
- @property
- def pool(self):
- """Create thread pool on first request
- avoids instantiating unused threadpool for blocking clients.
+ :param files: File parameters.
+ :return: Form parameters with files.
"""
- if self._pool is None:
- atexit.register(self.close)
- self._pool = ThreadPool(self.pool_threads)
- return self._pool
-
- @property
- def user_agent(self):
- """User agent for this API client"""
- return self.default_headers['User-Agent']
+ params = []
+ for k, v in files.items():
+ if isinstance(v, str):
+ with open(v, 'rb') as f:
+ filename = os.path.basename(f.name)
+ filedata = f.read()
+ elif isinstance(v, bytes):
+ filename = k
+ filedata = v
+ else:
+ raise ValueError("Unsupported file value")
+ mimetype = (
+ mimetypes.guess_type(filename)[0]
+ or 'application/octet-stream'
+ )
+ params.append(
+ tuple([k, tuple([filename, filedata, mimetype])])
+ )
+ return params
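+    # Example (hypothetical path): {'report': '/tmp/report.pdf'} yields
+    # [('report', ('report.pdf', <file bytes>, 'application/pdf'))].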
- @user_agent.setter
- def user_agent(self, value):
- self.default_headers['User-Agent'] = value
+ def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+ """Returns `Accept` based on an array of accepts provided.
- def set_default_header(self, header_name, header_value):
- self.default_headers[header_name] = header_value
+ :param accepts: List of headers.
+ :return: Accept (e.g. application/json).
+ """
+ if not accepts:
+ return None
- def __call_api(
- self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
+ for accept in accepts:
+ if re.search('json', accept, re.IGNORECASE):
+ return accept
- # header parameters
- used_headers = HTTPHeaderDict(self.default_headers)
- if self.cookie:
- headers['Cookie'] = self.cookie
+ return accepts[0]
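+    # Example: ['application/xml', 'application/json'] -> 'application/json'
+    # (the first JSON-like entry wins; otherwise the first entry is returned).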
- # auth setting
- self.update_params_for_auth(used_headers,
- auth_settings, resource_path, method, body)
+ def select_header_content_type(self, content_types):
+ """Returns `Content-Type` based on an array of content_types provided.
- # must happen after cookie setting and auth setting in case user is overriding those
- if headers:
- used_headers.update(headers)
+ :param content_types: List of content-types.
+ :return: Content-Type (e.g. application/json).
+ """
+ if not content_types:
+ return None
- # request url
- if host is None:
- url = self.configuration.host + resource_path
- else:
- # use server/host defined in path or operation instead
- url = host + resource_path
+ for content_type in content_types:
+ if re.search('json', content_type, re.IGNORECASE):
+ return content_type
- # perform request and return response
- response = self.request(
- method,
- url,
- headers=used_headers,
- fields=fields,
- body=body,
- stream=stream,
- timeout=timeout,
- )
- return response
+ return content_types[0]
- def call_api(
+ def update_params_for_auth(
self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- async_req: typing.Optional[bool] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request (synchronous) and returns deserialized data.
-
- To make an async_req request, set the async_req parameter.
+ headers,
+ queries,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=None
+ ) -> None:
+ """Updates header and query params based on authentication setting.
- :param resource_path: Path to method endpoint.
- :param method: Method to call.
- :param headers: Header parameters to be
- placed in the request header.
- :param body: Request body.
- :param fields: Request post form parameters,
- for `application/x-www-form-urlencoded`, `multipart/form-data`.
- :param auth_settings: Auth Settings names for the request.
- :param async_req: execute request asynchronously
- :type async_req: bool, optional TODO remove, unused
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Also when True, if the openapi spec describes a file download,
- the data will be written to a local filesystme file and the BinarySchema
- instance will also inherit from FileSchema and FileIO
- Default is False.
- :type stream: bool, optional
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
- :param host: api endpoint host
- :return:
- If async_req parameter is True,
- the request will be called asynchronously.
- The method will return the request thread.
- If parameter async_req is False or missing,
- then the method will return the response directly.
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param request_auth: if set, the provided settings will
+ override the token in the configuration.
"""
+ if not auth_settings:
+ return
- if not async_req:
- return self.__call_api(
- resource_path,
- method,
+ if request_auth:
+ self._apply_auth_params(
headers,
- body,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
- )
-
- return self.pool.apply_async(
- self.__call_api,
- (
+ queries,
resource_path,
method,
- headers,
body,
- json,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
+ request_auth
)
- )
-
- def request(
+ else:
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if auth_setting:
+ self._apply_auth_params(
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ )
+
+ def _apply_auth_params(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request using RESTClient."""
- if method == "GET":
- return self.rest_client.GET(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "HEAD":
- return self.rest_client.HEAD(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "OPTIONS":
- return self.rest_client.OPTIONS(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "POST":
- return self.rest_client.POST(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PUT":
- return self.rest_client.PUT(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PATCH":
- return self.rest_client.PATCH(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "DELETE":
- return self.rest_client.DELETE(url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body)
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ ) -> None:
+ """Updates the request parameters based on a single auth_setting
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param auth_setting: auth settings for the endpoint
+ """
+ if auth_setting['in'] == 'cookie':
+ headers['Cookie'] = auth_setting['value']
+ elif auth_setting['in'] == 'header':
+ if auth_setting['type'] != 'http-signature':
+ headers[auth_setting['key']] = auth_setting['value']
+ elif auth_setting['in'] == 'query':
+ queries.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
- "http method must be `GET`, `HEAD`, `OPTIONS`,"
- " `POST`, `PATCH`, `PUT` or `DELETE`."
+ 'Authentication token must be in `query`, `header` or `cookie`'
)
- def update_params_for_auth(self, headers, auth_settings,
- resource_path, method, body):
- """Updates header and query params based on authentication setting.
+ def __deserialize_file(self, response):
+ """Deserializes body to file
- :param headers: Header parameters dict to be updated.
- :param auth_settings: Authentication setting identifiers list.
- :param resource_path: A string representation of the HTTP request resource path.
- :param method: A string representation of the HTTP request method.
- :param body: A object representing the body of the HTTP request.
- The object type is the return value of _encoder.default().
- """
- if not auth_settings:
- return
+ Saves response body into a file in a temporary folder,
+ using the filename from the `Content-Disposition` header if provided.
- for auth in auth_settings:
- auth_setting = self.configuration.auth_settings().get(auth)
- if not auth_setting:
- continue
- if auth_setting['in'] == 'cookie':
- headers.add('Cookie', auth_setting['value'])
- elif auth_setting['in'] == 'header':
- if auth_setting['type'] != 'http-signature':
- headers.add(auth_setting['key'], auth_setting['value'])
- elif auth_setting['in'] == 'query':
- """ TODO implement auth in query
- need to pass in prefix_separator_iterator
- and need to output resource_path with query params added
- """
- raise ApiValueError("Auth in query not yet implemented")
- else:
- raise ApiValueError(
- 'Authentication token must be in `query` or `header`'
- )
+
+ :param response: RESTResponse.
+ :return: file path.
+ """
+ fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
+ os.close(fd)
+ os.remove(path)
+
+ content_disposition = response.getheader("Content-Disposition")
+ if content_disposition:
+ m = re.search(
+ r'filename=[\'"]?([^\'"\s]+)[\'"]?',
+ content_disposition
+ )
+ assert m is not None, "Unexpected 'content-disposition' header value"
+ filename = m.group(1)
+ path = os.path.join(os.path.dirname(path), filename)
-class Api:
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
+ with open(path, "wb") as f:
+ f.write(response.data)
- Do not edit the class manually.
- """
+ return path
+
+ def __deserialize_primitive(self, data, klass):
+ """Deserializes string to primitive type.
- def __init__(self, api_client: typing.Optional[ApiClient] = None):
- if api_client is None:
- api_client = ApiClient()
- self.api_client = api_client
+ :param data: str.
+ :param klass: class literal.
- @staticmethod
- def _verify_typed_dict_inputs_oapg(cls: typing.Type[typing_extensions.TypedDict], data: typing.Dict[str, typing.Any]):
+ :return: int, long, float, str, bool.
"""
- Ensures that:
- - required keys are present
- - additional properties are not input
- - value stored under required keys do not have the value unset
- Note: detailed value checking is done in schema classes
+ try:
+ return klass(data)
+ except UnicodeEncodeError:
+ return str(data)
+ except TypeError:
+ return data
+
+ def __deserialize_object(self, value):
+ """Return an original value.
+
+ :return: object.
"""
- missing_required_keys = []
- required_keys_with_unset_values = []
- for required_key in cls.__required_keys__:
- if required_key not in data:
- missing_required_keys.append(required_key)
- continue
- value = data[required_key]
- if value is unset:
- required_keys_with_unset_values.append(required_key)
- if missing_required_keys:
- raise ApiTypeError(
- '{} missing {} required arguments: {}'.format(
- cls.__name__, len(missing_required_keys), missing_required_keys
- )
- )
- if required_keys_with_unset_values:
- raise ApiValueError(
- '{} contains invalid unset values for {} required keys: {}'.format(
- cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
- )
- )
+ return value
- disallowed_additional_keys = []
- for key in data:
- if key in cls.__required_keys__ or key in cls.__optional_keys__:
- continue
- disallowed_additional_keys.append(key)
- if disallowed_additional_keys:
- raise ApiTypeError(
- '{} got {} unexpected keyword arguments: {}'.format(
- cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
- )
- )
+ def __deserialize_date(self, string):
+ """Deserializes string to date.
- def _get_host_oapg(
- self,
- operation_id: str,
- servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
- host_index: typing.Optional[int] = None
- ) -> typing.Optional[str]:
- configuration = self.api_client.configuration
+ :param string: str.
+ :return: date.
+ """
try:
- if host_index is None:
- index = configuration.server_operation_index.get(
- operation_id, configuration.server_index
- )
- else:
- index = host_index
- server_variables = configuration.server_operation_variables.get(
- operation_id, configuration.server_variables
- )
- host = configuration.get_host_from_settings(
- index, variables=server_variables, servers=servers
+ return parse(string).date()
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason="Failed to parse `{0}` as date object".format(string)
)
- except IndexError:
- if servers:
- raise ApiValueError(
- "Invalid host index. Must be 0 <= index < %s" %
- len(servers)
- )
- host = None
- return host
-
-
-class SerializedRequestBody(typing_extensions.TypedDict, total=False):
- body: typing.Union[str, bytes]
- fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+ def __deserialize_datetime(self, string):
+ """Deserializes string to datetime.
-class RequestBody(StyleFormSerializer, JSONDetector):
- """
- A request body parameter
- content: content_type to MediaType Schema info
- """
- __json_encoder = JSONEncoder()
+ The string should be in ISO 8601 datetime format.
- def __init__(
- self,
- content: typing.Dict[str, MediaType],
- required: bool = False,
- ):
- self.required = required
- if len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
-
- def __serialize_json(
- self,
- in_data: typing.Any
- ) -> typing.Dict[str, bytes]:
- in_data = self.__json_encoder.default(in_data)
- json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
- "utf-8"
- )
- return dict(body=json_str)
-
- @staticmethod
- def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
- if isinstance(in_data, frozendict.frozendict):
- raise ValueError('Unable to serialize type frozendict.frozendict to text/plain')
- elif isinstance(in_data, tuple):
- raise ValueError('Unable to serialize type tuple to text/plain')
- elif isinstance(in_data, NoneClass):
- raise ValueError('Unable to serialize type NoneClass to text/plain')
- elif isinstance(in_data, BoolClass):
- raise ValueError('Unable to serialize type BoolClass to text/plain')
- return dict(body=str(in_data))
-
- def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
- json_value = self.__json_encoder.default(value)
- return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
-
- def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
- if isinstance(value, str):
- return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
- elif isinstance(value, bytes):
- return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
- elif isinstance(value, FileIO):
- request_field = RequestField(
- name=key,
- data=value.read(),
- filename=os.path.basename(value.name),
- headers={'Content-Type': 'application/octet-stream'}
+ :param string: str.
+ :return: datetime.
+ """
+ try:
+ return parse(string)
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as datetime object"
+ .format(string)
+ )
)
- value.close()
- return request_field
- else:
- return self.__multipart_json_item(key=key, value=value)
- def __serialize_multipart_form_data(
- self, in_data: Schema
- ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data')
+ def __deserialize_enum(self, data, klass):
+ """Deserializes primitive type to enum.
+
+ :param data: primitive type.
+ :param klass: class literal.
+ :return: enum value.
"""
- In a multipart/form-data request body, each schema property, or each element of a schema array property,
- takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
- for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+ try:
+ return klass(data)
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as `{1}`"
+ .format(data, klass)
+ )
+ )
- When passing in multipart types, boundaries MAY be used to separate sections of the content being
- transferred – thus, the following default Content-Types are defined for multipart:
+ def __deserialize_model(self, data, klass):
+ """Deserializes list or dict to model.
- If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
- If the property is complex, or an array of complex values, the default Content-Type is application/json
- Question: how is the array of primitives encoded?
- If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
- """
- fields = []
- for key, value in in_data.items():
- if isinstance(value, tuple):
- if value:
- # values use explode = True, so the code makes a RequestField for each item with name=key
- for item in value:
- request_field = self.__multipart_form_item(key=key, value=item)
- fields.append(request_field)
- else:
- # send an empty array as json because exploding will not send it
- request_field = self.__multipart_json_item(key=key, value=value)
- fields.append(request_field)
- else:
- request_field = self.__multipart_form_item(key=key, value=value)
- fields.append(request_field)
-
- return dict(fields=tuple(fields))
-
- def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
- if isinstance(in_data, bytes):
- return dict(body=in_data)
- # FileIO type
- result = dict(body=in_data.read())
- in_data.close()
- return result
-
- def __serialize_application_x_www_form_data(
- self, in_data: typing.Any
- ) -> SerializedRequestBody:
+ :param data: dict, list.
+ :param klass: class literal.
+ :return: model object.
"""
- POST submission of form data in body
- """
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(
- f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
- cast_in_data = self.__json_encoder.default(in_data)
- value = self._serialize_form(cast_in_data, name='', explode=True, percent_encode=True)
- return dict(body=value)
-
- def serialize(
- self, in_data: typing.Any, content_type: str
- ) -> SerializedRequestBody:
- """
- If a str is returned then the result will be assigned to data when making the request
- If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
- Return a tuple of
- The key of the return dict is
- - body for application/json
- - encode_multipart and fields for multipart/form-data
- """
- media_type = self.content[content_type]
- if isinstance(in_data, media_type.schema):
- cast_in_data = in_data
- elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
- cast_in_data = media_type.schema(**in_data)
- else:
- cast_in_data = media_type.schema(in_data)
- # TODO check for and use encoding if it exists
- # and content_type is multipart or application/x-www-form-urlencoded
- if self._content_type_is_json(content_type):
- return self.__serialize_json(cast_in_data)
- elif content_type == 'text/plain':
- return self.__serialize_text_plain(cast_in_data)
- elif content_type == 'multipart/form-data':
- return self.__serialize_multipart_form_data(cast_in_data)
- elif content_type == 'application/x-www-form-urlencoded':
- return self.__serialize_application_x_www_form_data(cast_in_data)
- elif content_type == 'application/octet-stream':
- return self.__serialize_application_octet_stream(cast_in_data)
- raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
+ return klass.from_dict(data)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_response.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_response.py
new file mode 100644
index 000000000..9bc7c11f6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/api_response.py
@@ -0,0 +1,21 @@
+"""API response object."""
+
+from __future__ import annotations
+from typing import Optional, Generic, Mapping, TypeVar
+from pydantic import Field, StrictInt, StrictBytes, BaseModel
+
+T = TypeVar("T")
+
+class ApiResponse(BaseModel, Generic[T]):
+ """
+ API response object
+ """
+
+ status_code: StrictInt = Field(description="HTTP status code")
+ headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers")
+ data: T = Field(description="Deserialized data given the data type")
+ raw_data: StrictBytes = Field(description="Raw data (HTTP response body)")
+
+ model_config = {
+ "arbitrary_types_allowed": True
+ }
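A possible usage sketch for the new generic `ApiResponse` model added above (assuming pydantic v2 generics, as the `model_config` dict implies; the payload values are hypothetical):

    from typing import Dict

    from cloudharness_cli.samples.api_response import ApiResponse

    # Build a typed response wrapper around a deserialized JSON payload.
    response = ApiResponse[Dict[str, str]](
        status_code=200,
        headers={"Content-Type": "application/json"},
        data={"status": "ok"},           # deserialized body
        raw_data=b'{"status": "ok"}',    # raw HTTP body bytes
    )
    print(response.status_code, response.data)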
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/__init__.py
deleted file mode 100644
index 7840f7726..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints then import them from
-# tags, paths, or path_to_api, or tag_to_api
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/path_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/path_to_api.py
deleted file mode 100644
index 0cc5ed787..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/path_to_api.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.samples.paths import PathValues
-from cloudharness_cli.samples.apis.paths.error import Error
-from cloudharness_cli.samples.apis.paths.ping import Ping
-from cloudharness_cli.samples.apis.paths.valid import Valid
-from cloudharness_cli.samples.apis.paths.valid_cookie import ValidCookie
-from cloudharness_cli.samples.apis.paths.sampleresources import Sampleresources
-from cloudharness_cli.samples.apis.paths.sampleresources_sampleresource_id import SampleresourcesSampleresourceId
-from cloudharness_cli.samples.apis.paths.operation_async import OperationAsync
-from cloudharness_cli.samples.apis.paths.operation_sync import OperationSync
-from cloudharness_cli.samples.apis.paths.operation_sync_results import OperationSyncResults
-
-PathToApi = typing_extensions.TypedDict(
- 'PathToApi',
- {
- PathValues.ERROR: Error,
- PathValues.PING: Ping,
- PathValues.VALID: Valid,
- PathValues.VALIDCOOKIE: ValidCookie,
- PathValues.SAMPLERESOURCES: Sampleresources,
- PathValues.SAMPLERESOURCES_SAMPLERESOURCE_ID: SampleresourcesSampleresourceId,
- PathValues.OPERATION_ASYNC: OperationAsync,
- PathValues.OPERATION_SYNC: OperationSync,
- PathValues.OPERATION_SYNC_RESULTS: OperationSyncResults,
- }
-)
-
-path_to_api = PathToApi(
- {
- PathValues.ERROR: Error,
- PathValues.PING: Ping,
- PathValues.VALID: Valid,
- PathValues.VALIDCOOKIE: ValidCookie,
- PathValues.SAMPLERESOURCES: Sampleresources,
- PathValues.SAMPLERESOURCES_SAMPLERESOURCE_ID: SampleresourcesSampleresourceId,
- PathValues.OPERATION_ASYNC: OperationAsync,
- PathValues.OPERATION_SYNC: OperationSync,
- PathValues.OPERATION_SYNC_RESULTS: OperationSyncResults,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/__init__.py
deleted file mode 100644
index a8ed00d23..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.apis.path_to_api import path_to_api
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/error.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/error.py
deleted file mode 100644
index d7aaf40ba..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/error.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.error.get import ApiForget
-
-
-class Error(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_async.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_async.py
deleted file mode 100644
index 04e40332f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_async.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.operation_async.get import ApiForget
-
-
-class OperationAsync(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync.py
deleted file mode 100644
index 83b406495..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.operation_sync.get import ApiForget
-
-
-class OperationSync(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync_results.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync_results.py
deleted file mode 100644
index fa72ae66a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/operation_sync_results.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.operation_sync_results.get import ApiForget
-
-
-class OperationSyncResults(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/ping.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/ping.py
deleted file mode 100644
index b8e3b5682..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/ping.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.ping.get import ApiForget
-
-
-class Ping(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources.py
deleted file mode 100644
index 3b513ab0a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from cloudharness_cli.samples.paths.sampleresources.get import ApiForget
-from cloudharness_cli.samples.paths.sampleresources.post import ApiForpost
-
-
-class Sampleresources(
- ApiForget,
- ApiForpost,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources_sampleresource_id.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources_sampleresource_id.py
deleted file mode 100644
index 1e8b7793b..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/sampleresources_sampleresource_id.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.get import ApiForget
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.put import ApiForput
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.delete import ApiFordelete
-
-
-class SampleresourcesSampleresourceId(
- ApiForget,
- ApiForput,
- ApiFordelete,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid.py
deleted file mode 100644
index 930a13b9c..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.valid.get import ApiForget
-
-
-class Valid(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid_cookie.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid_cookie.py
deleted file mode 100644
index d843d017f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/paths/valid_cookie.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.samples.paths.valid_cookie.get import ApiForget
-
-
-class ValidCookie(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tag_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tag_to_api.py
deleted file mode 100644
index a6c707bdb..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tag_to_api.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.samples.apis.tags import TagValues
-from cloudharness_cli.samples.apis.tags.auth_api import AuthApi
-from cloudharness_cli.samples.apis.tags.workflows_api import WorkflowsApi
-from cloudharness_cli.samples.apis.tags.resource_api import ResourceApi
-from cloudharness_cli.samples.apis.tags.test_api import TestApi
-
-TagToApi = typing_extensions.TypedDict(
- 'TagToApi',
- {
- TagValues.AUTH: AuthApi,
- TagValues.WORKFLOWS: WorkflowsApi,
- TagValues.RESOURCE: ResourceApi,
- TagValues.TEST: TestApi,
- }
-)
-
-tag_to_api = TagToApi(
- {
- TagValues.AUTH: AuthApi,
- TagValues.WORKFLOWS: WorkflowsApi,
- TagValues.RESOURCE: ResourceApi,
- TagValues.TEST: TestApi,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/__init__.py
deleted file mode 100644
index 9605b3116..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.apis.tag_to_api import tag_to_api
-
-import enum
-
-
-class TagValues(str, enum.Enum):
- AUTH = "auth"
- WORKFLOWS = "workflows"
- RESOURCE = "resource"
- TEST = "test"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/auth_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/auth_api.py
deleted file mode 100644
index dd5aec507..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/auth_api.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.samples.paths.valid_cookie.get import ValidCookie
-from cloudharness_cli.samples.paths.valid.get import ValidToken
-
-
-class AuthApi(
- ValidCookie,
- ValidToken,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/resource_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/resource_api.py
deleted file mode 100644
index e80716199..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/resource_api.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.samples.paths.sampleresources.post import CreateSampleResource
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.delete import DeleteSampleResource
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.get import GetSampleResource
-from cloudharness_cli.samples.paths.sampleresources.get import GetSampleResources
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.put import UpdateSampleResource
-
-
-class ResourceApi(
- CreateSampleResource,
- DeleteSampleResource,
- GetSampleResource,
- GetSampleResources,
- UpdateSampleResource,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/test_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/test_api.py
deleted file mode 100644
index 6961d1b36..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/test_api.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.samples.paths.error.get import Error
-from cloudharness_cli.samples.paths.ping.get import Ping
-
-
-class TestApi(
- Error,
- Ping,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/workflows_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/workflows_api.py
deleted file mode 100644
index 289ce506e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/apis/tags/workflows_api.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.samples.paths.operation_async.get import SubmitAsync
-from cloudharness_cli.samples.paths.operation_sync.get import SubmitSync
-from cloudharness_cli.samples.paths.operation_sync_results.get import SubmitSyncWithResults
-
-
-class WorkflowsApi(
- SubmitAsync,
- SubmitSync,
- SubmitSyncWithResults,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/configuration.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/configuration.py
index 296662ac5..09f057091 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/configuration.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/configuration.py
@@ -3,70 +3,49 @@
"""
CloudHarness Sample API
- CloudHarness Sample api # noqa: E501
+ CloudHarness Sample api
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
import copy
import logging
+from logging import FileHandler
import multiprocessing
import sys
+from typing import Optional
import urllib3
-from http import client as http_client
-from cloudharness_cli.samples.exceptions import ApiValueError
-
+import http.client as httplib
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
- 'minLength', 'pattern', 'maxItems', 'minItems',
- 'uniqueItems', 'maxProperties', 'minProperties',
+ 'minLength', 'pattern', 'maxItems', 'minItems'
}
-class Configuration(object):
- """NOTE: This class is auto generated by OpenAPI Generator
+class Configuration:
+ """This class contains various settings of the API client.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
-
- :param host: Base url
+ :param host: Base url.
+ :param ignore_operation_servers: Boolean to ignore operation servers for the API client.
+ Config will use `host` as the base url regardless of the operation servers.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
- :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
- :param username: Username for HTTP basic authentication
- :param password: Password for HTTP basic authentication
- :param discard_unknown_keys: Boolean value indicating whether to discard
- unknown properties. A server may send a response that includes additional
- properties that are not known by the client in the following scenarios:
- 1. The OpenAPI document is incomplete, i.e. it does not match the server
- implementation.
- 2. The client was generated using an older version of the OpenAPI document
- and the server has been upgraded since then.
- If a schema in the OpenAPI document defines the additionalProperties attribute,
- then all undeclared properties received by the server are injected into the
- additional properties map. In that case, there are undeclared properties, and
- nothing to discard.
- :param disabled_client_side_validations (string): Comma-separated list of
- JSON schema validation keywords to disable JSON schema structural validation
- rules. The following keywords may be specified: multipleOf, maximum,
- exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
- maxItems, minItems.
- By default, the validation is performed for data generated locally by the client
- and data received from the server, independent of any validation performed by
- the server side. If the input data does not satisfy the JSON schema validation
- rules specified in the OpenAPI document, an exception is raised.
- If disabled_client_side_validations is set, structural validation is
- disabled. This can be useful to troubleshoot data validation problem, such as
- when the OpenAPI document validation rules do not match the actual API data
- received by the server.
+ :param username: Username for HTTP basic authentication.
+ :param password: Password for HTTP basic authentication.
+ :param access_token: Access token.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
@@ -75,7 +54,11 @@ class Configuration(object):
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
- The validation of enums is performed for variables with defined enum values before.
+ The validation of enums is performed for variables with defined enum
+ values before.
+ :param ssl_ca_cert: The path (str) to a file of concatenated CA certificates
+ in PEM format.
+ :param retries: Number of retries for API requests.
:Example:
@@ -104,11 +87,15 @@ class Configuration(object):
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
username=None, password=None,
- discard_unknown_keys=False,
- disabled_client_side_validations="",
+ access_token=None,
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
- ):
+ ignore_operation_servers=False,
+ ssl_ca_cert=None,
+ retries=None,
+ *,
+ debug: Optional[bool] = None
+ ) -> None:
"""Constructor
"""
self._base_path = "/api" if host is None else host
@@ -122,6 +109,9 @@ def __init__(self, host=None,
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
+ self.ignore_operation_servers = ignore_operation_servers
+ """Ignore operation servers
+ """
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
@@ -145,8 +135,9 @@ def __init__(self, host=None,
self.password = password
"""Password for HTTP basic authentication
"""
- self.discard_unknown_keys = discard_unknown_keys
- self.disabled_client_side_validations = disabled_client_side_validations
+ self.access_token = access_token
+ """Access token
+ """
self.logger = {}
"""Logging Settings
"""
@@ -158,13 +149,16 @@ def __init__(self, host=None,
self.logger_stream_handler = None
"""Log stream handler
"""
- self.logger_file_handler = None
+ self.logger_file_handler: Optional[FileHandler] = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
- self.debug = False
+ if debug is not None:
+ self.debug = debug
+ else:
+ self.__debug = False
"""Debug switch
"""
@@ -173,7 +167,7 @@ def __init__(self, host=None,
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
- self.ssl_ca_cert = None
+ self.ssl_ca_cert = ssl_ca_cert
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
@@ -185,6 +179,10 @@ def __init__(self, host=None,
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
+ self.tls_server_name = None
+ """SSL/TLS Server Name Indication (SNI)
+ Set this to the SNI value expected by the server.
+ """
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
@@ -194,7 +192,7 @@ def __init__(self, host=None,
cpu_count * 5 is used as default value to increase performance.
"""
- self.proxy = None
+ self.proxy: Optional[str] = None
"""Proxy URL
"""
self.proxy_headers = None
@@ -203,14 +201,23 @@ def __init__(self, host=None,
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
- self.retries = None
+ self.retries = retries
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
- # Options to pass down to the underlying urllib3 socket
self.socket_options = None
+ """Options to pass down to the underlying urllib3 socket
+ """
+
+ self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+ """datetime format
+ """
+
+ self.date_format = "%Y-%m-%d"
+ """date format
+ """
def __deepcopy__(self, memo):
cls = self.__class__
@@ -228,13 +235,6 @@ def __deepcopy__(self, memo):
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
- if name == 'disabled_client_side_validations':
- s = set(filter(None, value.split(',')))
- for v in s:
- if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
- raise ApiValueError(
- "Invalid keyword: '{0}''".format(v))
- self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
@@ -245,21 +245,31 @@ def set_default(cls, default):
:param default: object of Configuration
"""
- cls._default = copy.deepcopy(default)
+ cls._default = default
@classmethod
def get_default_copy(cls):
- """Return new instance of configuration.
+ """Deprecated. Please use `get_default` instead.
+
+ :return: The configuration object.
+ """
+ return cls.get_default()
+
+ @classmethod
+ def get_default(cls):
+ """Return the default configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
- configuration passed by the set_default method.
+ configuration.
:return: The configuration object.
"""
- if cls._default is not None:
- return copy.deepcopy(cls._default)
- return Configuration()
+ if cls._default is None:
+ cls._default = Configuration()
+ return cls._default
@property
def logger_file(self):
@@ -313,15 +323,15 @@ def debug(self, value):
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
- # turn on http_client debug
- http_client.HTTPConnection.debuglevel = 1
+ # turn on httplib debug
+ httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
- # turn off http_client debug
- http_client.HTTPConnection.debuglevel = 0
+ # turn off httplib debug
+ httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
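To make the new `Configuration` options concrete, a minimal sketch of the updated constructor and default handling; the host and certificate paths are hypothetical:

    from cloudharness_cli.samples.configuration import Configuration

    config = Configuration(
        host="https://example.com/api",       # hypothetical base url
        ssl_ca_cert="/etc/ssl/certs/ca.pem",  # new constructor parameter
        retries=3,                            # overrides urllib3's default
        debug=True,                           # keyword-only debug switch
    )

    # set_default now stores the instance itself (no deep copy), and
    # get_default lazily creates a Configuration if none was set.
    Configuration.set_default(config)
    assert Configuration.get_default() is config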
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py
index b2654b6d7..6d39d45be 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py
@@ -3,13 +3,17 @@
"""
CloudHarness Sample API
- CloudHarness Sample api # noqa: E501
+ CloudHarness Sample api
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+from typing import Any, Optional
+from typing_extensions import Self
class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
@@ -17,7 +21,7 @@ class OpenApiException(Exception):
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
- key_type=None):
+ key_type=None) -> None:
""" Raises an exception for TypeErrors
Args:
@@ -45,7 +49,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None,
class ApiValueError(OpenApiException, ValueError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -63,7 +67,7 @@ def __init__(self, msg, path_to_item=None):
class ApiAttributeError(OpenApiException, AttributeError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Raised when an attribute reference or assignment fails.
@@ -82,7 +86,7 @@ def __init__(self, msg, path_to_item=None):
class ApiKeyError(OpenApiException, KeyError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -100,17 +104,56 @@ def __init__(self, msg, path_to_item=None):
class ApiException(OpenApiException):
- def __init__(self, status=None, reason=None, api_response: 'cloudharness_cli.samples.api_client.ApiResponse' = None):
- if api_response:
- self.status = api_response.response.status
- self.reason = api_response.response.reason
- self.body = api_response.response.data
- self.headers = api_response.response.getheaders()
- else:
- self.status = status
- self.reason = reason
- self.body = None
- self.headers = None
+ def __init__(
+ self,
+ status=None,
+ reason=None,
+ http_resp=None,
+ *,
+ body: Optional[str] = None,
+ data: Optional[Any] = None,
+ ) -> None:
+ self.status = status
+ self.reason = reason
+ self.body = body
+ self.data = data
+ self.headers = None
+
+ if http_resp:
+ if self.status is None:
+ self.status = http_resp.status
+ if self.reason is None:
+ self.reason = http_resp.reason
+ if self.body is None:
+ try:
+ self.body = http_resp.data.decode('utf-8')
+ except Exception:
+ pass
+ self.headers = http_resp.getheaders()
+
+ @classmethod
+ def from_response(
+ cls,
+ *,
+ http_resp,
+ body: Optional[str],
+ data: Optional[Any],
+ ) -> Self:
+ if http_resp.status == 400:
+ raise BadRequestException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 401:
+ raise UnauthorizedException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 403:
+ raise ForbiddenException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 404:
+ raise NotFoundException(http_resp=http_resp, body=body, data=data)
+
+ if 500 <= http_resp.status <= 599:
+ raise ServiceException(http_resp=http_resp, body=body, data=data)
+ raise ApiException(http_resp=http_resp, body=body, data=data)
def __str__(self):
"""Custom error messages for exception"""
@@ -120,12 +163,32 @@ def __str__(self):
error_message += "HTTP response headers: {0}\n".format(
self.headers)
- if self.body:
- error_message += "HTTP response body: {0}\n".format(self.body)
+ if self.data or self.body:
+ error_message += "HTTP response body: {0}\n".format(self.data or self.body)
return error_message
+class BadRequestException(ApiException):
+ pass
+
+
+class NotFoundException(ApiException):
+ pass
+
+
+class UnauthorizedException(ApiException):
+ pass
+
+
+class ForbiddenException(ApiException):
+ pass
+
+
+class ServiceException(ApiException):
+ pass
+
+
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
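With the new exception hierarchy, callers can catch specific HTTP error classes instead of inspecting status codes. A hedged sketch, where `api.get_sample_resource` stands in for any generated client call:

    from cloudharness_cli.samples.exceptions import (
        ApiException,
        NotFoundException,
        UnauthorizedException,
    )

    try:
        # api: a generated client instance (hypothetical)
        resource = api.get_sample_resource("missing-id")
    except NotFoundException as e:
        print("not found:", e.status, e.reason)
    except UnauthorizedException:
        print("credentials rejected")
    except ApiException as e:
        # Fallback for any other non-2xx response; e.data carries the
        # deserialized body when available, e.body the decoded text.
        print("API error:", e)

Since the new subclasses all inherit from ApiException, the generic handler must come last.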
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/__init__.py
deleted file mode 100644
index e34e27482..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we can not import model classes here because that would create a circular
-# reference which would not work in python2
-# do not import all models into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all models from one package, import them with
-# from cloudharness_cli.samples.models import ModelA, ModelB
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.py
deleted file mode 100644
index 0329ad6bb..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class InlineResponse202(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
-
- @staticmethod
- def task() -> typing.Type['InlineResponse202Task']:
- return InlineResponse202Task
- __annotations__ = {
- "task": task,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["task"]) -> 'InlineResponse202Task': ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["task", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["task"]) -> typing.Union['InlineResponse202Task', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["task", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- task: typing.Union['InlineResponse202Task', schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'InlineResponse202':
- return super().__new__(
- cls,
- *args,
- task=task,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli/samples.model.inline_response202_task import InlineResponse202Task
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.pyi
deleted file mode 100644
index 0329ad6bb..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202.pyi
+++ /dev/null
@@ -1,84 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class InlineResponse202(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
-
- @staticmethod
- def task() -> typing.Type['InlineResponse202Task']:
- return InlineResponse202Task
- __annotations__ = {
- "task": task,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["task"]) -> 'InlineResponse202Task': ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["task", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["task"]) -> typing.Union['InlineResponse202Task', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["task", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- task: typing.Union['InlineResponse202Task', schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'InlineResponse202':
- return super().__new__(
- cls,
- *args,
- task=task,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli/samples.model.inline_response202_task import InlineResponse202Task
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.py
deleted file mode 100644
index 6fe36fa30..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class InlineResponse202Task(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- href = schemas.StrSchema
- name = schemas.StrSchema
- __annotations__ = {
- "href": href,
- "name": name,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["href"]) -> MetaOapg.properties.href: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["href", "name", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["href"]) -> typing.Union[MetaOapg.properties.href, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["href", "name", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- href: typing.Union[MetaOapg.properties.href, str, schemas.Unset] = schemas.unset,
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'InlineResponse202Task':
- return super().__new__(
- cls,
- *args,
- href=href,
- name=name,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.pyi
deleted file mode 100644
index 6fe36fa30..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/inline_response202_task.pyi
+++ /dev/null
@@ -1,89 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class InlineResponse202Task(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- href = schemas.StrSchema
- name = schemas.StrSchema
- __annotations__ = {
- "href": href,
- "name": name,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["href"]) -> MetaOapg.properties.href: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["href", "name", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["href"]) -> typing.Union[MetaOapg.properties.href, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["href", "name", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- href: typing.Union[MetaOapg.properties.href, str, schemas.Unset] = schemas.unset,
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'InlineResponse202Task':
- return super().__new__(
- cls,
- *args,
- href=href,
- name=name,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.py
deleted file mode 100644
index a6c28f5ab..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class SampleResource(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
- required = {
- "a",
- }
-
- class properties:
- a = schemas.NumberSchema
- b = schemas.NumberSchema
- id = schemas.NumberSchema
- __annotations__ = {
- "a": a,
- "b": b,
- "id": id,
- }
-
- a: MetaOapg.properties.a
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["a"]) -> MetaOapg.properties.a: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["b"]) -> MetaOapg.properties.b: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["id"]) -> MetaOapg.properties.id: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["a", "b", "id", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["a"]) -> MetaOapg.properties.a: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["b"]) -> typing.Union[MetaOapg.properties.b, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["id"]) -> typing.Union[MetaOapg.properties.id, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["a", "b", "id", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- a: typing.Union[MetaOapg.properties.a, decimal.Decimal, int, float, ],
- b: typing.Union[MetaOapg.properties.b, decimal.Decimal, int, float, schemas.Unset] = schemas.unset,
- id: typing.Union[MetaOapg.properties.id, decimal.Decimal, int, float, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SampleResource':
- return super().__new__(
- cls,
- *args,
- a=a,
- b=b,
- id=id,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.pyi
deleted file mode 100644
index a6c28f5ab..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/model/sample_resource.pyi
+++ /dev/null
@@ -1,104 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-
-class SampleResource(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
- required = {
- "a",
- }
-
- class properties:
- a = schemas.NumberSchema
- b = schemas.NumberSchema
- id = schemas.NumberSchema
- __annotations__ = {
- "a": a,
- "b": b,
- "id": id,
- }
-
- a: MetaOapg.properties.a
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["a"]) -> MetaOapg.properties.a: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["b"]) -> MetaOapg.properties.b: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["id"]) -> MetaOapg.properties.id: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["a", "b", "id", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["a"]) -> MetaOapg.properties.a: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["b"]) -> typing.Union[MetaOapg.properties.b, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["id"]) -> typing.Union[MetaOapg.properties.id, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["a", "b", "id", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- a: typing.Union[MetaOapg.properties.a, decimal.Decimal, int, float, ],
- b: typing.Union[MetaOapg.properties.b, decimal.Decimal, int, float, schemas.Unset] = schemas.unset,
- id: typing.Union[MetaOapg.properties.id, decimal.Decimal, int, float, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SampleResource':
- return super().__new__(
- cls,
- *args,
- a=a,
- b=b,
- id=id,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py
index 2706f5ed6..db37e7e4e 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py
@@ -1,16 +1,20 @@
 # coding: utf-8
 
 # flake8: noqa
+"""
+    CloudHarness Sample API
 
-# import all models into this package
-# if you have many models here with many references from one model to another this may
-# raise a RecursionError
-# to avoid this, import only the models that you directly need like:
-# from from cloudharness_cli.samples.model.pet import Pet
-# or import this package, but before doing it, use:
-# import sys
-# sys.setrecursionlimit(n)
+    CloudHarness Sample api
 
-from cloudharness_cli.samples.model.inline_response202 import InlineResponse202
-from cloudharness_cli.samples.model.inline_response202_task import InlineResponse202Task
-from cloudharness_cli.samples.model.sample_resource import SampleResource
+    The version of the OpenAPI document: 0.1.0
+    Contact: cloudharness@metacell.us
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501
+
+
+# import models into model package
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task
+from cloudharness_cli.samples.models.sample_resource import SampleResource
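The hunk above captures the generator migration in miniature: the old `model` package (one schemas-based class per module, each shadowed by a `.pyi` stub) is replaced by a pydantic v2 `models` package exposing the same three classes. A minimal sketch of the new import path, assuming the regenerated client is installed; the field values are invented for illustration:

    # Hedged sketch of the post-migration imports per models/__init__.py above.
    from cloudharness_cli.samples.models import SampleResource

    resource = SampleResource(a=1.0, b=2.0)  # plain pydantic v2 model, no MetaOapg wrapper
    print(resource.to_json())                # -> {"a": 1.0, "b": 2.0}; unset "id" is omitted
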
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py
new file mode 100644
index 000000000..561d0872d
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict
+from typing import Any, ClassVar, Dict, List, Optional
+from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task
+from typing import Optional, Set
+from typing_extensions import Self
+
+class InlineResponse202(BaseModel):
+ """
+ InlineResponse202
+ """ # noqa: E501
+ task: Optional[InlineResponse202Task] = None
+ __properties: ClassVar[List[str]] = ["task"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of InlineResponse202 from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of task
+ if self.task:
+ _dict['task'] = self.task.to_dict()
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of InlineResponse202 from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "task": InlineResponse202Task.from_dict(obj["task"]) if obj.get("task") is not None else None
+ })
+ return _obj
+
+
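Because `to_dict()` explicitly delegates to the nested task's `to_dict()` and `from_dict()` rebuilds it through `InlineResponse202Task.from_dict`, a JSON round trip preserves the nested model rather than leaving a bare dict. A short usage sketch; the href/name values are invented:

    from cloudharness_cli.samples.models.inline_response202 import InlineResponse202

    payload = '{"task": {"href": "/api/operation/op-1", "name": "op-1"}}'  # illustrative
    response = InlineResponse202.from_json(payload)
    assert response.task is not None and response.task.name == "op-1"
    print(response.to_dict())  # {'task': {'href': '/api/operation/op-1', 'name': 'op-1'}}
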
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py
new file mode 100644
index 000000000..5b3d9b647
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing import Optional, Set
+from typing_extensions import Self
+
+class InlineResponse202Task(BaseModel):
+ """
+ InlineResponse202Task
+ """ # noqa: E501
+ href: Optional[StrictStr] = Field(default=None, description="the url where to check the operation status")
+ name: Optional[StrictStr] = None
+ __properties: ClassVar[List[str]] = ["href", "name"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of InlineResponse202Task from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of InlineResponse202Task from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "href": obj.get("href"),
+ "name": obj.get("name")
+ })
+ return _obj
+
+
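The `model_config` above turns on `validate_assignment`, so mutating a field re-runs validation against the `StrictStr` annotations instead of silently accepting bad data. A minimal sketch, with invented values:

    from pydantic import ValidationError
    from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task

    task = InlineResponse202Task(href="/api/operation/op-1", name="op-1")  # illustrative
    try:
        task.href = 123  # StrictStr rejects a non-str on assignment
    except ValidationError as err:
        print(err.error_count())  # 1
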
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/sample_resource.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/sample_resource.py
new file mode 100644
index 000000000..b72474a5f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/models/sample_resource.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt
+from typing import Any, ClassVar, Dict, List, Optional, Union
+from typing import Optional, Set
+from typing_extensions import Self
+
+class SampleResource(BaseModel):
+ """
+
+ """ # noqa: E501
+ a: Union[StrictFloat, StrictInt]
+ b: Optional[Union[StrictFloat, StrictInt]] = None
+ id: Optional[Union[StrictFloat, StrictInt]] = None
+ __properties: ClassVar[List[str]] = ["a", "b", "id"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of SampleResource from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of SampleResource from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "a": obj.get("a"),
+ "b": obj.get("b"),
+ "id": obj.get("id")
+ })
+ return _obj
+
+
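Unlike `b` and `id`, the `a` field is declared without a default, making it the model's only required property and mirroring `required = {"a"}` in the deleted schemas-based class. A quick check, with invented values:

    from pydantic import ValidationError
    from cloudharness_cli.samples.models.sample_resource import SampleResource

    SampleResource(a=3.5)      # fine: "b" and "id" default to None
    try:
        SampleResource(b=1.0)  # missing the required "a"
    except ValidationError as err:
        print(err.errors()[0]["type"])  # "missing"
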
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/__init__.py
deleted file mode 100644
index 48eee2bf6..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.apis.path_to_api import path_to_api
-
-import enum
-
-
-class PathValues(str, enum.Enum):
- ERROR = "/error"
- PING = "/ping"
- VALID = "/valid"
- VALIDCOOKIE = "/valid-cookie"
- SAMPLERESOURCES = "/sampleresources"
- SAMPLERESOURCES_SAMPLERESOURCE_ID = "/sampleresources/{sampleresourceId}"
- OPERATION_ASYNC = "/operation_async"
- OPERATION_SYNC = "/operation_sync"
- OPERATION_SYNC_RESULTS = "/operation_sync_results"
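Everything under `paths/` goes away with this enum: each per-endpoint `__init__.py` removed in the hunks that follow simply re-exported one `PathValues` member, and the per-method modules built their typed request/response plumbing around it. For reference, how the removed layout was addressed (reconstructed from the deleted code; imports only resolve against the pre-migration client):

    from cloudharness_cli.samples.paths import PathValues  # deleted above

    assert PathValues.OPERATION_ASYNC.value == "/operation_async"
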
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/__init__.py
deleted file mode 100644
index 72c9e3077..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.error import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.ERROR
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.py
deleted file mode 100644
index 7e7aee459..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor500(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_500 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor500,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '500': _response_for_500,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _error_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- test sentry is working
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Error(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def error(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._error_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._error_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
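The module above is representative of every per-path module removed in this patch: three `@typing.overload` stubs steer the return type off the `skip_deserialization` flag, and the shared worker (`_error_oapg` here) raises `ApiException` for any non-2xx status. A reconstructed sketch of the old call shape; the client wiring is hypothetical and works only against the pre-migration package:

    import cloudharness_cli.samples
    from cloudharness_cli.samples.paths.error.get import ApiForget  # module deleted above

    api = ApiForget(cloudharness_cli.samples.ApiClient())  # hypothetical default configuration
    response = api.get(skip_deserialization=True)          # body/headers left undeserialized
    print(response.response.status)
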
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.pyi
deleted file mode 100644
index 704e8aa3f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/error/get.pyi
+++ /dev/null
@@ -1,240 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor500(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_500 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor500,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _error_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _error_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- test sentry is working
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Error(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def error(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def error(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._error_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._error_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/__init__.py
deleted file mode 100644
index 654cc7705..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.operation_async import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.OPERATION_ASYNC
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.py
deleted file mode 100644
index 24318033d..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli/samples.model.inline_response202 import InlineResponse202
-
-from . import path
-
-SchemaFor202ResponseBodyApplicationJson = InlineResponse202
-
-
-@dataclass
-class ApiResponseFor202(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor202ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_202 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor202,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor202ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '202': _response_for_202,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def _submit_async_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send an asynchronous operation
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitAsync(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def submit_async(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_async_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_async_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.pyi
deleted file mode 100644
index 86aa6cf42..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_async/get.pyi
+++ /dev/null
@@ -1,230 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli/samples.model.inline_response202 import InlineResponse202
-
-SchemaFor202ResponseBodyApplicationJson = InlineResponse202
-
-
-@dataclass
-class ApiResponseFor202(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor202ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_202 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor202,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor202ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def _submit_async_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_async_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send an asynchronous operation
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitAsync(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def submit_async(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_async(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_async_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_async_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/__init__.py
deleted file mode 100644
index 4e290c44a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.operation_sync import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.OPERATION_SYNC
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.py
deleted file mode 100644
index 17d0d63c1..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.py
+++ /dev/null
@@ -1,233 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-SchemaFor200ResponseBodyApplicationJson = schemas.DictSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '200': _response_for_200,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _submit_sync_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send a synchronous operation
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitSync(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def submit_sync(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.pyi
deleted file mode 100644
index a2ec67557..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync/get.pyi
+++ /dev/null
@@ -1,228 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-SchemaFor200ResponseBodyApplicationJson = schemas.DictSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _submit_sync_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_sync_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send a synchronous operation
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitSync(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def submit_sync(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_sync(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/__init__.py
deleted file mode 100644
index f45000f99..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.operation_sync_results import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.OPERATION_SYNC_RESULTS
\ No newline at end of file
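The deleted `__init__` modules deliberately avoid importing endpoint code eagerly (the comment cites memory and stack-frame cost); callers import one endpoint module directly instead. A sketch of that pattern, with the import paths assumed:

# assumed paths; the endpoint module pulls `path` back in via `from . import path`
from cloudharness_cli.samples.paths.operation_sync_results import path
from cloudharness_cli.samples.paths.operation_sync_results.get import SubmitSyncWithResults
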
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.py
deleted file mode 100644
index 179be8146..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.py
+++ /dev/null
@@ -1,298 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-# Query params
-ASchema = schemas.NumberSchema
-BSchema = schemas.NumberSchema
-RequestRequiredQueryParams = typing_extensions.TypedDict(
- 'RequestRequiredQueryParams',
- {
- 'a': typing.Union[ASchema, decimal.Decimal, int, float, ],
- 'b': typing.Union[BSchema, decimal.Decimal, int, float, ],
- }
-)
-RequestOptionalQueryParams = typing_extensions.TypedDict(
- 'RequestOptionalQueryParams',
- {
- },
- total=False
-)
-
-
-class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
- pass
-
-
-request_query_a = api_client.QueryParameter(
- name="a",
- style=api_client.ParameterStyle.FORM,
- schema=ASchema,
- required=True,
- explode=True,
-)
-request_query_b = api_client.QueryParameter(
- name="b",
- style=api_client.ParameterStyle.FORM,
- schema=BSchema,
- required=True,
- explode=True,
-)
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '200': _response_for_200,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
- used_path = path.value
-
- prefix_separator_iterator = None
- for parameter in (
- request_query_a,
- request_query_b,
- ):
- parameter_data = query_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- if prefix_separator_iterator is None:
- prefix_separator_iterator = parameter.get_prefix_separator_iterator()
- serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
- for serialized_value in serialized_data.values():
- used_path += serialized_value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitSyncWithResults(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def submit_sync_with_results(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_with_results_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_with_results_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
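The query-parameter machinery deleted above serializes the required `a` and `b` values into the request path as form-style, exploded parameters. A hedged call sketch, assuming the module layout shown in the diff headers:

from cloudharness_cli.samples import api_client
from cloudharness_cli.samples.paths.operation_sync_results.get import ApiForget

api = ApiForget(api_client.ApiClient())     # client construction assumed
# both params are required; int, float or decimal.Decimal are accepted
response = api.get(query_params={'a': 1, 'b': 2})
print(response.body)                        # deserialized via schemas.StrSchema
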
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.pyi
deleted file mode 100644
index 1f312bb76..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/operation_sync_results/get.pyi
+++ /dev/null
@@ -1,293 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-# Query params
-ASchema = schemas.NumberSchema
-BSchema = schemas.NumberSchema
-RequestRequiredQueryParams = typing_extensions.TypedDict(
- 'RequestRequiredQueryParams',
- {
- 'a': typing.Union[ASchema, decimal.Decimal, int, float, ],
- 'b': typing.Union[BSchema, decimal.Decimal, int, float, ],
- }
-)
-RequestOptionalQueryParams = typing_extensions.TypedDict(
- 'RequestOptionalQueryParams',
- {
- },
- total=False
-)
-
-
-class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
- pass
-
-
-request_query_a = api_client.QueryParameter(
- name="a",
- style=api_client.ParameterStyle.FORM,
- schema=ASchema,
- required=True,
- explode=True,
-)
-request_query_b = api_client.QueryParameter(
- name="b",
- style=api_client.ParameterStyle.FORM,
- schema=BSchema,
- required=True,
- explode=True,
-)
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _submit_sync_with_results_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
- used_path = path.value
-
- prefix_separator_iterator = None
- for parameter in (
- request_query_a,
- request_query_b,
- ):
- parameter_data = query_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- if prefix_separator_iterator is None:
- prefix_separator_iterator = parameter.get_prefix_separator_iterator()
- serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
- for serialized_value in serialized_data.values():
- used_path += serialized_value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class SubmitSyncWithResults(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def submit_sync_with_results(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def submit_sync_with_results(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_with_results_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._submit_sync_with_results_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/__init__.py
deleted file mode 100644
index 165017f38..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.ping import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.PING
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.py
deleted file mode 100644
index 0dec84887..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-SchemaFor200ResponseBodyApplicationJson = schemas.NumberSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor500(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_500 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor500,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '500': _response_for_500,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _ping_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- test the application is up
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Ping(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def ping(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._ping_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._ping_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
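The ping wrapper above maps 200 to a numeric body and 500 to an empty `ApiResponseFor500`; any non-2xx status raises `exceptions.ApiException` after deserialization. A sketch under the same assumptions as the earlier examples:

from cloudharness_cli.samples import api_client, exceptions
from cloudharness_cli.samples.paths.ping.get import Ping

api = Ping(api_client.ApiClient())          # construction details assumed
try:
    response = api.ping()
    print(float(response.body))             # schemas.NumberSchema wraps the payload
except exceptions.ApiException as err:
    print(err)                              # raised for 500 and any other non-2xx status
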
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.pyi
deleted file mode 100644
index 10149943f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/ping/get.pyi
+++ /dev/null
@@ -1,240 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-SchemaFor200ResponseBodyApplicationJson = schemas.NumberSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor500(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_500 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor500,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _ping_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _ping_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- test the application is up
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class Ping(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def ping(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def ping(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._ping_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._ping_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/__init__.py
deleted file mode 100644
index 116e3210f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.sampleresources import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.SAMPLERESOURCES
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.py
deleted file mode 100644
index 87664d1c0..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-from . import path
-
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.ListSchema
-):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['SampleResource']:
- return SampleResource
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['SampleResource'], typing.List['SampleResource']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'SampleResource':
- return super().__getitem__(i)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_status_code_to_response = {
- '200': _response_for_200,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- List All SampleResources
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetSampleResources(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_sample_resources(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resources_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resources_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
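`SchemaFor200ResponseBodyApplicationJson` above is a `ListSchema` whose items deserialize to `SampleResource`, so iterating the body yields typed instances. A sketch, again with module paths assumed:

from cloudharness_cli.samples import api_client
from cloudharness_cli.samples.paths.sampleresources.get import GetSampleResources

api = GetSampleResources(api_client.ApiClient())   # construction assumed
response = api.get_sample_resources()
for resource in response.body:                     # each item is a SampleResource
    print(resource)
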
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.pyi
deleted file mode 100644
index b10eccbc4..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/get.pyi
+++ /dev/null
@@ -1,255 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.ListSchema
-):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['SampleResource']:
- return SampleResource
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['SampleResource'], typing.List['SampleResource']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'SampleResource':
- return super().__getitem__(i)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_sample_resources_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- List All SampleResources
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetSampleResources(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_sample_resources(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_sample_resources(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resources_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resources_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.py
deleted file mode 100644
index c0bfe3c59..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-from . import path
-
-# body param
-SchemaForRequestBodyApplicationJson = SampleResource
-
-
-request_body_sample_resource = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor201(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_201 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor201,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-_status_code_to_response = {
- '201': _response_for_201,
- '400': _response_for_400,
-}
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Create a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_sample_resource.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='post'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class CreateSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._create_sample_resource_oapg(
- body=body,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForpost(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._create_sample_resource_oapg(
- body=body,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
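After `call_api` returns, every one of these methods runs the same dispatch: look the status code up in `_status_code_to_response`, deserialize when a handler is declared, fall back to the raw response otherwise, and raise on any non-2xx status. A hedged sketch of just that control flow (hypothetical names; only the branching mirrors the deleted code):

def dispatch_response(status: int, handlers: dict):
    # Declared codes ('201', '400', ...) get their deserializing handler;
    # undeclared codes fall back to the raw response.
    handler = handlers.get(str(status))
    api_response = handler() if handler else f"raw response ({status})"
    # Non-2xx always raises, even when the error body was deserialized,
    # so the exception carries the parsed error upstream.
    if not 200 <= status <= 299:
        raise RuntimeError(f"API error {status}: {api_response}")
    return api_response


handlers = {"201": lambda: "created", "400": lambda: "bad request (parsed)"}
print(dispatch_response(201, handlers))  # created
print(dispatch_response(202, handlers))  # raw response (202): undeclared but 2xx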
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.pyi
deleted file mode 100644
index 9c1f60353..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources/post.pyi
+++ /dev/null
@@ -1,306 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-# body param
-SchemaForRequestBodyApplicationJson = SampleResource
-
-
-request_body_sample_resource = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor201(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_201 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor201,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _create_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Create a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_sample_resource.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='post'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class CreateSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def create_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._create_sample_resource_oapg(
- body=body,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForpost(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._create_sample_resource_oapg(
- body=body,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/__init__.py
deleted file mode 100644
index 8acfb53e2..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.sampleresources_sampleresource_id import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.SAMPLERESOURCES_SAMPLERESOURCE_ID
\ No newline at end of file
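Each path package only binds `path` to a `PathValues` member, deferring the heavy endpoint imports noted in the comment above. A sketch of that constant, assuming `PathValues` is a str-backed enum; the literal template is inferred from the `{sampleresourceId}` substitution performed by the sibling modules:

import enum


class PathValues(str, enum.Enum):
    # Literal values inferred from the placeholder substitution in the
    # deleted endpoint modules; illustrative, not copied from the package.
    SAMPLERESOURCES = "/sampleresources"
    SAMPLERESOURCES_SAMPLERESOURCE_ID = "/sampleresources/{sampleresourceId}"


path = PathValues.SAMPLERESOURCES_SAMPLERESOURCE_ID
print(path.value)  # /sampleresources/{sampleresourceId}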
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.py
deleted file mode 100644
index 65962b94c..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.py
+++ /dev/null
@@ -1,282 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor204(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_204 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor204,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '204': _response_for_204,
- '400': _response_for_400,
- '404': _response_for_404,
-}
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Delete a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
- # TODO add cookie handling
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='delete'.upper(),
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class DeleteSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def delete_sample_resource(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_sample_resource_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiFordelete(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def delete(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_sample_resource_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
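The per-parameter loop in `_delete_sample_resource_oapg` serializes each path parameter and splices it into the `{name}` placeholders of the path template. The same substitution, reduced to a self-contained function (simplified: real serialization goes through `api_client.PathParameter.serialize`):

def render_path(template: str, path_params: dict) -> str:
    # Each serialized path parameter replaces its '{name}' placeholder,
    # exactly as the loop over request_path_sampleresource_id does above.
    rendered = template
    for name, value in path_params.items():
        rendered = rendered.replace('{%s}' % name, str(value))
    return rendered


assert render_path(
    "/sampleresources/{sampleresourceId}", {"sampleresourceId": "abc-123"}
) == "/sampleresources/abc-123"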
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.pyi
deleted file mode 100644
index dcf3f0e54..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/delete.pyi
+++ /dev/null
@@ -1,275 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor204(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_204 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor204,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _delete_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Delete a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
- # TODO add cookie handling
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='delete'.upper(),
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class DeleteSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def delete_sample_resource(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_sample_resource_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiFordelete(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- ]: ...
-
- @typing.overload
- def delete(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor204,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_sample_resource_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.py
deleted file mode 100644
index 6c34cbbcd..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.py
+++ /dev/null
@@ -1,315 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-from . import path
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-SchemaFor200ResponseBodyApplicationJson = SampleResource
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '400': _response_for_400,
- '404': _response_for_404,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Get a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_sample_resource(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resource_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resource_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
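The GET variants additionally build the `Accept` header by calling `HTTPHeaderDict.add` once per accepted content type, as in `_get_sample_resource_oapg` above. The accumulation behavior, runnable on its own since urllib3 is already a dependency of this client:

from urllib3._collections import HTTPHeaderDict

_all_accept_content_types = ("application/json",)

headers = HTTPHeaderDict()
for accept_content_type in _all_accept_content_types:
    # Repeated add() calls accumulate values under one header name;
    # urllib3 joins them with ", " on lookup.
    headers.add("Accept", accept_content_type)

print(headers["Accept"])  # application/json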
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.pyi
deleted file mode 100644
index f48bb2436..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/get.pyi
+++ /dev/null
@@ -1,308 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-SchemaFor200ResponseBodyApplicationJson = SampleResource
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_sample_resource_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Get a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_sample_resource(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_sample_resource(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resource_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_sample_resource_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
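For 200 responses, the `OpenApiResponse(content={...})` declaration maps a media type to the schema used for deserialization. A sketch of that selection; stripping media-type parameters before the lookup is an assumption, since the deserializer's internals are not part of this diff:

content_map = {"application/json": "SampleResource schema"}  # stand-in schema


def pick_schema(content_type_header: str):
    # Assumption: parameters such as '; charset=utf-8' are stripped before
    # the media-type lookup; the real deserializer is not shown in this diff.
    media_type = content_type_header.split(";")[0].strip()
    return content_map.get(media_type)


print(pick_schema("application/json; charset=utf-8"))  # SampleResource schema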
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.py
deleted file mode 100644
index 560e5ea4a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.py
+++ /dev/null
@@ -1,382 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-from . import path
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-# body param
-SchemaForRequestBodyApplicationJson = SampleResource
-
-
-request_body_sample_resource = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor202(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_202 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor202,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '202': _response_for_202,
- '400': _response_for_400,
- '404': _response_for_404,
-}
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Update a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_sample_resource.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='put'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class UpdateSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._update_sample_resource_oapg(
- body=body,
- path_params=path_params,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForput(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._update_sample_resource_oapg(
- body=body,
- path_params=path_params,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
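The PUT module removed above follows the same overload pattern, with a required application/json body and a 202 Accepted response. A sketch of the old update call, reusing the client wiring from the previous example (field values are illustrative):

    from cloudharness_cli.samples.model.sample_resource import SampleResource
    from cloudharness_cli.samples.paths.sampleresources_sampleresource_id.put import ApiForput

    api = ApiForput(client)  # `client` as constructed in the earlier sketch
    body = SampleResource(name="example")  # illustrative fields; validated against the schema
    response = api.put(
        body=body,
        path_params={'sampleresourceId': 'sr-1'},
        content_type='application/json',
    )
    assert response.response.status == 202  # deserialized into ApiResponseFor202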
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.pyi
deleted file mode 100644
index 0cfcd4120..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/sampleresources_sampleresource_id/put.pyi
+++ /dev/null
@@ -1,375 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-
-# Path params
-SampleresourceIdSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'sampleresourceId': typing.Union[SampleresourceIdSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_sampleresource_id = api_client.PathParameter(
- name="sampleresourceId",
- style=api_client.ParameterStyle.SIMPLE,
- schema=SampleresourceIdSchema,
- required=True,
-)
-# body param
-SchemaForRequestBodyApplicationJson = SampleResource
-
-
-request_body_sample_resource = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor202(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_202 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor202,
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _update_sample_resource_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Update a SampleResource
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_sampleresource_id,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_sample_resource.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='put'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class UpdateSampleResource(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def update_sample_resource(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._update_sample_resource_oapg(
- body=body,
- path_params=path_params,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForput(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- ]: ...
-
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor202,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def put(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._update_sample_resource_oapg(
- body=body,
- path_params=path_params,
- content_type=content_type,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/__init__.py
deleted file mode 100644
index aa36dcdec..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.valid import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.VALID
\ No newline at end of file
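Each deleted paths/<endpoint>/__init__.py deliberately avoids importing the endpoint classes and only pins the endpoint's path constant, which keeps single-endpoint imports cheap. The pattern in isolation (the "/valid" literal is an assumption about the PathValues enum):

    from cloudharness_cli.samples.paths import PathValues

    path = PathValues.VALID
    print(path.value)  # presumably "/valid", the path this module is bound to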
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.py
deleted file mode 100644
index 0e0a8d5b9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-_auth = [
- 'bearerAuth',
-]
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor401(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_401 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor401,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '401': _response_for_401,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _valid_token_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Check if the token is valid. Get a token by logging into the base url
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ValidToken(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def valid_token(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_token_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_token_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
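The /valid GET client above declares bearerAuth, so a token must be attached to the configuration before the call. A sketch, assuming the old Configuration exposed an access_token attribute for bearer schemes (the token value is illustrative):

    from cloudharness_cli.samples import api_client
    from cloudharness_cli.samples.configuration import Configuration
    from cloudharness_cli.samples.paths.valid.get import ApiForget

    config = Configuration(host="http://localhost:5000/api")
    config.access_token = "eyJhbGciOi..."  # illustrative JWT obtained by logging into the base url
    with api_client.ApiClient(configuration=config) as client:
        response = ApiForget(client).get()
        print(response.body)  # a plain string, per SchemaFor200ResponseBodyApplicationJson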
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.pyi
deleted file mode 100644
index 85232a94e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid/get.pyi
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor401(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_401 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor401,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _valid_token_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _valid_token_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Check if the token is valid. Get a token by logging into the base url
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ValidToken(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def valid_token(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def valid_token(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_token_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_token_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/__init__.py
deleted file mode 100644
index 8b9910f57..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.samples.paths.valid_cookie import Api
-
-from cloudharness_cli.samples.paths import PathValues
-
-path = PathValues.VALIDCOOKIE
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.py
deleted file mode 100644
index 914a07423..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.py
+++ /dev/null
@@ -1,250 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-from . import path
-
-_auth = [
- 'cookieAuth',
-]
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor401(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_401 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor401,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '401': _response_for_401,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _valid_cookie_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Check if the token is valid. Get a token by logging into the base url
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ValidCookie(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def valid_cookie(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_cookie_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_cookie_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
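The /valid-cookie variant above authenticates through cookieAuth rather than a bearer token, and like every generated endpoint it accepts skip_deserialization to hand back the raw urllib3 response. A sketch; sending the session cookie as a default header is an assumption, since the deleted code's own cookie handling is still a TODO:

    from cloudharness_cli.samples.paths.valid_cookie.get import ApiForget

    # `client` as in the earlier sketches; cookie name and value are illustrative.
    client.default_headers['Cookie'] = 'kc-access=abc123'
    raw = ApiForget(client).get(skip_deserialization=True)
    print(raw.response.status, raw.response.data[:80])  # undeserialized body bytes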
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.pyi
deleted file mode 100644
index bfb82b1c1..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/paths/valid_cookie/get.pyi
+++ /dev/null
@@ -1,241 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.samples import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.samples import schemas # noqa: F401
-
-SchemaFor200ResponseBodyApplicationJson = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor401(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_401 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor401,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _valid_cookie_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _valid_cookie_oapg(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Check if the token is valid. Get a token by logging into the base url
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ValidCookie(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def valid_cookie(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def valid_cookie(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_cookie_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._valid_cookie_oapg(
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/test/common/test_models/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/py.typed
similarity index 100%
rename from libraries/client/cloudharness_cli/test/common/test_models/__init__.py
rename to libraries/client/cloudharness_cli/cloudharness_cli/samples/py.typed
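The rename above repurposes an empty test __init__.py as samples/py.typed, the PEP 561 marker file that tells type checkers the package ships inline annotations. For the marker to reach installed environments it must be listed as package data; a hedged packaging sketch (the setup.py layout is an assumption, not part of this patch):

    # setup.py (sketch) -- ship the py.typed markers with the wheel
    from setuptools import setup, find_packages

    setup(
        name="cloudharness_cli",
        packages=find_packages(),
        package_data={"cloudharness_cli": ["*/py.typed"]},
    )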
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/rest.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/rest.py
index 7f79f628b..04de20cef 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/rest.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/samples/rest.py
@@ -3,35 +3,67 @@
"""
CloudHarness Sample API
- CloudHarness Sample api # noqa: E501
+ CloudHarness Sample api
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-import logging
+import io
+import json
+import re
import ssl
-from urllib.parse import urlencode
-import typing
-import certifi
import urllib3
-from urllib3._collections import HTTPHeaderDict
from cloudharness_cli.samples.exceptions import ApiException, ApiValueError
+SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
+RESTResponseType = urllib3.HTTPResponse
+
+
+def is_socks_proxy_url(url):
+ if url is None:
+ return False
+ split_section = url.split("://")
+ if len(split_section) < 2:
+ return False
+ else:
+ return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
+
+
+class RESTResponse(io.IOBase):
+
+ def __init__(self, resp) -> None:
+ self.response = resp
+ self.status = resp.status
+ self.reason = resp.reason
+ self.data = None
+
+ def read(self):
+ if self.data is None:
+ self.data = self.response.data
+ return self.data
+
+ def getheaders(self):
+ """Returns a dictionary of the response headers."""
+ return self.response.headers
-logger = logging.getLogger(__name__)
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.response.headers.get(name, default)
-class RESTClientObject(object):
+class RESTClientObject:
- def __init__(self, configuration, pools_size=4, maxsize=None):
+ def __init__(self, configuration) -> None:
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
- # maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
@@ -40,140 +72,167 @@ def __init__(self, configuration, pools_size=4, maxsize=None):
else:
cert_reqs = ssl.CERT_NONE
- # ca_certs
- if configuration.ssl_ca_cert:
- ca_certs = configuration.ssl_ca_cert
- else:
- # if not set certificate file, use Mozilla's root certificates.
- ca_certs = certifi.where()
-
- addition_pool_args = {}
+ pool_args = {
+ "cert_reqs": cert_reqs,
+ "ca_certs": configuration.ssl_ca_cert,
+ "cert_file": configuration.cert_file,
+ "key_file": configuration.key_file,
+ }
if configuration.assert_hostname is not None:
- addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
+ pool_args['assert_hostname'] = (
+ configuration.assert_hostname
+ )
if configuration.retries is not None:
- addition_pool_args['retries'] = configuration.retries
+ pool_args['retries'] = configuration.retries
+
+ if configuration.tls_server_name:
+ pool_args['server_hostname'] = configuration.tls_server_name
+
if configuration.socket_options is not None:
- addition_pool_args['socket_options'] = configuration.socket_options
+ pool_args['socket_options'] = configuration.socket_options
- if maxsize is None:
- if configuration.connection_pool_maxsize is not None:
- maxsize = configuration.connection_pool_maxsize
- else:
- maxsize = 4
+ if configuration.connection_pool_maxsize is not None:
+ pool_args['maxsize'] = configuration.connection_pool_maxsize
# https pool manager
+ self.pool_manager: urllib3.PoolManager
+
if configuration.proxy:
- self.pool_manager = urllib3.ProxyManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- proxy_url=configuration.proxy,
- proxy_headers=configuration.proxy_headers,
- **addition_pool_args
- )
+ if is_socks_proxy_url(configuration.proxy):
+ from urllib3.contrib.socks import SOCKSProxyManager
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["headers"] = configuration.proxy_headers
+ self.pool_manager = SOCKSProxyManager(**pool_args)
+ else:
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["proxy_headers"] = configuration.proxy_headers
+ self.pool_manager = urllib3.ProxyManager(**pool_args)
else:
- self.pool_manager = urllib3.PoolManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- **addition_pool_args
- )
+ self.pool_manager = urllib3.PoolManager(**pool_args)
def request(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
+ method,
+ url,
+ headers=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ):
"""Perform requests.
:param method: http request method
:param url: http request url
:param headers: http request headers
- :param body: request body, for other types
- :param fields: request parameters for
- `application/x-www-form-urlencoded`
- or `multipart/form-data`
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Default is False.
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
+ :param body: request json body, for `application/json`
+ :param post_params: request post parameters,
+ `application/x-www-form-urlencoded`
+ and `multipart/form-data`
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
"""
method = method.upper()
- assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
- 'PATCH', 'OPTIONS']
-
- if fields and body:
+ assert method in [
+ 'GET',
+ 'HEAD',
+ 'DELETE',
+ 'POST',
+ 'PUT',
+ 'PATCH',
+ 'OPTIONS'
+ ]
+
+ if post_params and body:
raise ApiValueError(
- "body parameter cannot be used with fields parameter."
+ "body parameter cannot be used with post_params parameter."
)
- fields = fields or {}
+ post_params = post_params or {}
headers = headers or {}
- if timeout:
- if isinstance(timeout, (int, float)): # noqa: E501,F821
- timeout = urllib3.Timeout(total=timeout)
- elif (isinstance(timeout, tuple) and
- len(timeout) == 2):
- timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1])
+ timeout = None
+ if _request_timeout:
+ if isinstance(_request_timeout, (int, float)):
+ timeout = urllib3.Timeout(total=_request_timeout)
+ elif (
+ isinstance(_request_timeout, tuple)
+ and len(_request_timeout) == 2
+ ):
+ timeout = urllib3.Timeout(
+ connect=_request_timeout[0],
+ read=_request_timeout[1]
+ )
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
- if 'Content-Type' not in headers and body is None:
+
+ # no content type provided or payload is json
+ content_type = headers.get('Content-Type')
+ if (
+ not content_type
+ or re.search('json', content_type, re.IGNORECASE)
+ ):
+ request_body = None
+ if body is not None:
+ request_body = json.dumps(body)
r = self.pool_manager.request(
method,
url,
- preload_content=not stream,
+ body=request_body,
timeout=timeout,
- headers=headers
+ headers=headers,
+ preload_content=False
)
- elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
+ elif content_type == 'application/x-www-form-urlencoded':
r = self.pool_manager.request(
- method, url,
- body=body,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=False,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
- elif headers['Content-Type'] == 'multipart/form-data':
+ headers=headers,
+ preload_content=False
+ )
+ elif content_type == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type generated by urllib3 will be
# overwritten.
del headers['Content-Type']
+ # Ensures that dict objects are serialized
+ post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a, b) for a, b in post_params]
r = self.pool_manager.request(
- method, url,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=True,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
+ headers=headers,
+ preload_content=False
+ )
# Pass a `string` parameter directly in the body to support
- # other content types than Json when `body` argument is
- # provided in serialized form
+ # other content types than JSON when `body` argument is
+ # provided in serialized form.
elif isinstance(body, str) or isinstance(body, bytes):
- request_body = body
r = self.pool_manager.request(
- method, url,
+ method,
+ url,
+ body=body,
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
+ elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool):
+ request_body = "true" if body else "false"
+ r = self.pool_manager.request(
+ method,
+ url,
body=request_body,
- preload_content=not stream,
+ preload_content=False,
timeout=timeout,
headers=headers)
else:
@@ -184,72 +243,16 @@ def request(
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
- r = self.pool_manager.request(method, url,
- preload_content=not stream,
- timeout=timeout,
- headers=headers)
+ r = self.pool_manager.request(
+ method,
+ url,
+ fields={},
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
except urllib3.exceptions.SSLError as e:
- msg = "{0}\n{1}".format(type(e).__name__, str(e))
+ msg = "\n".join([type(e).__name__, str(e)])
raise ApiException(status=0, reason=msg)
- if not stream:
- # log response body
- logger.debug("response body: %s", r.data)
-
- return r
-
- def GET(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("GET", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def HEAD(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("HEAD", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def OPTIONS(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("OPTIONS", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def DELETE(self, url, headers=None, body=None,
- stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("DELETE", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def POST(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("POST", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PUT(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PUT", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PATCH(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PATCH", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
+ return RESTResponse(r)
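
The rework above collapses the per-verb helpers (GET, POST, PUT, ...) into the
single request() entry point: post_params supersedes fields, _request_timeout
supersedes timeout, responses are created with preload_content=False, and the
raw urllib3 response is wrapped in RESTResponse. A minimal usage sketch,
assuming a hypothetical configuration stub that exposes only the attributes
__init__ reads (every name here that is not in the diff is illustrative):

    from types import SimpleNamespace

    # Hypothetical stand-in for the generated Configuration object.
    configuration = SimpleNamespace(
        verify_ssl=True, ssl_ca_cert=None, cert_file=None, key_file=None,
        assert_hostname=None, retries=None, tls_server_name=None,
        socket_options=None, connection_pool_maxsize=None,
        proxy=None, proxy_headers=None,
    )
    client = RESTClientObject(configuration)
    resp = client.request(
        "POST",
        "https://example.org/api/ping",              # illustrative URL
        headers={"Content-Type": "application/json"},
        body={"message": "hello"},                   # serialized via json.dumps()
        _request_timeout=(3.05, 27),                 # (connect, read) pair
    )
    print(resp.getheader("Content-Type"))
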
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/samples/schemas.py b/libraries/client/cloudharness_cli/cloudharness_cli/samples/schemas.py
deleted file mode 100644
index bf36892bf..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/samples/schemas.py
+++ /dev/null
@@ -1,2463 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from collections import defaultdict
-from datetime import date, datetime, timedelta # noqa: F401
-import functools
-import decimal
-import io
-import re
-import types
-import typing
-import uuid
-
-from dateutil.parser.isoparser import isoparser, _takes_ascii
-import frozendict
-
-from cloudharness_cli.samples.exceptions import (
- ApiTypeError,
- ApiValueError,
-)
-from cloudharness_cli.samples.configuration import (
- Configuration,
-)
-
-
-class Unset(object):
- """
- An instance of this class is set as the default value for object type(dict) properties that are optional
- When a property has an unset value, that property will not be assigned in the dict
- """
- pass
-
-unset = Unset()
-
-none_type = type(None)
-file_type = io.IOBase
-
-
-class FileIO(io.FileIO):
- """
- A class for storing files
- Note: this class is not immutable
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]):
- if isinstance(arg, (io.FileIO, io.BufferedReader)):
- if arg.closed:
- raise ApiValueError('Invalid file state; file is closed and must be open')
- arg.close()
- inst = super(FileIO, cls).__new__(cls, arg.name)
- super(FileIO, inst).__init__(arg.name)
- return inst
- raise ApiValueError('FileIO must be passed arg which contains the open file')
-
- def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]):
- pass
-
-
-def update(d: dict, u: dict):
- """
- Adds u to d
- Where each dict is defaultdict(set)
- """
- if not u:
- return d
- for k, v in u.items():
- if k not in d:
- d[k] = v
- else:
- d[k] = d[k] | v
-
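
The update() helper above merges two path-to-classes maps by set union instead
of overwriting, which is how validation results accumulate across schemas; a
small sketch of that semantics (plain builtins, values are sets):

    d = {('args[0]',): {str}}
    update(d, {('args[0]',): {int}, ('args[0]', 'a'): {tuple}})
    # existing keys are unioned, new keys are copied over
    assert d == {('args[0]',): {str, int}, ('args[0]', 'a'): {tuple}}
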
-
-class ValidationMetadata(frozendict.frozendict):
- """
- A class storing metadata that is needed to validate OpenApi Schema payloads
- """
- def __new__(
- cls,
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
- from_server: bool = False,
- configuration: typing.Optional[Configuration] = None,
- seen_classes: typing.FrozenSet[typing.Type] = frozenset(),
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]] = frozendict.frozendict()
- ):
- """
- Args:
- path_to_item: the path to the current data being instantiated.
- For {'a': [1]} if the code is handling, 1, then the path is ('args[0]', 'a', 0)
- This changes from location to location
- from_server: whether or not this data came from the server
- True when receiving server data
- False when instantiating model with client side data not from the server
- This does not change from location to location
- configuration: the Configuration instance to use
- This is needed because in Configuration:
- - one can disable validation checking
- This does not change from location to location
- seen_classes: when deserializing data that matches multiple schemas, this is used to store
- the schemas that have been traversed. This is used to stop processing when a cycle is seen.
- This changes from location to location
- validated_path_to_schemas: stores the already validated schema classes for a given path location
- This does not change from location to location
- """
- return super().__new__(
- cls,
- path_to_item=path_to_item,
- from_server=from_server,
- configuration=configuration,
- seen_classes=seen_classes,
- validated_path_to_schemas=validated_path_to_schemas
- )
-
- def validation_ran_earlier(self, cls: type) -> bool:
- validated_schemas = self.validated_path_to_schemas.get(self.path_to_item, set())
- validation_ran_earlier = validated_schemas and cls in validated_schemas
- if validation_ran_earlier:
- return True
- if cls in self.seen_classes:
- return True
- return False
-
- @property
- def path_to_item(self) -> typing.Tuple[typing.Union[str, int], ...]:
- return self.get('path_to_item')
-
- @property
- def from_server(self) -> bool:
- return self.get('from_server')
-
- @property
- def configuration(self) -> typing.Optional[Configuration]:
- return self.get('configuration')
-
- @property
- def seen_classes(self) -> typing.FrozenSet[typing.Type]:
- return self.get('seen_classes')
-
- @property
- def validated_path_to_schemas(self) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]]:
- return self.get('validated_path_to_schemas')
-
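
ValidationMetadata is itself a frozendict, so a fresh, immutable instance can
be created at every traversal step. A sketch of the path convention from the
docstring, where ('args[0]', 'a', 0) addresses the 1 inside {'a': [1]}:

    vm = ValidationMetadata(path_to_item=('args[0]', 'a', 0), from_server=True)
    assert vm.path_to_item == ('args[0]', 'a', 0)
    assert vm.from_server is True
    assert not vm.validation_ran_earlier(str)  # nothing validated or seen yet
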
-
-class Singleton:
- """
- Enums and singletons are the same
- The same instance is returned for a given key of (cls, arg, str(arg))
- """
- _instances = {}
-
- def __new__(cls, arg: typing.Any, **kwargs):
- """
- cls base classes: BoolClass, NoneClass, str, decimal.Decimal
- The 3rd key is used in the tuple below for a corner case where an enum contains integer 1
- However 1.0 can also be ingested into that enum schema because 1.0 == 1 and
- Decimal('1.0') == Decimal('1')
- But if we omitted the 3rd value in the key, then Decimal('1.0') would be stored as Decimal('1')
- and json serializing that instance would be '1' rather than the expected '1.0'
- Adding the 3rd value, the str of arg ensures that 1.0 -> Decimal('1.0') which is serialized as 1.0
- """
- key = (cls, arg, str(arg))
- if key not in cls._instances:
- if isinstance(arg, (none_type, bool, BoolClass, NoneClass)):
- inst = super().__new__(cls)
- cls._instances[key] = inst
- else:
- cls._instances[key] = super().__new__(cls, arg)
- return cls._instances[key]
-
- def __repr__(self):
- if isinstance(self, NoneClass):
- return f'<{self.__class__.__name__}: None>'
- elif isinstance(self, BoolClass):
- if bool(self):
- return f'<{self.__class__.__name__}: True>'
- return f'<{self.__class__.__name__}: False>'
- return f'<{self.__class__.__name__}: {super().__repr__()}>'
-
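
The three-part (cls, arg, str(arg)) key is what keeps equal-but-distinct
Decimals apart; a worked example of the 1 vs 1.0 corner case described in the
docstring:

    import decimal

    assert decimal.Decimal('1.0') == decimal.Decimal('1')            # equal values
    assert str(decimal.Decimal('1.0')) != str(decimal.Decimal('1'))  # distinct reprs
    # With str(arg) in the key, the two values cache separately, so an enum
    # member ingested as 1.0 still serializes back out as '1.0', not '1'.
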
-
-class classproperty:
-
- def __init__(self, fget):
- self.fget = fget
-
- def __get__(self, owner_self, owner_cls):
- return self.fget(owner_cls)
-
-
-class NoneClass(Singleton):
- @classproperty
- def NONE(cls):
- return cls(None)
-
- def __bool__(self) -> bool:
- return False
-
-
-class BoolClass(Singleton):
- @classproperty
- def TRUE(cls):
- return cls(True)
-
- @classproperty
- def FALSE(cls):
- return cls(False)
-
- @functools.lru_cache()
- def __bool__(self) -> bool:
- for key, instance in self._instances.items():
- if self is instance:
- return bool(key[1])
- raise ValueError('Unable to find the boolean value of this instance')
-
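
NoneClass and BoolClass stand in for JSON null/true/false so that payload
values stay hashable singletons while keeping natural truthiness; a quick
check of the behavior:

    assert not NoneClass.NONE
    assert bool(BoolClass.TRUE) and not bool(BoolClass.FALSE)
    assert BoolClass.TRUE is BoolClass(True)  # Singleton returns the cached instance
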
-
-class MetaOapgTyped:
- exclusive_maximum: typing.Union[int, float]
- inclusive_maximum: typing.Union[int, float]
- exclusive_minimum: typing.Union[int, float]
- inclusive_minimum: typing.Union[int, float]
- max_items: int
- min_items: int
- discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]]
-
-
- class properties:
- # to hold object properties
- pass
-
- additional_properties: typing.Optional[typing.Type['Schema']]
- max_properties: int
- min_properties: int
- all_of: typing.List[typing.Type['Schema']]
- one_of: typing.List[typing.Type['Schema']]
- any_of: typing.List[typing.Type['Schema']]
- not_schema: typing.Type['Schema']
- max_length: int
- min_length: int
- items: typing.Type['Schema']
-
-
-class Schema:
- """
- the base class of all swagger/openapi schemas/models
- """
- __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass}
- _types: typing.Set[typing.Type]
- MetaOapg = MetaOapgTyped
-
- @staticmethod
- def __get_valid_classes_phrase(input_classes):
- """Returns a string phrase describing what types are allowed"""
- all_classes = list(input_classes)
- all_classes = sorted(all_classes, key=lambda cls: cls.__name__)
- all_class_names = [cls.__name__ for cls in all_classes]
- if len(all_class_names) == 1:
- return "is {0}".format(all_class_names[0])
- return "is one of [{0}]".format(", ".join(all_class_names))
-
- @staticmethod
- def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']:
- if isinstance(item_cls, types.FunctionType):
- # referenced schema
- return item_cls()
- elif isinstance(item_cls, staticmethod):
- # referenced schema
- return item_cls.__func__()
- return item_cls
-
- @classmethod
- def __type_error_message(
- cls, var_value=None, var_name=None, valid_classes=None, key_type=None
- ):
- """
- Keyword Args:
- var_value (any): the variable which has the type_error
- var_name (str): the name of the variable which has the typ error
- valid_classes (tuple): the accepted classes for current_item's
- value
- key_type (bool): False if our value is a value in a dict
- True if it is a key in a dict
- False if our item is an item in a tuple
- """
- key_or_value = "value"
- if key_type:
- key_or_value = "key"
- valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes)
- msg = "Invalid type. Required {1} type {2} and " "passed type was {3}".format(
- var_name,
- key_or_value,
- valid_classes_phrase,
- type(var_value).__name__,
- )
- return msg
-
- @classmethod
- def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False):
- error_msg = cls.__type_error_message(
- var_name=path_to_item[-1],
- var_value=var_value,
- valid_classes=valid_classes,
- key_type=key_type,
- )
- return ApiTypeError(
- error_msg,
- path_to_item=path_to_item,
- valid_classes=valid_classes,
- key_type=key_type,
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- Schema _validate_oapg
- All keyword validation except for type checking was done in calling stack frames
- If those validations passed, the validated classes are collected in path_to_schemas
-
- Returns:
- path_to_schemas: a map of path to schemas
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- base_class = type(arg)
- if base_class not in cls._types:
- raise cls.__get_type_error(
- arg,
- validation_metadata.path_to_item,
- cls._types,
- key_type=False,
- )
-
- path_to_schemas = {validation_metadata.path_to_item: set()}
- path_to_schemas[validation_metadata.path_to_item].add(cls)
- path_to_schemas[validation_metadata.path_to_item].add(base_class)
- return path_to_schemas
-
- @staticmethod
- def _process_schema_classes_oapg(
- schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]
- ):
- """
- Processes and mutates schema_classes
- If a SomeSchema is a subclass of DictSchema then remove DictSchema because it is already included
- """
- if len(schema_classes) < 2:
- return
- if len(schema_classes) > 2 and UnsetAnyTypeSchema in schema_classes:
- schema_classes.remove(UnsetAnyTypeSchema)
- x_schema = schema_type_classes & schema_classes
- if not x_schema:
- return
- x_schema = x_schema.pop()
- if any(c is not x_schema and issubclass(c, x_schema) for c in schema_classes):
- # needed to not have a mro error in get_new_class
- schema_classes.remove(x_schema)
-
- @classmethod
- def __get_new_cls(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]:
- """
- Make a new dynamic class and return an instance of that class
- We are making an instance of cls, but instead of making cls
- make a new class, new_cls
- which includes dynamic bases including cls
- return an instance of that new class
-
- Dict property + List Item Assignment Use cases:
- 1. value is NOT an instance of the required schema class
- the value is validated by _validate_oapg
- _validate_oapg returns a key value pair
- where the key is the path to the item, and the value will be the required manufactured class
- made out of the matching schemas
- 2. value is an instance of the correct schema type
- the value is NOT validated by _validate_oapg, _validate_oapg only checks that the instance is of the correct schema type
- for this value, _validate_oapg does NOT return an entry for it in _path_to_schemas
- and in list/dict _get_items_oapg,_get_properties_oapg the value will be directly assigned
- because value is of the correct type, and validation was run earlier when the instance was created
- """
- _path_to_schemas = {}
- if validation_metadata.validated_path_to_schemas:
- update(_path_to_schemas, validation_metadata.validated_path_to_schemas)
- if not validation_metadata.validation_ran_earlier(cls):
- other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- # loop through it make a new class for each entry
- # do not modify the returned result because it is cached and we would be modifying the cached value
- path_to_schemas = {}
- for path, schema_classes in _path_to_schemas.items():
- """
- Use cases
- 1. N number of schema classes + enum + type != bool/None, classes in path_to_schemas: tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
- needs Singleton added
- 2. N number of schema classes + enum + type == bool/None, classes in path_to_schemas: BoolClass/NoneClass
- Singleton already added
- 3. N number of schema classes, classes in path_to_schemas: BoolClass/NoneClass/tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
- """
- cls._process_schema_classes_oapg(schema_classes)
- enum_schema = any(
- issubclass(this_cls, EnumBase) for this_cls in schema_classes)
- inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set)
- chosen_schema_classes = schema_classes - inheritable_primitive_type
- suffix = tuple(inheritable_primitive_type)
- if enum_schema and suffix[0] not in {NoneClass, BoolClass}:
- suffix = (Singleton,) + suffix
-
- used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix
- mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes)
- path_to_schemas[path] = mfg_cls
-
- return path_to_schemas
-
- @classmethod
- def _get_new_instance_without_conversion_oapg(
- cls,
- arg: typing.Any,
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- # We have a Dynamic class and we are making an instance of it
- if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase):
- properties = cls._get_properties_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, properties)
- elif issubclass(cls, tuple) and issubclass(cls, ListBase):
- items = cls._get_items_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, items)
- """
- str = openapi str, date, and datetime
- decimal.Decimal = openapi int and float
- FileIO = openapi binary type and the user inputs a file
- bytes = openapi binary type and the user inputs bytes
- """
- return super(Schema, cls).__new__(cls, arg)
-
- @classmethod
- def from_openapi_data_oapg(
- cls,
- arg: typing.Union[
- str,
- date,
- datetime,
- int,
- float,
- decimal.Decimal,
- bool,
- None,
- 'Schema',
- dict,
- frozendict.frozendict,
- tuple,
- list,
- io.FileIO,
- io.BufferedReader,
- bytes
- ],
- _configuration: typing.Optional[Configuration]
- ):
- """
- Schema from_openapi_data_oapg
- """
- from_server = True
- validated_path_to_schemas = {}
- arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas)
- validation_metadata = ValidationMetadata(
- from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas)
- path_to_schemas = cls.__get_new_cls(arg, validation_metadata)
- new_cls = path_to_schemas[validation_metadata.path_to_item]
- new_inst = new_cls._get_new_instance_without_conversion_oapg(
- arg,
- validation_metadata.path_to_item,
- path_to_schemas
- )
- return new_inst
-
- @staticmethod
- def __get_input_dict(*args, **kwargs) -> frozendict.frozendict:
- input_dict = {}
- if args and isinstance(args[0], (dict, frozendict.frozendict)):
- input_dict.update(args[0])
- if kwargs:
- input_dict.update(kwargs)
- return frozendict.frozendict(input_dict)
-
- @staticmethod
- def __remove_unsets(kwargs):
- return {key: val for key, val in kwargs.items() if val is not unset}
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
- """
- Schema __new__
-
- Args:
- args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): the value
- kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): dict values
- _configuration: contains the Configuration that enables json schema validation keywords
- like minItems, minLength etc
-
- Note: double underscores are used here because pycharm thinks that these variables
- are instance properties if they are named normally :(
- """
- __kwargs = cls.__remove_unsets(kwargs)
- if not args and not __kwargs:
- raise TypeError(
- 'No input given. args or kwargs must be given.'
- )
- if not __kwargs and args and not isinstance(args[0], dict):
- __arg = args[0]
- else:
- __arg = cls.__get_input_dict(*args, **__kwargs)
- __from_server = False
- __validated_path_to_schemas = {}
- __arg = cast_to_allowed_types(
- __arg, __from_server, __validated_path_to_schemas)
- __validation_metadata = ValidationMetadata(
- configuration=_configuration, from_server=__from_server, validated_path_to_schemas=__validated_path_to_schemas)
- __path_to_schemas = cls.__get_new_cls(__arg, __validation_metadata)
- __new_cls = __path_to_schemas[__validation_metadata.path_to_item]
- return __new_cls._get_new_instance_without_conversion_oapg(
- __arg,
- __validation_metadata.path_to_item,
- __path_to_schemas
- )
-
- def __init__(
- self,
- *args: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
- _configuration: typing.Optional[Configuration] = None,
- **kwargs: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
- ]
- ):
- """
- this is needed to fix 'Unexpected argument' warning in pycharm
- this code does nothing because all Schema instances are immutable
- this means that all input data is passed into and used in new, and after the new instance is made
- no new attributes are assigned and init is not used
- """
- pass
-
-"""
-import itertools
-data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool')
-type_to_cls = {
- 'None': 'NoneClass',
- 'FrozenDict': 'frozendict.frozendict',
- 'Tuple': 'tuple',
- 'Str': 'str',
- 'Decimal': 'decimal.Decimal',
- 'Bool': 'BoolClass'
-}
-cls_tuples = [v for v in itertools.combinations(data_types, 5)]
-typed_classes = [f"class {''.join(cls_tuple)}Mixin({', '.join(type_to_cls[typ] for typ in cls_tuple)}):\n pass" for cls_tuple in cls_tuples]
-for cls in typed_classes:
- print(cls)
-object_classes = [f"{''.join(cls_tuple)}Mixin = object" for cls_tuple in cls_tuples]
-for cls in object_classes:
- print(cls)
-"""
-if typing.TYPE_CHECKING:
- # qty 1
- NoneMixin = NoneClass
- FrozenDictMixin = frozendict.frozendict
- TupleMixin = tuple
- StrMixin = str
- DecimalMixin = decimal.Decimal
- BoolMixin = BoolClass
- BytesMixin = bytes
- FileMixin = FileIO
- # qty 2
- class BinaryMixin(bytes, FileIO):
- pass
- class NoneFrozenDictMixin(NoneClass, frozendict.frozendict):
- pass
- class NoneTupleMixin(NoneClass, tuple):
- pass
- class NoneStrMixin(NoneClass, str):
- pass
- class NoneDecimalMixin(NoneClass, decimal.Decimal):
- pass
- class NoneBoolMixin(NoneClass, BoolClass):
- pass
- class FrozenDictTupleMixin(frozendict.frozendict, tuple):
- pass
- class FrozenDictStrMixin(frozendict.frozendict, str):
- pass
- class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal):
- pass
- class FrozenDictBoolMixin(frozendict.frozendict, BoolClass):
- pass
- class TupleStrMixin(tuple, str):
- pass
- class TupleDecimalMixin(tuple, decimal.Decimal):
- pass
- class TupleBoolMixin(tuple, BoolClass):
- pass
- class StrDecimalMixin(str, decimal.Decimal):
- pass
- class StrBoolMixin(str, BoolClass):
- pass
- class DecimalBoolMixin(decimal.Decimal, BoolClass):
- pass
- # qty 3
- class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple):
- pass
- class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str):
- pass
- class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal):
- pass
- class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass):
- pass
- class NoneTupleStrMixin(NoneClass, tuple, str):
- pass
- class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal):
- pass
- class NoneTupleBoolMixin(NoneClass, tuple, BoolClass):
- pass
- class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal):
- pass
- class NoneStrBoolMixin(NoneClass, str, BoolClass):
- pass
- class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str):
- pass
- class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass):
- pass
- class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal):
- pass
- class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass):
- pass
- class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalMixin(tuple, str, decimal.Decimal):
- pass
- class TupleStrBoolMixin(tuple, str, BoolClass):
- pass
- class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass):
- pass
- class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass):
- pass
- # qty 4
- class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str):
- pass
- class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass):
- pass
- class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal):
- pass
- class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass):
- pass
- class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal):
- pass
- class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass):
- pass
- class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass):
- pass
- class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass):
- pass
- class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes):
- pass
-else:
- # qty 1
- class NoneMixin:
- _types = {NoneClass}
- class FrozenDictMixin:
- _types = {frozendict.frozendict}
- class TupleMixin:
- _types = {tuple}
- class StrMixin:
- _types = {str}
- class DecimalMixin:
- _types = {decimal.Decimal}
- class BoolMixin:
- _types = {BoolClass}
- class BytesMixin:
- _types = {bytes}
- class FileMixin:
- _types = {FileIO}
- # qty 2
- class BinaryMixin:
- _types = {bytes, FileIO}
- class NoneFrozenDictMixin:
- _types = {NoneClass, frozendict.frozendict}
- class NoneTupleMixin:
- _types = {NoneClass, tuple}
- class NoneStrMixin:
- _types = {NoneClass, str}
- class NoneDecimalMixin:
- _types = {NoneClass, decimal.Decimal}
- class NoneBoolMixin:
- _types = {NoneClass, BoolClass}
- class FrozenDictTupleMixin:
- _types = {frozendict.frozendict, tuple}
- class FrozenDictStrMixin:
- _types = {frozendict.frozendict, str}
- class FrozenDictDecimalMixin:
- _types = {frozendict.frozendict, decimal.Decimal}
- class FrozenDictBoolMixin:
- _types = {frozendict.frozendict, BoolClass}
- class TupleStrMixin:
- _types = {tuple, str}
- class TupleDecimalMixin:
- _types = {tuple, decimal.Decimal}
- class TupleBoolMixin:
- _types = {tuple, BoolClass}
- class StrDecimalMixin:
- _types = {str, decimal.Decimal}
- class StrBoolMixin:
- _types = {str, BoolClass}
- class DecimalBoolMixin:
- _types = {decimal.Decimal, BoolClass}
- # qty 3
- class NoneFrozenDictTupleMixin:
- _types = {NoneClass, frozendict.frozendict, tuple}
- class NoneFrozenDictStrMixin:
- _types = {NoneClass, frozendict.frozendict, str}
- class NoneFrozenDictDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal}
- class NoneFrozenDictBoolMixin:
- _types = {NoneClass, frozendict.frozendict, BoolClass}
- class NoneTupleStrMixin:
- _types = {NoneClass, tuple, str}
- class NoneTupleDecimalMixin:
- _types = {NoneClass, tuple, decimal.Decimal}
- class NoneTupleBoolMixin:
- _types = {NoneClass, tuple, BoolClass}
- class NoneStrDecimalMixin:
- _types = {NoneClass, str, decimal.Decimal}
- class NoneStrBoolMixin:
- _types = {NoneClass, str, BoolClass}
- class NoneDecimalBoolMixin:
- _types = {NoneClass, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrMixin:
- _types = {frozendict.frozendict, tuple, str}
- class FrozenDictTupleDecimalMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal}
- class FrozenDictTupleBoolMixin:
- _types = {frozendict.frozendict, tuple, BoolClass}
- class FrozenDictStrDecimalMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal}
- class FrozenDictStrBoolMixin:
- _types = {frozendict.frozendict, str, BoolClass}
- class FrozenDictDecimalBoolMixin:
- _types = {frozendict.frozendict, decimal.Decimal, BoolClass}
- class TupleStrDecimalMixin:
- _types = {tuple, str, decimal.Decimal}
- class TupleStrBoolMixin:
- _types = {tuple, str, BoolClass}
- class TupleDecimalBoolMixin:
- _types = {tuple, decimal.Decimal, BoolClass}
- class StrDecimalBoolMixin:
- _types = {str, decimal.Decimal, BoolClass}
- # qty 4
- class NoneFrozenDictTupleStrMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str}
- class NoneFrozenDictTupleDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal}
- class NoneFrozenDictTupleBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, BoolClass}
- class NoneFrozenDictStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal}
- class NoneFrozenDictStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, BoolClass}
- class NoneFrozenDictDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal}
- class NoneTupleStrBoolMixin:
- _types = {NoneClass, tuple, str, BoolClass}
- class NoneTupleDecimalBoolMixin:
- _types = {NoneClass, tuple, decimal.Decimal, BoolClass}
- class NoneStrDecimalBoolMixin:
- _types = {NoneClass, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal}
- class FrozenDictTupleStrBoolMixin:
- _types = {frozendict.frozendict, tuple, str, BoolClass}
- class FrozenDictTupleDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class FrozenDictStrDecimalBoolMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class TupleStrDecimalBoolMixin:
- _types = {tuple, str, decimal.Decimal, BoolClass}
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal}
- class NoneFrozenDictTupleStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass}
- class NoneFrozenDictTupleDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class NoneFrozenDictStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalBoolMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes}
-
-
-class ValidatorBase:
- @staticmethod
- def _is_json_validation_enabled_oapg(schema_keyword, configuration=None):
- """Returns true if JSON schema validation is enabled for the specified
- validation keyword. This can be used to skip JSON schema structural validation
- as requested in the configuration.
- Note: the suffix _oapg stands for openapi python (experimental) generator and
- it has been added to prevent collisions with other methods and properties
-
- Args:
- schema_keyword (string): the name of a JSON schema validation keyword.
- configuration (Configuration): the configuration class.
- """
-
- return (configuration is None or
- not hasattr(configuration, '_disabled_client_side_validations') or
- schema_keyword not in configuration._disabled_client_side_validations)
-
- @staticmethod
- def _raise_validation_errror_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""):
- raise ApiValueError(
- "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format(
- value=value,
- constraint_msg=constraint_msg,
- constraint_value=constraint_value,
- additional_txt=additional_txt,
- path_to_item=path_to_item,
- )
- )
-
-
-class EnumBase:
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- EnumBase _validate_oapg
- Validates that arg is in the enum's allowed values
- """
- try:
- cls.MetaOapg.enum_value_to_name[arg]
- except KeyError:
- raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys()))
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class BoolBase:
- def is_true_oapg(self) -> bool:
- """
- A replacement for x is True
- True if the instance is a BoolClass True Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self)
-
- def is_false_oapg(self) -> bool:
- """
- A replacement for x is False
- True if the instance is a BoolClass False Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self) is False
-
-
-class NoneBase:
- def is_none_oapg(self) -> bool:
- """
- A replacement for x is None
- True if the instance is a NoneClass None Singleton
- """
- if issubclass(self.__class__, NoneClass):
- return True
- return False
-
-
-class StrBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_str_oapg(self) -> str:
- return self
-
- @property
- def as_date_oapg(self) -> date:
- raise Exception('not implemented')
-
- @property
- def as_datetime_oapg(self) -> datetime:
- raise Exception('not implemented')
-
- @property
- def as_decimal_oapg(self) -> decimal.Decimal:
- raise Exception('not implemented')
-
- @property
- def as_uuid_oapg(self) -> uuid.UUID:
- raise Exception('not implemented')
-
- @classmethod
- def __check_str_validations(
- cls,
- arg: str,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_length') and
- len(arg) > cls.MetaOapg.max_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be less than or equal to",
- constraint_value=cls.MetaOapg.max_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_length') and
- len(arg) < cls.MetaOapg.min_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'regex')):
- for regex_dict in cls.MetaOapg.regex:
- flags = regex_dict.get('flags', 0)
- if not re.search(regex_dict['pattern'], arg, flags=flags):
- if flags != 0:
- # Don't print the regex flags if the flags are not
- # specified in the OAS document.
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item,
- additional_txt=" with flags=`{}`".format(flags)
- )
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- StrBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, str):
- cls.__check_str_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class UUIDBase:
- @property
- @functools.lru_cache()
- def as_uuid_oapg(self) -> uuid.UUID:
- return uuid.UUID(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- uuid.UUID(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- UUIDBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class CustomIsoparser(isoparser):
-
- @_takes_ascii
- def parse_isodatetime(self, dt_str):
- components, pos = self._parse_isodate(dt_str)
- if len(dt_str) > pos:
- if self._sep is None or dt_str[pos:pos + 1] == self._sep:
- components += self._parse_isotime(dt_str[pos + 1:])
- else:
- raise ValueError('String contains unknown ISO components')
-
- if len(components) > 3 and components[3] == 24:
- components[3] = 0
- return datetime(*components) + timedelta(days=1)
-
- if len(components) <= 3:
- raise ValueError('Value is not a datetime')
-
- return datetime(*components)
-
- @_takes_ascii
- def parse_isodate(self, datestr):
- components, pos = self._parse_isodate(datestr)
-
- if len(datestr) > pos:
- raise ValueError('String contains invalid time components')
-
- if len(components) > 3:
- raise ValueError('String contains invalid time components')
-
- return date(*components)
-
-
-DEFAULT_ISOPARSER = CustomIsoparser()
-
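
DEFAULT_ISOPARSER accepts str or bytes (via @_takes_ascii) and rejects a time
component where only a date is expected; a usage sketch with the module's own
date/datetime imports:

    assert DEFAULT_ISOPARSER.parse_isodate('2024-02-06') == date(2024, 2, 6)
    assert DEFAULT_ISOPARSER.parse_isodatetime(
        '2024-02-06T13:23:28') == datetime(2024, 2, 6, 13, 23, 28)
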
-
-class DateBase:
- @property
- @functools.lru_cache()
- def as_date_oapg(self) -> date:
- return DEFAULT_ISOPARSER.parse_isodate(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodate(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 date format. "
- "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- DateBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DateTimeBase:
- @property
- @functools.lru_cache()
- def as_datetime_oapg(self) -> datetime:
- return DEFAULT_ISOPARSER.parse_isodatetime(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodatetime(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 datetime format. "
- "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DateTimeBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DecimalBase:
- """
- A class for storing decimals that are sent over the wire as strings
- These schemas must remain based on StrBase rather than NumberBase
- because picking base classes must be deterministic
- """
-
- @property
- @functools.lru_cache()
- def as_decimal_oapg(self) -> decimal.Decimal:
- return decimal.Decimal(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- decimal.Decimal(arg)
- return True
- except decimal.InvalidOperation:
- raise ApiValueError(
- "Value cannot be converted to a decimal. "
- "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DecimalBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class NumberBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- """
- Note: for some numbers like 9.0 they could be represented as an
- integer but our code chooses to store them as
- >>> Decimal('9.0').as_tuple()
- DecimalTuple(sign=0, digits=(9, 0), exponent=-1)
- so we can tell that the value came from a float and convert it back to a float
- during later serialization
- """
- if self.as_tuple().exponent < 0:
- # this could be represented as an integer but should be represented as a float
- # because that's what it was serialized from
- raise ApiValueError(f'{self} is not an integer')
- self._as_int = int(self)
- return self._as_int
-
- @property
- def as_float_oapg(self) -> float:
- try:
- return self._as_float
- except AttributeError:
- if self.as_tuple().exponent >= 0:
- raise ApiValueError(f'{self} is not a float')
- self._as_float = float(self)
- return self._as_float
-
- @classmethod
- def __check_numeric_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if cls._is_json_validation_enabled_oapg('multipleOf',
- validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
- multiple_of_value = cls.MetaOapg.multiple_of
- if (not (float(arg) / multiple_of_value).is_integer()):
- # Note 'multipleOf' will be as good as the floating point arithmetic.
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="value must be a multiple of",
- constraint_value=multiple_of_value,
- path_to_item=validation_metadata.path_to_item
- )
-
- checking_max_or_min_values = any(
- hasattr(cls.MetaOapg, validation_key) for validation_key in {
- 'exclusive_maximum',
- 'inclusive_maximum',
- 'exclusive_minimum',
- 'inclusive_minimum',
- }
- )
- if not checking_max_or_min_values:
- return
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_maximum') and
- arg >= cls.MetaOapg.exclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than",
- constraint_value=cls.MetaOapg.exclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_maximum') and
- arg > cls.MetaOapg.inclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than or equal to",
- constraint_value=cls.MetaOapg.inclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_minimum') and
- arg <= cls.MetaOapg.exclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than",
- constraint_value=cls.MetaOapg.exclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_minimum') and
- arg < cls.MetaOapg.inclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than or equal to",
- constraint_value=cls.MetaOapg.inclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- NumberBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, decimal.Decimal):
- cls.__check_numeric_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
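
The exponent test in as_int_oapg/as_float_oapg above is what remembers whether
a number was deserialized from 9 or from 9.0; the underlying Decimal behavior:

    assert decimal.Decimal('9').as_tuple().exponent == 0     # int-like
    assert decimal.Decimal('9.0').as_tuple().exponent == -1  # float-like
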
-
-class ListBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def __validate_items(cls, list_items, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for items are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- list_items: the input list of items
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
-
- # if we have definitions for an items schema, use it
- # otherwise accept anything
- item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema)
- item_cls = cls._get_class_oapg(item_cls)
- path_to_schemas = {}
- for i, value in enumerate(list_items):
- item_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=validation_metadata.path_to_item+(i,),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if item_validation_metadata.validation_ran_earlier(item_cls):
- continue
- other_path_to_schemas = item_cls._validate_oapg(
- value, validation_metadata=item_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_tuple_validations(
- cls, arg,
- validation_metadata: ValidationMetadata):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_items') and
- len(arg) > cls.MetaOapg.max_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be less than or equal to",
- constraint_value=cls.MetaOapg.max_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_items') and
- len(arg) < cls.MetaOapg.min_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
- unique_items = set(arg)
- if len(arg) > len(unique_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="duplicate items were found, and the tuple must not contain duplicates because",
- constraint_value='unique_items==True',
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- ListBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False) which are enums
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, tuple):
- cls.__check_tuple_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, tuple):
- return _path_to_schemas
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_items_oapg(
- cls: 'Schema',
- arg: typing.List[typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- '''
- ListBase _get_items_oapg
- '''
- cast_items = []
-
- for i, value in enumerate(arg):
- item_path_to_item = path_to_item + (i,)
- item_cls = path_to_schemas[item_path_to_item]
- new_value = item_cls._get_new_instance_without_conversion_oapg(
- value,
- item_path_to_item,
- path_to_schemas
- )
- cast_items.append(new_value)
-
- return cast_items
-
-
-class Discriminable:
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args):
- if not args or args and disc_property_name not in args[0]:
- # The input data does not contain the discriminator property
- raise ApiValueError(
- "Cannot deserialize input data due to missing discriminator. "
- "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item)
- )
-
- @classmethod
- def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str):
- """
- Used in schemas with discriminators
- """
- if not hasattr(cls.MetaOapg, 'discriminator'):
- return None
- disc = cls.MetaOapg.discriminator()
- if disc_property_name not in disc:
- return None
- discriminated_cls = disc[disc_property_name].get(disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if not hasattr(cls, 'MetaOapg'):
- return None
- elif not (
- hasattr(cls.MetaOapg, 'all_of') or
- hasattr(cls.MetaOapg, 'one_of') or
- hasattr(cls.MetaOapg, 'any_of')
- ):
- return None
- # TODO stop traveling if a cycle is hit
- if hasattr(cls.MetaOapg, 'all_of'):
- for allof_cls in cls.MetaOapg.all_of():
- discriminated_cls = allof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'one_of'):
- for oneof_cls in cls.MetaOapg.one_of():
- discriminated_cls = oneof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'any_of'):
- for anyof_cls in cls.MetaOapg.any_of():
- discriminated_cls = anyof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- return None
-
-
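In OpenAPI terms, the lookup above implements a discriminator mapping: a property value in the payload selects a concrete schema class, searched recursively through allOf/oneOf/anyOf. A minimal standalone sketch of that resolution (class and property names are hypothetical):

    class Cat: pass
    class Dog: pass

    # discriminator() returns {property_name: {payload_value: schema_class}}
    discriminator = {'pet_type': {'cat': Cat, 'dog': Dog}}

    payload = {'pet_type': 'dog', 'name': 'Rex'}
    resolved = discriminator['pet_type'].get(payload['pet_type'])
    assert resolved is Dog
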
-class DictBase(Discriminable, ValidatorBase):
-
- @classmethod
- def __validate_arg_presence(cls, arg):
- """
- Ensures that:
- - all required arguments are passed in
- - the input variable names are valid
- - present in properties or
- - accepted because additionalProperties exists
- Exceptions will be raised if:
- - invalid arguments were passed in
- - a var_name is invalid if additional_properties == NotAnyTypeSchema
- and var_name not in properties.__annotations__
- - required properties were not passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- seen_required_properties = set()
- invalid_arguments = []
- required_property_names = getattr(cls.MetaOapg, 'required', set())
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name in arg:
- if property_name in required_property_names:
- seen_required_properties.add(property_name)
- elif property_name in property_annotations:
- continue
- elif additional_properties is not NotAnyTypeSchema:
- continue
- else:
- invalid_arguments.append(property_name)
- missing_required_arguments = list(required_property_names - seen_required_properties)
- if missing_required_arguments:
- missing_required_arguments.sort()
- raise ApiTypeError(
- "{} is missing {} required argument{}: {}".format(
- cls.__name__,
- len(missing_required_arguments),
- "s" if len(missing_required_arguments) > 1 else "",
- missing_required_arguments
- )
- )
- if invalid_arguments:
- invalid_arguments.sort()
- raise ApiTypeError(
- "{} was passed {} invalid argument{}: {}".format(
- cls.__name__,
- len(invalid_arguments),
- "s" if len(invalid_arguments) > 1 else "",
- invalid_arguments
- )
- )
-
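Schematically, the presence check above reduces to set arithmetic over the payload keys; a toy illustration (property names are hypothetical):

    required = {'name'}
    known_properties = {'name', 'size'}
    payload = {'name': 'pvc-1', 'labels': {}}

    missing = required - payload.keys()   # -> set(), all required keys present
    # extra keys are only an error when additionalProperties is disallowed
    invalid = [k for k in payload if k not in known_properties]
    assert not missing and invalid == ['labels']
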
- @classmethod
- def __validate_args(cls, arg, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for properties are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- path_to_schemas = {}
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name, value in arg.items():
- path_to_item = validation_metadata.path_to_item+(property_name,)
- if property_name in property_annotations:
- schema = property_annotations[property_name]
- elif additional_properties is not NotAnyTypeSchema:
- if additional_properties is UnsetAnyTypeSchema:
- """
- If additionalProperties is unset and this path_to_item does not yet have
- any validations on it, validate it.
- If it already has validations on it, skip this validation.
- """
- if path_to_item in path_to_schemas:
- continue
- schema = additional_properties
- else:
- raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format(
- value, cls, validation_metadata.path_to_item+(property_name,)
- ))
- schema = cls._get_class_oapg(schema)
- arg_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=path_to_item,
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if arg_validation_metadata.validation_ran_earlier(schema):
- continue
- other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_dict_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_properties') and
- len(arg) > cls.MetaOapg.max_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be less than or equal to",
- constraint_value=cls.MetaOapg.max_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_properties') and
- len(arg) < cls.MetaOapg.min_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DictBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False) which are enums
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, frozendict.frozendict):
- cls.__check_dict_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, frozendict.frozendict):
- return _path_to_schemas
- cls.__validate_arg_presence(arg)
- other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- try:
- discriminator = cls.MetaOapg.discriminator()
- except AttributeError:
- return _path_to_schemas
- # discriminator exists
- disc_prop_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg)
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
- cls.__name__,
- disc_prop_name,
- list(discriminator[disc_prop_name].keys()),
- validation_metadata.path_to_item + (disc_prop_name,)
- )
- )
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if updated_vm.validation_ran_earlier(discriminated_cls):
- return _path_to_schemas
- other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_properties_oapg(
- cls,
- arg: typing.Dict[str, typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- """
- DictBase _get_properties_oapg, this is how properties are set
- These values already passed validation
- """
- dict_items = {}
-
- for property_name_js, value in arg.items():
- property_path_to_item = path_to_item + (property_name_js,)
- property_cls = path_to_schemas[property_path_to_item]
- new_value = property_cls._get_new_instance_without_conversion_oapg(
- value,
- property_path_to_item,
- path_to_schemas
- )
- dict_items[property_name_js] = new_value
-
- return dict_items
-
- def __setattr__(self, name: str, value: typing.Any):
- if not isinstance(self, FileIO):
- raise AttributeError('property setting not supported on immutable instances')
-
- def __getattr__(self, name: str):
- """
- for instance.name access
- Properties are only type hinted for required properties
- so that hasattr(instance, 'optionalProp') is False when that key is not present
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- if name not in self.__class__.__annotations__:
- raise AttributeError(f"{self} has no attribute '{name}'")
- try:
- value = self[name]
- return value
- except KeyError as ex:
- raise AttributeError(str(ex))
-
- def __getitem__(self, name: str):
- """
- dict_instance[name] accessor
- key errors thrown
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- return super().__getitem__(name)
-
- def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]:
- # dict_instance[name] accessor
- if not isinstance(self, frozendict.frozendict):
- raise NotImplementedError()
- try:
- return super().__getitem__(name)
- except KeyError:
- return unset
-
-
-def cast_to_allowed_types(
- arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader],
- from_server: bool,
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]],
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
-) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]:
- """
- Casts the input payload arg into the allowed types
- The input validated_path_to_schemas is mutated by running this function
-
- When from_server is False then
- - date/datetime is cast to str
- - int/float is cast to Decimal
-
-    If a Schema instance is passed in, it is converted back to a primitive instance, because
-    one may need to validate that data against the original Schema class AND additional different classes,
-    and those additional classes will need to be added to the new manufactured class for that payload.
-    If the code kept the payload as a Schema instance, it would fail to validate against other
-    Schema classes, and the code wouldn't be able to manufacture a new class that includes all valid schemas.
- TODO: store the validated schema classes in validation_metadata
-
- Args:
- arg: the payload
- from_server: whether this payload came from the server or not
- validated_path_to_schemas: a dict that stores the validated classes at any path location in the payload
- """
- if isinstance(arg, Schema):
- # store the already run validations
- schema_classes = set()
- source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__
- if not source_schema_was_unset:
- """
- Do not include UnsetAnyTypeSchema and its base class because
- it did not exist in the original spec schema definition
- It was added to ensure that all instances are of type Schema and the allowed base types
- """
- for cls in arg.__class__.__bases__:
- if cls is Singleton:
- # Skip Singleton
- continue
- schema_classes.add(cls)
- validated_path_to_schemas[path_to_item] = schema_classes
-
- type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}")
- if isinstance(arg, str):
- return str(arg)
- elif isinstance(arg, (dict, frozendict.frozendict)):
- return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()})
- elif isinstance(arg, (bool, BoolClass)):
- """
- this check must come before isinstance(arg, (int, float))
- because isinstance(True, int) is True
- """
- if arg:
- return BoolClass.TRUE
- return BoolClass.FALSE
- elif isinstance(arg, int):
- return decimal.Decimal(arg)
- elif isinstance(arg, float):
- decimal_from_float = decimal.Decimal(arg)
- if decimal_from_float.as_integer_ratio()[1] == 1:
- # 9.0 -> Decimal('9.0')
- # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
- return decimal.Decimal(str(decimal_from_float)+'.0')
- return decimal_from_float
- elif isinstance(arg, (tuple, list)):
- return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)])
- elif isinstance(arg, (none_type, NoneClass)):
- return NoneClass.NONE
- elif isinstance(arg, (date, datetime)):
- if not from_server:
- return arg.isoformat()
- raise type_error
- elif isinstance(arg, uuid.UUID):
- if not from_server:
- return str(arg)
- raise type_error
- elif isinstance(arg, decimal.Decimal):
- return decimal.Decimal(arg)
- elif isinstance(arg, bytes):
- return bytes(arg)
- elif isinstance(arg, (io.FileIO, io.BufferedReader)):
- return FileIO(arg)
- raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg)))
-
-
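The bool branch above must precede the int branch because `isinstance(True, int)` is `True` in Python; a simplified standalone sketch of the same casting rules:

    import decimal
    from datetime import date

    def cast(arg):
        if isinstance(arg, bool):       # check bool before int
            return arg
        if isinstance(arg, int):
            return decimal.Decimal(arg)
        if isinstance(arg, float):
            d = decimal.Decimal(arg)
            # keep a trailing .0 so whole-valued floats stay distinguishable from ints
            return decimal.Decimal(str(d) + '.0') if d.as_integer_ratio()[1] == 1 else d
        if isinstance(arg, date):
            return arg.isoformat()      # client-side dates are sent as strings
        return arg

    assert cast(True) is True
    assert cast(9) == decimal.Decimal('9')
    assert str(cast(9.0)) == '9.0'
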
-class ComposedBase(Discriminable):
-
- @classmethod
- def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
- path_to_schemas = defaultdict(set)
- for allof_cls in cls.MetaOapg.all_of():
- if validation_metadata.validation_ran_earlier(allof_cls):
- continue
- other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __get_oneof_class(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata,
- ):
- oneof_classes = []
- path_to_schemas = defaultdict(set)
- for oneof_cls in cls.MetaOapg.one_of():
- if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
- oneof_classes.append(oneof_cls)
- continue
- if validation_metadata.validation_ran_earlier(oneof_cls):
- oneof_classes.append(oneof_cls)
- continue
- try:
- path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and oneof_cls is discriminated_cls:
- raise ex
- continue
- oneof_classes.append(oneof_cls)
- if not oneof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the oneOf schemas matched the input data.".format(cls)
- )
- elif len(oneof_classes) > 1:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. Multiple "
- "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
- )
- # exactly one class matches
- return path_to_schemas
-
- @classmethod
- def __get_anyof_classes(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata
- ):
- anyof_classes = []
- path_to_schemas = defaultdict(set)
- for anyof_cls in cls.MetaOapg.any_of():
- if validation_metadata.validation_ran_earlier(anyof_cls):
- anyof_classes.append(anyof_cls)
- continue
-
- try:
- other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and anyof_cls is discriminated_cls:
- raise ex
- continue
- anyof_classes.append(anyof_cls)
- update(path_to_schemas, other_path_to_schemas)
- if not anyof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the anyOf schemas matched the input data.".format(cls)
- )
- return path_to_schemas
-
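The two helpers above encode the JSON Schema composition rules: oneOf requires exactly one branch to validate, anyOf at least one. A schematic version with plain predicate functions standing in for schema classes (names are hypothetical):

    def validate_one_of(arg, validators):
        matched = [v for v in validators if v(arg)]
        if not matched:
            raise ValueError('none of the oneOf schemas matched')
        if len(matched) > 1:
            raise ValueError('multiple oneOf schemas matched, but a max of one is allowed')
        return matched[0]

    def validate_any_of(arg, validators):
        matched = [v for v in validators if v(arg)]
        if not matched:
            raise ValueError('none of the anyOf schemas matched')
        return matched

    is_str = lambda x: isinstance(x, str)
    is_int = lambda x: isinstance(x, int)
    assert validate_one_of('a', [is_str, is_int]) is is_str
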
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- ComposedBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False) which are enums
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- # validation checking on types, validations, and enums
- path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
-
- # process composed schema
- discriminator = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'):
- discriminator = cls.MetaOapg.discriminator()
- discriminated_cls = None
- if discriminator and arg and isinstance(arg, frozendict.frozendict):
- disc_property_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg)
- # get discriminated_cls by looking at the dict in the current class
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
- arg[disc_property_name],
- cls.__name__,
- disc_property_name,
- list(discriminator[disc_property_name].keys()),
- updated_vm.path_to_item + (disc_property_name,)
- )
- )
-
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
- other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
- other_path_to_schemas = cls.__get_oneof_class(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
- other_path_to_schemas = cls.__get_anyof_classes(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- not_cls = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'):
- not_cls = cls.MetaOapg.not_schema
- not_cls = cls._get_class_oapg(not_cls)
- if not_cls:
- other_path_to_schemas = None
- not_exception = ApiValueError(
- "Invalid value '{}' was passed in to {}. Value is invalid because it is disallowed by {}".format(
- arg,
- cls.__name__,
- not_cls.__name__,
- )
- )
- if updated_vm.validation_ran_earlier(not_cls):
- raise not_exception
-
- try:
- other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm)
- except (ApiValueError, ApiTypeError):
- pass
- if other_path_to_schemas:
- raise not_exception
-
- if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls):
- # TODO use an exception from this package here
- assert discriminated_cls in path_to_schemas[updated_vm.path_to_item]
- return path_to_schemas
-
-
-# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
-class ComposedSchema(
- ComposedBase,
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs):
- if not args:
- if not kwargs:
- raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
- args = (kwargs, )
- return super().from_openapi_data_oapg(args[0], _configuration=_configuration)
-
-
-class ListSchema(
- ListBase,
- Schema,
- TupleMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NoneSchema(
- NoneBase,
- Schema,
- NoneMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: None, **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NumberSchema(
- NumberBase,
- Schema,
- DecimalMixin
-):
- """
- This is used for type: number with no format
- Both integers AND floats are accepted
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class IntBase:
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- self._as_int = int(self)
- return self._as_int
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
-
- denominator = arg.as_integer_ratio()[-1]
- if denominator != 1:
- raise ApiValueError(
- "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- IntBase _validate_oapg
- TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class IntSchema(IntBase, NumberSchema):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class Int32Base:
- __inclusive_minimum = decimal.Decimal(-2147483648)
- __inclusive_maximum = decimal.Decimal(2147483647)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int32Schema(
- Int32Base,
- IntSchema
-):
- pass
-
-
-class Int64Base:
- __inclusive_minimum = decimal.Decimal(-9223372036854775808)
- __inclusive_maximum = decimal.Decimal(9223372036854775807)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int64Schema(
- Int64Base,
- IntSchema
-):
- pass
-
-
-class Float32Base:
- __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
- __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Float32Schema(
- Float32Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class Float64Base:
- __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
- __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-class Float64Schema(
- Float64Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- # todo check format
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class StrSchema(
- StrBase,
- Schema,
- StrMixin
-):
- """
- date + datetime string types must inherit from this class
- That is because one can validate a str payload as both:
- - type: string (format unset)
- - type: string, format: date
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema':
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class UUIDSchema(UUIDBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateSchema(DateBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, date], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateTimeSchema(DateTimeBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, datetime], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DecimalSchema(DecimalBase, StrSchema):
-
- def __new__(cls, arg: str, **kwargs: Configuration):
- """
- Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads
- which can be simple (str) or complex (dicts or lists with nested values)
- Because casting is only done once and recursively casts all values prior to validation then for a potential
- client side Decimal input if Decimal was accepted as an input in DecimalSchema then one would not know
- if one was using it for a StrSchema (where it should be cast to str) or one is using it for NumberSchema
- where it should stay as Decimal.
- """
- return super().__new__(cls, arg, **kwargs)
-
-
-class BytesSchema(
- Schema,
- BytesMixin
-):
- """
- this class will subclass bytes and is immutable
- """
- def __new__(cls, arg: bytes, **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class FileSchema(
- Schema,
- FileMixin
-):
- """
- This class is NOT immutable
-    Dynamic classes are built using it, for example when AnyType allows binary data in
-    All other schema classes ARE immutable
- If one wanted to make this immutable one could make this a DictSchema with required properties:
- - data = BytesSchema (which would be an immutable bytes based schema)
- - file_name = StrSchema
- and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
- The downside would be that data would be stored in memory which one may not want to do for very large files
-
- The developer is responsible for closing this file and deleting it
-
- This class was kept as mutable:
- - to allow file reading and writing to disk
- - to be able to preserve file name info
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class BinaryBase:
- pass
-
-
-class BinarySchema(
- ComposedBase,
- BinaryBase,
- Schema,
- BinaryMixin
-):
- class MetaOapg:
- @staticmethod
- def one_of():
- return [
- BytesSchema,
- FileSchema,
- ]
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration):
- return super().__new__(cls, arg)
-
-
-class BoolSchema(
- BoolBase,
- Schema,
- BoolMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: bool, **kwargs: ValidationMetadata):
- return super().__new__(cls, arg, **kwargs)
-
-
-class AnyTypeSchema(
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolFileBytesMixin
-):
- # Python representation of a schema defined as true or {}
- pass
-
-
-class UnsetAnyTypeSchema(AnyTypeSchema):
- # Used when additionalProperties/items was not explicitly defined and a defining schema is needed
- pass
-
-
-class NotAnyTypeSchema(
- ComposedSchema,
-):
- """
- Python representation of a schema defined as false or {'not': {}}
-    Does not allow inputs of AnyType
- Note: validations on this class are never run because the code knows that no inputs will ever validate
- """
-
- class MetaOapg:
- not_schema = AnyTypeSchema
-
- def __new__(
- cls,
- *args,
- _configuration: typing.Optional[Configuration] = None,
- ) -> 'NotAnyTypeSchema':
- return super().__new__(
- cls,
- *args,
- _configuration=_configuration,
- )
-
-
-class DictSchema(
- DictBase,
- Schema,
- FrozenDictMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]):
- return super().__new__(cls, *args, **kwargs)
-
-
-schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema}
-
-
-@functools.lru_cache()
-def get_new_class(
- class_name: str,
- bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]
-) -> typing.Type[Schema]:
- """
- Returns a new class that is made with the subclass bases
- """
- new_cls: typing.Type[Schema] = type(class_name, bases, {})
- return new_cls
-
-
-LOG_CACHE_USAGE = False
-
-
-def log_cache_usage(cache_fn):
- if LOG_CACHE_USAGE:
- print(cache_fn.__name__, cache_fn.cache_info())
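
Because `get_new_class` is wrapped in `functools.lru_cache`, repeated requests for the same (name, bases) pair return the identical dynamically built class, so subclass checks across payloads stay consistent. A self-contained demonstration:

    import functools

    @functools.lru_cache()
    def get_new_class(class_name, bases):
        return type(class_name, bases, {})

    A = get_new_class('Demo', (dict,))
    B = get_new_class('Demo', (dict,))
    assert A is B                       # same cached class object
    print(get_new_class.cache_info())   # hits=1, the kind of stats log_cache_usage prints
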
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/__init__.py
index 5af7c6f0e..cb3b2e655 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/__init__.py
@@ -5,24 +5,31 @@
"""
Volumes manager API
- CloudHarness Volumes manager API # noqa: E501
+ CloudHarness Volumes manager API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
__version__ = "1.0.0"
+# import apis into sdk package
+from cloudharness_cli.volumemanager.api.rest_api import RestApi
+
# import ApiClient
+from cloudharness_cli.volumemanager.api_response import ApiResponse
from cloudharness_cli.volumemanager.api_client import ApiClient
-
-# import Configuration
from cloudharness_cli.volumemanager.configuration import Configuration
-
-# import exceptions
from cloudharness_cli.volumemanager.exceptions import OpenApiException
-from cloudharness_cli.volumemanager.exceptions import ApiAttributeError
from cloudharness_cli.volumemanager.exceptions import ApiTypeError
from cloudharness_cli.volumemanager.exceptions import ApiValueError
from cloudharness_cli.volumemanager.exceptions import ApiKeyError
+from cloudharness_cli.volumemanager.exceptions import ApiAttributeError
from cloudharness_cli.volumemanager.exceptions import ApiException
+
+# import models into sdk package
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/__init__.py
new file mode 100644
index 000000000..4bf39aeb9
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/__init__.py
@@ -0,0 +1,5 @@
+# flake8: noqa
+
+# import apis into api package
+from cloudharness_cli.volumemanager.api.rest_api import RestApi
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/rest_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/rest_api.py
new file mode 100644
index 000000000..6e4764a44
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api/rest_api.py
@@ -0,0 +1,570 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import Field, StrictStr
+from typing_extensions import Annotated
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
+
+from cloudharness_cli.volumemanager.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.volumemanager.api_response import ApiResponse
+from cloudharness_cli.volumemanager.rest import RESTResponseType
+
+
+class RestApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def pvc_name_get(
+ self,
+ name: Annotated[StrictStr, Field(description="The name of the Persistent Volume Claim to be retrieved")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> PersistentVolumeClaim:
+ """Retrieve a Persistent Volume Claim from the Kubernetes repository.
+
+
+ :param name: The name of the Persistent Volume Claim to be retrieved (required)
+ :type name: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_name_get_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "PersistentVolumeClaim",
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def pvc_name_get_with_http_info(
+ self,
+ name: Annotated[StrictStr, Field(description="The name of the Persistent Volume Claim to be retrieved")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[PersistentVolumeClaim]:
+ """Retrieve a Persistent Volume Claim from the Kubernetes repository.
+
+
+ :param name: The name of the Persistent Volume Claim to be retrieved (required)
+ :type name: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_name_get_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "PersistentVolumeClaim",
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def pvc_name_get_without_preload_content(
+ self,
+ name: Annotated[StrictStr, Field(description="The name of the Persistent Volume Claim to be retrieved")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Retrieve a Persistent Volume Claim from the Kubernetes repository.
+
+
+ :param name: The name of the Persistent Volume Claim to be retrieved (required)
+ :type name: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_name_get_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "PersistentVolumeClaim",
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _pvc_name_get_serialize(
+ self,
+ name,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if name is not None:
+ _path_params['name'] = name
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ 'bearerAuth'
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/pvc/{name}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def pvc_post(
+ self,
+ persistent_volume_claim_create: Annotated[PersistentVolumeClaimCreate, Field(description="The Persistent Volume Claim to create.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> PersistentVolumeClaim:
+ """Create a Persistent Volume Claim in Kubernetes
+
+
+ :param persistent_volume_claim_create: The Persistent Volume Claim to create. (required)
+ :type persistent_volume_claim_create: PersistentVolumeClaimCreate
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_post_serialize(
+ persistent_volume_claim_create=persistent_volume_claim_create,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': "PersistentVolumeClaim",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def pvc_post_with_http_info(
+ self,
+ persistent_volume_claim_create: Annotated[PersistentVolumeClaimCreate, Field(description="The Persistent Volume Claim to create.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[PersistentVolumeClaim]:
+ """Create a Persistent Volume Claim in Kubernetes
+
+
+ :param persistent_volume_claim_create: The Persistent Volume Claim to create. (required)
+ :type persistent_volume_claim_create: PersistentVolumeClaimCreate
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_post_serialize(
+ persistent_volume_claim_create=persistent_volume_claim_create,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': "PersistentVolumeClaim",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def pvc_post_without_preload_content(
+ self,
+ persistent_volume_claim_create: Annotated[PersistentVolumeClaimCreate, Field(description="The Persistent Volume Claim to create.")],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """Create a Persistent Volume Claim in Kubernetes
+
+
+ :param persistent_volume_claim_create: The Persistent Volume Claim to create. (required)
+ :type persistent_volume_claim_create: PersistentVolumeClaimCreate
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._pvc_post_serialize(
+ persistent_volume_claim_create=persistent_volume_claim_create,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '201': "PersistentVolumeClaim",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _pvc_post_serialize(
+ self,
+ persistent_volume_claim_create,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+ if persistent_volume_claim_create is not None:
+ _body_params = persistent_volume_claim_create
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+ # set the HTTP header `Content-Type`
+ if _content_type:
+ _header_params['Content-Type'] = _content_type
+ else:
+ _default_content_type = (
+ self.api_client.select_header_content_type(
+ [
+ 'application/json'
+ ]
+ )
+ )
+ if _default_content_type is not None:
+ _header_params['Content-Type'] = _default_content_type
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ 'bearerAuth'
+ ]
+
+ return self.api_client.param_serialize(
+ method='POST',
+ resource_path='/pvc',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
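For orientation, a minimal usage sketch of the new client (the host URL, token, and the PersistentVolumeClaimCreate field names are hypothetical; each endpoint also has a `*_with_http_info` variant returning an `ApiResponse` wrapper with status and headers, and a `*_without_preload_content` variant returning the raw REST response):

    from cloudharness_cli.volumemanager import ApiClient, Configuration, RestApi
    from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate

    configuration = Configuration(host='https://volumemanager.example.local')
    configuration.access_token = '<bearer token>'   # the spec's bearerAuth scheme

    api = RestApi(ApiClient(configuration))
    created = api.pvc_post(PersistentVolumeClaimCreate(name='my-pvc', size='1Gi'))  # POST /pvc, 201 -> PersistentVolumeClaim
    pvc = api.pvc_name_get(name='my-pvc')                                           # GET /pvc/{name}
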
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_client.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_client.py
index 871503979..559185cec 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_client.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_client.py
@@ -1,1499 +1,781 @@
# coding: utf-8
+
"""
Volumes manager API
- CloudHarness Volumes manager API # noqa: E501
+ CloudHarness Volumes manager API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-from dataclasses import dataclass
-from decimal import Decimal
-import enum
-import email
+import datetime
+from dateutil.parser import parse
+from enum import Enum
import json
+import mimetypes
import os
-import io
-import atexit
-from multiprocessing.pool import ThreadPool
import re
import tempfile
-import typing
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-from urllib.parse import urlparse, quote
-from urllib3.fields import RequestField as RequestFieldBase
-import frozendict
+from urllib.parse import quote
+from typing import Tuple, Optional, List, Dict, Union
+from pydantic import SecretStr
-from cloudharness_cli.volumemanager import rest
from cloudharness_cli.volumemanager.configuration import Configuration
-from cloudharness_cli.volumemanager.exceptions import ApiTypeError, ApiValueError
-from cloudharness_cli.volumemanager.schemas import (
- NoneClass,
- BoolClass,
- Schema,
- FileIO,
- BinarySchema,
- date,
- datetime,
- none_type,
- Unset,
- unset,
+from cloudharness_cli.volumemanager.api_response import ApiResponse, T as ApiResponseT
+import cloudharness_cli.volumemanager.models
+from cloudharness_cli.volumemanager import rest
+from cloudharness_cli.volumemanager.exceptions import (
+ ApiValueError,
+ ApiException,
+ BadRequestException,
+ UnauthorizedException,
+ ForbiddenException,
+ NotFoundException,
+ ServiceException
)
+RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]]
-class RequestField(RequestFieldBase):
- def __eq__(self, other):
- if not isinstance(other, RequestField):
- return False
- return self.__dict__ == other.__dict__
-
-
-class JSONEncoder(json.JSONEncoder):
- compact_separators = (',', ':')
-
- def default(self, obj):
- if isinstance(obj, str):
- return str(obj)
- elif isinstance(obj, float):
- return float(obj)
- elif isinstance(obj, int):
- return int(obj)
- elif isinstance(obj, Decimal):
- if obj.as_tuple().exponent >= 0:
- return int(obj)
- return float(obj)
- elif isinstance(obj, NoneClass):
- return None
- elif isinstance(obj, BoolClass):
- return bool(obj)
- elif isinstance(obj, (dict, frozendict.frozendict)):
- return {key: self.default(val) for key, val in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self.default(item) for item in obj]
- raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
-
-
-class ParameterInType(enum.Enum):
- QUERY = 'query'
- HEADER = 'header'
- PATH = 'path'
- COOKIE = 'cookie'
-
-
-class ParameterStyle(enum.Enum):
- MATRIX = 'matrix'
- LABEL = 'label'
- FORM = 'form'
- SIMPLE = 'simple'
- SPACE_DELIMITED = 'spaceDelimited'
- PIPE_DELIMITED = 'pipeDelimited'
- DEEP_OBJECT = 'deepObject'
-
-
-class PrefixSeparatorIterator:
- # A class to store prefixes and separators for rfc6570 expansions
-
- def __init__(self, prefix: str, separator: str):
- self.prefix = prefix
- self.separator = separator
- self.first = True
- if separator in {'.', '|', '%20'}:
- item_separator = separator
- else:
- item_separator = ','
- self.item_separator = item_separator
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self.first:
- self.first = False
- return self.prefix
- return self.separator
-
-
-class ParameterSerializerBase:
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- return False
-
- @staticmethod
- def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
- """
- Get representation if str/float/int/None/items in list/ values in dict
- None is returned if an item is undefined, use cases are value=
- - None
- - []
- - {}
- - [None, None None]
- - {'a': None, 'b': None}
- """
- if type(in_data) in {str, float, int}:
- if percent_encode:
- return quote(str(in_data))
- return str(in_data)
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, list) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, dict) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- raise ApiValueError('Unable to generate a ref6570 item representation of {}'.format(in_data))
-
- @staticmethod
- def _to_dict(name: str, value: str):
- return {name: value}
-
- @classmethod
- def __ref6570_str_float_int_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_value = cls.__ref6570_item_value(in_data, percent_encode)
- if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
- return next(prefix_separator_iterator) + var_name_piece
- value_pair_equals = '=' if named_parameter_expansion else ''
- return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
- @classmethod
- def __ref6570_list_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
- item_values = [v for v in item_values if v is not None]
- if not item_values:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece +
- value_pair_equals +
- prefix_separator_iterator.item_separator.join(item_values)
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [var_name_piece + value_pair_equals + val for val in item_values]
- )
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
- @classmethod
- def __ref6570_dict_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
- in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
- if not in_data_transformed:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece + value_pair_equals +
- prefix_separator_iterator.item_separator.join(
- prefix_separator_iterator.item_separator.join(
- item_pair
- ) for item_pair in in_data_transformed.items()
- )
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [key + '=' + val for key, val in in_data_transformed.items()]
- )
+ :param configuration: .Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ """
- @classmethod
- def _ref6570_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator
- ) -> str:
- """
- Separator is for separate variables like dict with explode true, not for array item separation
- """
- named_parameter_expansion = prefix_separator_iterator.separator in {'&', ';'}
- var_name_piece = variable_name if named_parameter_expansion else ''
- if type(in_data) in {str, float, int}:
- return cls.__ref6570_str_float_int_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- elif isinstance(in_data, list):
- return cls.__ref6570_list_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, dict):
- return cls.__ref6570_dict_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- # bool, bytes, etc
- raise ApiValueError('Unable to generate a ref6570 representation of {}'.format(in_data))
+ PRIMITIVE_TYPES = (float, bool, bytes, str, int)
+ NATIVE_TYPES_MAPPING = {
+ 'int': int,
+ 'long': int, # TODO remove as only py3 is supported?
+ 'float': float,
+ 'str': str,
+ 'bool': bool,
+ 'date': datetime.date,
+ 'datetime': datetime.datetime,
+ 'object': object,
+ }
+ _pool = None
+ def __init__(
+ self,
+ configuration=None,
+ header_name=None,
+ header_value=None,
+ cookie=None
+ ) -> None:
+ # use default configuration if none is provided
+ if configuration is None:
+ configuration = Configuration.get_default()
+ self.configuration = configuration
-class StyleFormSerializer(ParameterSerializerBase):
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- if style is ParameterStyle.FORM:
- return True
- return super()._get_default_explode(style)
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = {}
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ self.client_side_validation = configuration.client_side_validation
- def _serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> str:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = PrefixSeparatorIterator('', '&')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
-class StyleSimpleSerializer(ParameterSerializerBase):
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers['User-Agent']
- def _serialize_simple(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool
- ) -> str:
- prefix_separator_iterator = PrefixSeparatorIterator('', ',')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers['User-Agent'] = value
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
-class JSONDetector:
- """
- Works for:
- application/json
- application/json; charset=UTF-8
- application/json-patch+json
- application/geo+json
- """
- __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
- @classmethod
- def _content_type_is_json(cls, content_type: str) -> bool:
- if cls.__json_content_type_pattern.match(content_type):
- return True
- return False
-
-
-@dataclass
-class ParameterBase(JSONDetector):
- name: str
- in_type: ParameterInType
- required: bool
- style: typing.Optional[ParameterStyle]
- explode: typing.Optional[bool]
- allow_reserved: typing.Optional[bool]
- schema: typing.Optional[typing.Type[Schema]]
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
-
- __style_to_in_type = {
- ParameterStyle.MATRIX: {ParameterInType.PATH},
- ParameterStyle.LABEL: {ParameterInType.PATH},
- ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
- ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
- ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
- }
- __in_type_to_default_style = {
- ParameterInType.QUERY: ParameterStyle.FORM,
- ParameterInType.PATH: ParameterStyle.SIMPLE,
- ParameterInType.HEADER: ParameterStyle.SIMPLE,
- ParameterInType.COOKIE: ParameterStyle.FORM,
- }
- __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
- _json_encoder = JSONEncoder()
+ _default = None
@classmethod
- def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
- if style is None:
- return
- in_type_set = cls.__style_to_in_type[style]
- if in_type not in in_type_set:
- raise ValueError(
- 'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
- style, in_type_set
- )
- )
-
- def __init__(
- self,
- name: str,
- in_type: ParameterInType,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- if schema is None and content is None:
- raise ValueError('Value missing; Pass in either schema or content')
- if schema and content:
- raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
- if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
- raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
- self.__verify_style_to_in_type(style, in_type)
- if content is None and style is None:
- style = self.__in_type_to_default_style[in_type]
- if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
- raise ValueError('Invalid content length, content length must equal 1')
- self.in_type = in_type
- self.name = name
- self.required = required
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
- self.schema = schema
- self.content = content
-
- def _serialize_json(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- eliminate_whitespace: bool = False
- ) -> str:
- if eliminate_whitespace:
- return json.dumps(in_data, separators=self._json_encoder.compact_separators)
- return json.dumps(in_data)
+ def get_default(cls):
+ """Return new instance of ApiClient.
+ This method returns newly created, based on default constructor,
+ object of ApiClient class or returns a copy of default
+ ApiClient.
-class PathParameter(ParameterBase, StyleSimpleSerializer):
+ :return: The ApiClient object.
+ """
+ if cls._default is None:
+ cls._default = ApiClient()
+ return cls._default
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.PATH,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of ApiClient.
- def __serialize_label(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator('.', '.')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+    It stores the given object as the default ApiClient.
- def __serialize_matrix(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator(';', ';')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ :param default: object of ApiClient.
+ """
+ cls._default = default
- def __serialize_simple(
+ def param_serialize(
self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- ) -> typing.Dict[str, str]:
- value = self._serialize_simple(
- in_data=in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True
- )
- return self._to_dict(self.name, value)
+ method,
+ resource_path,
+ path_params=None,
+ query_params=None,
+ header_params=None,
+ body=None,
+ post_params=None,
+        files=None,
+        auth_settings=None,
+ collection_formats=None,
+ _host=None,
+ _request_auth=None
+ ) -> RequestSerialized:
+
+ """Builds the HTTP request params needed by the request.
+ :param method: Method to call.
+ :param resource_path: Path to method endpoint.
+ :param path_params: Path parameters in the url.
+ :param query_params: Query parameters in the url.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param post_params: Request post form parameters,
+            for `application/x-www-form-urlencoded`, `multipart/form-data`.
+        :param auth_settings: Auth Settings names for the request.
+        :param files: key -> filename, value -> filepath,
+            for `multipart/form-data`.
+        :param collection_formats: dict of collection formats for path, query,
+            header, and post parameters.
+        :param _request_auth: set to override the auth_settings for a single
+            request; this effectively ignores the authentication
+            in the spec for that request.
+        :return: tuple of form (method, url, header_params,
+            body, post_params)
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> path
- path:
- returns path_params: dict
- label -> path
- returns path_params
- matrix -> path
- returns path_params
- """
- if self.style:
- if self.style is ParameterStyle.SIMPLE:
- return self.__serialize_simple(cast_in_data)
- elif self.style is ParameterStyle.LABEL:
- return self.__serialize_label(cast_in_data)
- elif self.style is ParameterStyle.MATRIX:
- return self.__serialize_matrix(cast_in_data)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class QueryParameter(ParameterBase, StyleFormSerializer):
+ config = self.configuration
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.QUERY,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ # header parameters
+ header_params = header_params or {}
+ header_params.update(self.default_headers)
+ if self.cookie:
+ header_params['Cookie'] = self.cookie
+ if header_params:
+ header_params = self.sanitize_for_serialization(header_params)
+ header_params = dict(
+                self.parameters_to_tuples(header_params, collection_formats)
+ )
- def __serialize_space_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # path parameters
+ if path_params:
+ path_params = self.sanitize_for_serialization(path_params)
+ path_params = self.parameters_to_tuples(
+ path_params,
+ collection_formats
+ )
+ for k, v in path_params:
+ # specified safe chars, encode everything
+ resource_path = resource_path.replace(
+ '{%s}' % k,
+ quote(str(v), safe=config.safe_chars_for_path_param)
+ )
- def __serialize_pipe_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # post parameters
+ if post_params or files:
+ post_params = post_params if post_params else []
+ post_params = self.sanitize_for_serialization(post_params)
+ post_params = self.parameters_to_tuples(
+ post_params,
+ collection_formats
+ )
+ if files:
+ post_params.extend(self.files_parameters(files))
- def __serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._serialize_form(
- in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
+ # auth setting
+ self.update_params_for_auth(
+ header_params,
+ query_params,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=_request_auth
)
- return self._to_dict(self.name, value)
- def get_prefix_separator_iterator(self) -> typing.Optional[PrefixSeparatorIterator]:
- if self.style is ParameterStyle.FORM:
- return PrefixSeparatorIterator('?', '&')
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return PrefixSeparatorIterator('', '%20')
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return PrefixSeparatorIterator('', '|')
-
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> query
- query:
- - GET/HEAD/DELETE: could use fields
- - PUT/POST: must use urlencode to send parameters
- returns fields: tuple
- spaceDelimited -> query
- returns fields
- pipeDelimited -> query
- returns fields
- deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
- returns fields
- """
- if self.style:
- # TODO update query ones to omit setting values when [] {} or None is input
- if self.style is ParameterStyle.FORM:
- return self.__serialize_form(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
- # self.content will be length one
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
- return self._to_dict(
- self.name,
- next(prefix_separator_iterator) + self.name + '=' + quote(value)
- )
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+ # body
+ if body:
+ body = self.sanitize_for_serialization(body)
+ # request url
+ if _host is None or self.configuration.ignore_operation_servers:
+ url = self.configuration.host + resource_path
+ else:
+ # use server/host defined in path or operation instead
+ url = _host + resource_path
+
+ # query parameters
+ if query_params:
+ query_params = self.sanitize_for_serialization(query_params)
+ url_query = self.parameters_to_url_query(
+ query_params,
+ collection_formats
+ )
+ url += "?" + url_query
-class CookieParameter(ParameterBase, StyleFormSerializer):
+ return method, url, header_params, body, post_params
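For orientation, a sketch of the tuple this method returns; the host and endpoint below are illustrative assumptions:

    from cloudharness_cli.volumemanager.api_client import ApiClient
    from cloudharness_cli.volumemanager.configuration import Configuration

    client = ApiClient(Configuration(host="http://localhost:8080/api"))
    method, url, headers, body, post_params = client.param_serialize(
        method="GET",
        resource_path="/pvc/{name}",
        path_params={"name": "mypvc"},
    )
    # method -> 'GET'
    # url    -> 'http://localhost:8080/api/pvc/mypvc'
    # body and post_params -> None; headers carry the default User-Agent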
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None and content is None and schema else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.COOKIE,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> cookie
- returns fields: tuple
- """
- if self.style:
- """
- TODO add escaping of comma, space, equals
- or turn encoding on
- """
- value = self._serialize_form(
- cast_in_data,
- explode=self.explode,
- name=self.name,
- percent_encode=False,
- prefix_separator_iterator=PrefixSeparatorIterator('', '&')
- )
- return self._to_dict(self.name, value)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class HeaderParameter(ParameterBase, StyleSimpleSerializer):
- def __init__(
+ def call_api(
self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.HEADER,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
-
- @staticmethod
- def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
- data = tuple(t for t in in_data if t)
- headers = HTTPHeaderDict()
- if not data:
- return headers
- headers.extend(data)
- return headers
+ method,
+ url,
+ header_params=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ) -> rest.RESTResponse:
+ """Makes the HTTP request (synchronous)
+ :param method: Method to call.
+ :param url: Path to method endpoint.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+        :param post_params: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param _request_timeout: timeout setting for this request.
+ :return: RESTResponse
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> HTTPHeaderDict:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> header
- headers: PoolManager needs a mapping, tuple is close
- returns headers: dict
- """
- if self.style:
- value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
- return self.__to_headers(((self.name, value),))
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self.__to_headers(((self.name, value),))
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class Encoding:
- def __init__(
- self,
- content_type: str,
- headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: bool = False,
- ):
- self.content_type = content_type
- self.headers = headers
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
-
-
-@dataclass
-class MediaType:
- """
- Used to store request and response body schema information
- encoding:
- A map between a property name and its encoding information.
- The key, being the property name, MUST exist in the schema as a property.
- The encoding object SHALL only apply to requestBody objects when the media type is
- multipart or application/x-www-form-urlencoded.
- """
- schema: typing.Optional[typing.Type[Schema]] = None
- encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+ try:
+ # perform request and return response
+ response_data = self.rest_client.request(
+ method, url,
+ headers=header_params,
+ body=body, post_params=post_params,
+ _request_timeout=_request_timeout
+ )
+ except ApiException as e:
+ raise e
-@dataclass
-class ApiResponse:
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, Schema]
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
+ return response_data
- def __init__(
+ def response_deserialize(
self,
- response: urllib3.HTTPResponse,
- body: typing.Union[Unset, typing.Type[Schema]],
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
- ):
- """
- pycharm needs this to prevent 'Unexpected argument' warnings
+ response_data: rest.RESTResponse,
+ response_types_map: Optional[Dict[str, ApiResponseT]]=None
+ ) -> ApiResponse[ApiResponseT]:
+ """Deserializes response into an object.
+ :param response_data: RESTResponse object to be deserialized.
+ :param response_types_map: dict of response types.
+ :return: ApiResponse
"""
- self.response = response
- self.body = body
- self.headers = headers
+ msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+ assert response_data.data is not None, msg
-@dataclass
-class ApiResponseWithoutDeserialization(ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, typing.Type[Schema]] = unset
- headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
+ response_type = response_types_map.get(str(response_data.status), None)
+ if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599:
+ # if not found, look for '1XX', '2XX', etc.
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None)
+ # deserialize response data
+ response_text = None
+ return_data = None
+ try:
+ if response_type == "bytearray":
+ return_data = response_data.data
+ elif response_type == "file":
+ return_data = self.__deserialize_file(response_data)
+ elif response_type is not None:
+ match = None
+ content_type = response_data.getheader('content-type')
+ if content_type is not None:
+ match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+ encoding = match.group(1) if match else "utf-8"
+ response_text = response_data.data.decode(encoding)
+ return_data = self.deserialize(response_text, response_type, content_type)
+ finally:
+ if not 200 <= response_data.status <= 299:
+ raise ApiException.from_response(
+ http_resp=response_data,
+ body=response_text,
+ data=return_data,
+ )
-class OpenApiResponse(JSONDetector):
- __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+ return ApiResponse(
+ status_code = response_data.status,
+ data = return_data,
+ headers = response_data.getheaders(),
+ raw_data = response_data.data
+ )
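The read-before-deserialize contract asserted above gives the following call sequence; a sketch assuming a reachable endpoint and assuming the 200 response maps to the PersistentVolumeClaim model:

    from cloudharness_cli.volumemanager.api_client import ApiClient

    client = ApiClient()
    method, url, headers, body, _ = client.param_serialize(
        method="GET", resource_path="/pvc/mypvc")
    response_data = client.call_api(method, url, header_params=headers)
    response_data.read()  # must happen before response_deserialize()
    api_response = client.response_deserialize(
        response_data, response_types_map={"200": "PersistentVolumeClaim"})
    print(api_response.status_code, api_response.data)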
- def __init__(
- self,
- response_cls: typing.Type[ApiResponse] = ApiResponse,
- content: typing.Optional[typing.Dict[str, MediaType]] = None,
- headers: typing.Optional[typing.List[HeaderParameter]] = None,
- ):
- self.headers = headers
- if content is not None and len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
- self.response_cls = response_cls
-
- @staticmethod
- def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
- # python must be >= 3.9 so we can pass in bytes into json.loads
- return json.loads(response.data)
-
- @staticmethod
- def __file_name_from_response_url(response_url: typing.Optional[str]) -> typing.Optional[str]:
- if response_url is None:
- return None
- url_path = urlparse(response_url).path
- if url_path:
- path_basename = os.path.basename(url_path)
- if path_basename:
- _filename, ext = os.path.splitext(path_basename)
- if ext:
- return path_basename
- return None
+ def sanitize_for_serialization(self, obj):
+ """Builds a JSON POST object.
- @classmethod
- def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
- if content_disposition is None:
- return None
- match = cls.__filename_content_disposition_pattern.search(content_disposition)
- if not match:
- return None
- return match.group(1)
+        If obj is None, return None.
+        If obj is SecretStr, return obj.get_secret_value().
+        If obj is str, int, long, float, bool, return it directly.
+        If obj is datetime.datetime or datetime.date,
+        convert to a string in ISO 8601 format.
+        If obj is list or tuple, sanitize each element.
+        If obj is dict, sanitize each value in the dict.
+        If obj is an OpenAPI model, return the sanitized properties dict.
- def __deserialize_application_octet_stream(
- self, response: urllib3.HTTPResponse
- ) -> typing.Union[bytes, io.BufferedReader]:
- """
- urllib3 use cases:
- 1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
- 2. when preload_content=False (stream=True) then supports_chunked_reads is True and
- a file will be written and returned
+ :param obj: The data to serialize.
+ :return: The serialized form of data.
"""
- if response.supports_chunked_reads():
- file_name = (
- self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
- or self.__file_name_from_response_url(response.geturl())
+ if obj is None:
+ return None
+ elif isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, SecretStr):
+ return obj.get_secret_value()
+ elif isinstance(obj, self.PRIMITIVE_TYPES):
+ return obj
+ elif isinstance(obj, list):
+ return [
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
+ ]
+ elif isinstance(obj, tuple):
+ return tuple(
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
)
+ elif isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
- if file_name is None:
- _fd, path = tempfile.mkstemp()
- else:
- path = os.path.join(tempfile.gettempdir(), file_name)
-
- with open(path, 'wb') as new_file:
- chunk_size = 1024
- while True:
- data = response.read(chunk_size)
- if not data:
- break
- new_file.write(data)
- # release_conn is needed for streaming connections only
- response.release_conn()
- new_file = open(path, 'rb')
- return new_file
+ elif isinstance(obj, dict):
+ obj_dict = obj
else:
- return response.data
+            # Convert a model instance to a dict, using to_dict()
+            # when available so attribute names are mapped to their
+            # JSON keys as defined in the model; otherwise fall
+            # back to the instance __dict__.
+ if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')):
+ obj_dict = obj.to_dict()
+ else:
+ obj_dict = obj.__dict__
- @staticmethod
- def __deserialize_multipart_form_data(
- response: urllib3.HTTPResponse
- ) -> typing.Dict[str, typing.Any]:
- msg = email.message_from_bytes(response.data)
return {
- part.get_param("name", header="Content-Disposition"): part.get_payload(
- decode=True
- ).decode(part.get_content_charset())
- if part.get_content_charset()
- else part.get_payload()
- for part in msg.get_payload()
+ key: self.sanitize_for_serialization(val)
+ for key, val in obj_dict.items()
}
- def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
- content_type = response.getheader('content-type')
- deserialized_body = unset
- streamed = response.supports_chunked_reads()
-
- deserialized_headers = unset
- if self.headers is not None:
- # TODO add header deserialiation here
- pass
-
- if self.content is not None:
- if content_type not in self.content:
- raise ApiValueError(
- f"Invalid content_type returned. Content_type='{content_type}' was returned "
- f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
- )
- body_schema = self.content[content_type].schema
- if body_schema is None:
- # some specs do not define response content media type schemas
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=unset
- )
+ def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]):
+ """Deserializes response into an object.
+
+        :param response_text: response body text to be deserialized.
+ :param response_type: class literal for
+ deserialized object, or string of class name.
+ :param content_type: content type of response.
+
+ :return: deserialized object.
+ """
- if self._content_type_is_json(content_type):
- body_data = self.__deserialize_json(response)
- elif content_type == 'application/octet-stream':
- body_data = self.__deserialize_application_octet_stream(response)
- elif content_type.startswith('multipart/form-data'):
- body_data = self.__deserialize_multipart_form_data(response)
- content_type = 'multipart/form-data'
+ # fetch data from response object
+ if content_type is None:
+ try:
+ data = json.loads(response_text)
+ except ValueError:
+ data = response_text
+ elif content_type.startswith("application/json"):
+ if response_text == "":
+ data = ""
else:
- raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
- deserialized_body = body_schema.from_openapi_data_oapg(
- body_data, _configuration=configuration)
- elif streamed:
- response.release_conn()
-
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=deserialized_body
- )
+ data = json.loads(response_text)
+ elif content_type.startswith("text/plain"):
+ data = response_text
+ else:
+ raise ApiException(
+ status=0,
+ reason="Unsupported content type: {0}".format(content_type)
+ )
+ return self.__deserialize(data, response_type)
-class ApiClient:
- """Generic API client for OpenAPI client library builds.
+ def __deserialize(self, data, klass):
+ """Deserializes dict, list, str into an object.
- OpenAPI generic API client. This client handles the client-
- server communication, and is invariant across implementations. Specifics of
- the methods and models for each application are generated from the OpenAPI
- templates.
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
- NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
+ :return: object.
+ """
+ if data is None:
+ return None
- :param configuration: .Configuration object for this client
- :param header_name: a header to pass when making calls to the API.
- :param header_value: a header value to pass when making calls to
- the API.
- :param cookie: a cookie to include in the header when making calls
- to the API
- :param pool_threads: The number of threads to use for async requests
- to the API. More threads means more concurrent API requests.
- """
+ if isinstance(klass, str):
+ if klass.startswith('List['):
+ m = re.match(r'List\[(.*)]', klass)
+ assert m is not None, "Malformed List type definition"
+ sub_kls = m.group(1)
+ return [self.__deserialize(sub_data, sub_kls)
+ for sub_data in data]
+
+ if klass.startswith('Dict['):
+ m = re.match(r'Dict\[([^,]*), (.*)]', klass)
+ assert m is not None, "Malformed Dict type definition"
+ sub_kls = m.group(2)
+ return {k: self.__deserialize(v, sub_kls)
+ for k, v in data.items()}
+
+ # convert str to class
+ if klass in self.NATIVE_TYPES_MAPPING:
+ klass = self.NATIVE_TYPES_MAPPING[klass]
+ else:
+ klass = getattr(cloudharness_cli.volumemanager.models, klass)
+
+ if klass in self.PRIMITIVE_TYPES:
+ return self.__deserialize_primitive(data, klass)
+ elif klass == object:
+ return self.__deserialize_object(data)
+ elif klass == datetime.date:
+ return self.__deserialize_date(data)
+ elif klass == datetime.datetime:
+ return self.__deserialize_datetime(data)
+ elif issubclass(klass, Enum):
+ return self.__deserialize_enum(data, klass)
+ else:
+ return self.__deserialize_model(data, klass)
- _pool = None
+ def parameters_to_tuples(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __init__(
- self,
- configuration: typing.Optional[Configuration] = None,
- header_name: typing.Optional[str] = None,
- header_value: typing.Optional[str] = None,
- cookie: typing.Optional[str] = None,
- pool_threads: int = 1
- ):
- if configuration is None:
- configuration = Configuration()
- self.configuration = configuration
- self.pool_threads = pool_threads
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: Parameters as list of tuples, collections formatted
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, value) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(str(value) for value in v)))
+ else:
+ new_params.append((k, v))
+ return new_params
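The collection_formats argument drives the fan-out; a sketch of the two common cases:

    from cloudharness_cli.volumemanager.api_client import ApiClient

    client = ApiClient()
    client.parameters_to_tuples({"tag": ["a", "b"]}, {"tag": "multi"})
    # -> [('tag', 'a'), ('tag', 'b')]
    client.parameters_to_tuples({"tag": ["a", "b"]}, {"tag": "csv"})
    # -> [('tag', 'a,b')]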
- self.rest_client = rest.RESTClientObject(configuration)
- self.default_headers = HTTPHeaderDict()
- if header_name is not None:
- self.default_headers[header_name] = header_value
- self.cookie = cookie
- # Set default User-Agent.
- self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ def parameters_to_url_query(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __enter__(self):
- return self
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: URL query string (e.g. a=Hello%20World&b=123)
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if isinstance(v, bool):
+ v = str(v).lower()
+ if isinstance(v, (int, float)):
+ v = str(v)
+ if isinstance(v, dict):
+ v = json.dumps(v)
+
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, str(value)) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(quote(str(value)) for value in v))
+ )
+ else:
+ new_params.append((k, quote(str(v))))
- def __exit__(self, exc_type, exc_value, traceback):
- self.close()
+ return "&".join(["=".join(map(str, item)) for item in new_params])
- def close(self):
- if self._pool:
- self._pool.close()
- self._pool.join()
- self._pool = None
- if hasattr(atexit, 'unregister'):
- atexit.unregister(self.close)
+ def files_parameters(self, files: Dict[str, Union[str, bytes]]):
+ """Builds form parameters.
- @property
- def pool(self):
- """Create thread pool on first request
- avoids instantiating unused threadpool for blocking clients.
+ :param files: File parameters.
+ :return: Form parameters with files.
"""
- if self._pool is None:
- atexit.register(self.close)
- self._pool = ThreadPool(self.pool_threads)
- return self._pool
-
- @property
- def user_agent(self):
- """User agent for this API client"""
- return self.default_headers['User-Agent']
+ params = []
+ for k, v in files.items():
+ if isinstance(v, str):
+ with open(v, 'rb') as f:
+ filename = os.path.basename(f.name)
+ filedata = f.read()
+ elif isinstance(v, bytes):
+ filename = k
+ filedata = v
+ else:
+ raise ValueError("Unsupported file value")
+ mimetype = (
+ mimetypes.guess_type(filename)[0]
+ or 'application/octet-stream'
+ )
+ params.append(
+ tuple([k, tuple([filename, filedata, mimetype])])
+ )
+ return params
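String values are treated as file paths and read from disk, while bytes are used as-is under the parameter's own name; a sketch of the bytes case:

    from cloudharness_cli.volumemanager.api_client import ApiClient

    client = ApiClient()
    client.files_parameters({"report": b"hello"})
    # -> [('report', ('report', b'hello', 'application/octet-stream'))]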
- @user_agent.setter
- def user_agent(self, value):
- self.default_headers['User-Agent'] = value
+ def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+ """Returns `Accept` based on an array of accepts provided.
- def set_default_header(self, header_name, header_value):
- self.default_headers[header_name] = header_value
+ :param accepts: List of headers.
+ :return: Accept (e.g. application/json).
+ """
+ if not accepts:
+ return None
- def __call_api(
- self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
+ for accept in accepts:
+ if re.search('json', accept, re.IGNORECASE):
+ return accept
- # header parameters
- used_headers = HTTPHeaderDict(self.default_headers)
- if self.cookie:
- headers['Cookie'] = self.cookie
+ return accepts[0]
- # auth setting
- self.update_params_for_auth(used_headers,
- auth_settings, resource_path, method, body)
+ def select_header_content_type(self, content_types):
+ """Returns `Content-Type` based on an array of content_types provided.
- # must happen after cookie setting and auth setting in case user is overriding those
- if headers:
- used_headers.update(headers)
+ :param content_types: List of content-types.
+ :return: Content-Type (e.g. application/json).
+ """
+ if not content_types:
+ return None
- # request url
- if host is None:
- url = self.configuration.host + resource_path
- else:
- # use server/host defined in path or operation instead
- url = host + resource_path
+ for content_type in content_types:
+ if re.search('json', content_type, re.IGNORECASE):
+ return content_type
- # perform request and return response
- response = self.request(
- method,
- url,
- headers=used_headers,
- fields=fields,
- body=body,
- stream=stream,
- timeout=timeout,
- )
- return response
+ return content_types[0]
- def call_api(
+ def update_params_for_auth(
self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- async_req: typing.Optional[bool] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request (synchronous) and returns deserialized data.
-
- To make an async_req request, set the async_req parameter.
+ headers,
+ queries,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=None
+ ) -> None:
+ """Updates header and query params based on authentication setting.
- :param resource_path: Path to method endpoint.
- :param method: Method to call.
- :param headers: Header parameters to be
- placed in the request header.
- :param body: Request body.
- :param fields: Request post form parameters,
- for `application/x-www-form-urlencoded`, `multipart/form-data`.
- :param auth_settings: Auth Settings names for the request.
- :param async_req: execute request asynchronously
- :type async_req: bool, optional TODO remove, unused
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Also when True, if the openapi spec describes a file download,
- the data will be written to a local filesystme file and the BinarySchema
- instance will also inherit from FileSchema and FileIO
- Default is False.
- :type stream: bool, optional
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
- :param host: api endpoint host
- :return:
- If async_req parameter is True,
- the request will be called asynchronously.
- The method will return the request thread.
- If parameter async_req is False or missing,
- then the method will return the response directly.
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+        :param resource_path: A string representation of the HTTP request resource path.
+        :param method: A string representation of the HTTP request method.
+        :param body: An object representing the body of the HTTP request.
+            The object type is the return value of sanitize_for_serialization().
+ :param request_auth: if set, the provided settings will
+ override the token in the configuration.
"""
+ if not auth_settings:
+ return
- if not async_req:
- return self.__call_api(
- resource_path,
- method,
+ if request_auth:
+ self._apply_auth_params(
headers,
- body,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
- )
-
- return self.pool.apply_async(
- self.__call_api,
- (
+ queries,
resource_path,
method,
- headers,
body,
- json,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
+ request_auth
)
- )
-
- def request(
+ else:
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if auth_setting:
+ self._apply_auth_params(
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ )
+
+ def _apply_auth_params(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request using RESTClient."""
- if method == "GET":
- return self.rest_client.GET(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "HEAD":
- return self.rest_client.HEAD(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "OPTIONS":
- return self.rest_client.OPTIONS(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "POST":
- return self.rest_client.POST(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PUT":
- return self.rest_client.PUT(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PATCH":
- return self.rest_client.PATCH(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "DELETE":
- return self.rest_client.DELETE(url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body)
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ ) -> None:
+ """Updates the request parameters based on a single auth_setting
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+        :param resource_path: A string representation of the HTTP request resource path.
+        :param method: A string representation of the HTTP request method.
+        :param body: An object representing the body of the HTTP request.
+            The object type is the return value of sanitize_for_serialization().
+ :param auth_setting: auth settings for the endpoint
+ """
+ if auth_setting['in'] == 'cookie':
+ headers['Cookie'] = auth_setting['value']
+ elif auth_setting['in'] == 'header':
+ if auth_setting['type'] != 'http-signature':
+ headers[auth_setting['key']] = auth_setting['value']
+ elif auth_setting['in'] == 'query':
+ queries.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
- "http method must be `GET`, `HEAD`, `OPTIONS`,"
- " `POST`, `PATCH`, `PUT` or `DELETE`."
+ 'Authentication token must be in `query` or `header`'
)
- def update_params_for_auth(self, headers, auth_settings,
- resource_path, method, body):
- """Updates header and query params based on authentication setting.
+ def __deserialize_file(self, response):
+ """Deserializes body to file
- :param headers: Header parameters dict to be updated.
- :param auth_settings: Authentication setting identifiers list.
- :param resource_path: A string representation of the HTTP request resource path.
- :param method: A string representation of the HTTP request method.
- :param body: A object representing the body of the HTTP request.
- The object type is the return value of _encoder.default().
- """
- if not auth_settings:
- return
+ Saves response body into a file in a temporary folder,
+ using the filename from the `Content-Disposition` header if provided.
- for auth in auth_settings:
- auth_setting = self.configuration.auth_settings().get(auth)
- if not auth_setting:
- continue
- if auth_setting['in'] == 'cookie':
- headers.add('Cookie', auth_setting['value'])
- elif auth_setting['in'] == 'header':
- if auth_setting['type'] != 'http-signature':
- headers.add(auth_setting['key'], auth_setting['value'])
- elif auth_setting['in'] == 'query':
- """ TODO implement auth in query
- need to pass in prefix_separator_iterator
- and need to output resource_path with query params added
- """
- raise ApiValueError("Auth in query not yet implemented")
- else:
- raise ApiValueError(
- 'Authentication token must be in `query` or `header`'
- )
+        :param response: RESTResponse.
+        :return: file path.
+ """
+ fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
+ os.close(fd)
+ os.remove(path)
+
+ content_disposition = response.getheader("Content-Disposition")
+ if content_disposition:
+ m = re.search(
+ r'filename=[\'"]?([^\'"\s]+)[\'"]?',
+ content_disposition
+ )
+ assert m is not None, "Unexpected 'content-disposition' header value"
+ filename = m.group(1)
+ path = os.path.join(os.path.dirname(path), filename)
-class Api:
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
+ with open(path, "wb") as f:
+ f.write(response.data)
- Do not edit the class manually.
- """
+ return path
+
+ def __deserialize_primitive(self, data, klass):
+ """Deserializes string to primitive type.
- def __init__(self, api_client: typing.Optional[ApiClient] = None):
- if api_client is None:
- api_client = ApiClient()
- self.api_client = api_client
+ :param data: str.
+ :param klass: class literal.
- @staticmethod
- def _verify_typed_dict_inputs_oapg(cls: typing.Type[typing_extensions.TypedDict], data: typing.Dict[str, typing.Any]):
+ :return: int, long, float, str, bool.
"""
- Ensures that:
- - required keys are present
- - additional properties are not input
- - value stored under required keys do not have the value unset
- Note: detailed value checking is done in schema classes
+ try:
+ return klass(data)
+ except UnicodeEncodeError:
+ return str(data)
+ except TypeError:
+ return data
+
+ def __deserialize_object(self, value):
+ """Return an original value.
+
+ :return: object.
"""
- missing_required_keys = []
- required_keys_with_unset_values = []
- for required_key in cls.__required_keys__:
- if required_key not in data:
- missing_required_keys.append(required_key)
- continue
- value = data[required_key]
- if value is unset:
- required_keys_with_unset_values.append(required_key)
- if missing_required_keys:
- raise ApiTypeError(
- '{} missing {} required arguments: {}'.format(
- cls.__name__, len(missing_required_keys), missing_required_keys
- )
- )
- if required_keys_with_unset_values:
- raise ApiValueError(
- '{} contains invalid unset values for {} required keys: {}'.format(
- cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
- )
- )
+ return value
- disallowed_additional_keys = []
- for key in data:
- if key in cls.__required_keys__ or key in cls.__optional_keys__:
- continue
- disallowed_additional_keys.append(key)
- if disallowed_additional_keys:
- raise ApiTypeError(
- '{} got {} unexpected keyword arguments: {}'.format(
- cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
- )
- )
+ def __deserialize_date(self, string):
+ """Deserializes string to date.
- def _get_host_oapg(
- self,
- operation_id: str,
- servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
- host_index: typing.Optional[int] = None
- ) -> typing.Optional[str]:
- configuration = self.api_client.configuration
+ :param string: str.
+ :return: date.
+ """
try:
- if host_index is None:
- index = configuration.server_operation_index.get(
- operation_id, configuration.server_index
- )
- else:
- index = host_index
- server_variables = configuration.server_operation_variables.get(
- operation_id, configuration.server_variables
- )
- host = configuration.get_host_from_settings(
- index, variables=server_variables, servers=servers
+ return parse(string).date()
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason="Failed to parse `{0}` as date object".format(string)
)
- except IndexError:
- if servers:
- raise ApiValueError(
- "Invalid host index. Must be 0 <= index < %s" %
- len(servers)
- )
- host = None
- return host
-
-
-class SerializedRequestBody(typing_extensions.TypedDict, total=False):
- body: typing.Union[str, bytes]
- fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+ def __deserialize_datetime(self, string):
+ """Deserializes string to datetime.
-class RequestBody(StyleFormSerializer, JSONDetector):
- """
- A request body parameter
- content: content_type to MediaType Schema info
- """
- __json_encoder = JSONEncoder()
+ The string should be in iso8601 datetime format.
- def __init__(
- self,
- content: typing.Dict[str, MediaType],
- required: bool = False,
- ):
- self.required = required
- if len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
-
- def __serialize_json(
- self,
- in_data: typing.Any
- ) -> typing.Dict[str, bytes]:
- in_data = self.__json_encoder.default(in_data)
- json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
- "utf-8"
- )
- return dict(body=json_str)
-
- @staticmethod
- def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
- if isinstance(in_data, frozendict.frozendict):
- raise ValueError('Unable to serialize type frozendict.frozendict to text/plain')
- elif isinstance(in_data, tuple):
- raise ValueError('Unable to serialize type tuple to text/plain')
- elif isinstance(in_data, NoneClass):
- raise ValueError('Unable to serialize type NoneClass to text/plain')
- elif isinstance(in_data, BoolClass):
- raise ValueError('Unable to serialize type BoolClass to text/plain')
- return dict(body=str(in_data))
-
- def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
- json_value = self.__json_encoder.default(value)
- return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
-
- def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
- if isinstance(value, str):
- return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
- elif isinstance(value, bytes):
- return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
- elif isinstance(value, FileIO):
- request_field = RequestField(
- name=key,
- data=value.read(),
- filename=os.path.basename(value.name),
- headers={'Content-Type': 'application/octet-stream'}
+ :param string: str.
+ :return: datetime.
+ """
+ try:
+ return parse(string)
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as datetime object"
+ .format(string)
+ )
)
- value.close()
- return request_field
- else:
- return self.__multipart_json_item(key=key, value=value)
- def __serialize_multipart_form_data(
- self, in_data: Schema
- ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data')
+ def __deserialize_enum(self, data, klass):
+ """Deserializes primitive type to enum.
+
+ :param data: primitive type.
+ :param klass: class literal.
+ :return: enum value.
"""
- In a multipart/form-data request body, each schema property, or each element of a schema array property,
- takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
- for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+ try:
+ return klass(data)
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as `{1}`"
+ .format(data, klass)
+ )
+ )
- When passing in multipart types, boundaries MAY be used to separate sections of the content being
- transferred – thus, the following default Content-Types are defined for multipart:
+ def __deserialize_model(self, data, klass):
+ """Deserializes list or dict to model.
- If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
- If the property is complex, or an array of complex values, the default Content-Type is application/json
- Question: how is the array of primitives encoded?
- If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
- """
- fields = []
- for key, value in in_data.items():
- if isinstance(value, tuple):
- if value:
- # values use explode = True, so the code makes a RequestField for each item with name=key
- for item in value:
- request_field = self.__multipart_form_item(key=key, value=item)
- fields.append(request_field)
- else:
- # send an empty array as json because exploding will not send it
- request_field = self.__multipart_json_item(key=key, value=value)
- fields.append(request_field)
- else:
- request_field = self.__multipart_form_item(key=key, value=value)
- fields.append(request_field)
-
- return dict(fields=tuple(fields))
-
- def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
- if isinstance(in_data, bytes):
- return dict(body=in_data)
- # FileIO type
- result = dict(body=in_data.read())
- in_data.close()
- return result
-
- def __serialize_application_x_www_form_data(
- self, in_data: typing.Any
- ) -> SerializedRequestBody:
+ :param data: dict, list.
+ :param klass: class literal.
+ :return: model object.
"""
- POST submission of form data in body
- """
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(
- f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
- cast_in_data = self.__json_encoder.default(in_data)
- value = self._serialize_form(cast_in_data, name='', explode=True, percent_encode=True)
- return dict(body=value)
-
- def serialize(
- self, in_data: typing.Any, content_type: str
- ) -> SerializedRequestBody:
- """
- If a str is returned then the result will be assigned to data when making the request
- If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
- Return a tuple of
- The key of the return dict is
- - body for application/json
- - encode_multipart and fields for multipart/form-data
- """
- media_type = self.content[content_type]
- if isinstance(in_data, media_type.schema):
- cast_in_data = in_data
- elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
- cast_in_data = media_type.schema(**in_data)
- else:
- cast_in_data = media_type.schema(in_data)
- # TODO check for and use encoding if it exists
- # and content_type is multipart or application/x-www-form-urlencoded
- if self._content_type_is_json(content_type):
- return self.__serialize_json(cast_in_data)
- elif content_type == 'text/plain':
- return self.__serialize_text_plain(cast_in_data)
- elif content_type == 'multipart/form-data':
- return self.__serialize_multipart_form_data(cast_in_data)
- elif content_type == 'application/x-www-form-urlencoded':
- return self.__serialize_application_x_www_form_data(cast_in_data)
- elif content_type == 'application/octet-stream':
- return self.__serialize_application_octet_stream(cast_in_data)
- raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
+ return klass.from_dict(data)
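For reference, the filename extraction performed by the new `__deserialize_file` helper above can be exercised on its own. A minimal sketch, assuming only the standard library; the header values are illustrative, not taken from the patch:

    import re
    from typing import Optional

    def filename_from_content_disposition(value: str) -> Optional[str]:
        # Same pattern the patched __deserialize_file applies to the
        # Content-Disposition header before writing response.data to disk.
        m = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', value)
        return m.group(1) if m else None

    assert filename_from_content_disposition('attachment; filename="report.csv"') == "report.csv"
    assert filename_from_content_disposition("inline") is None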
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_response.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_response.py
new file mode 100644
index 000000000..9bc7c11f6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/api_response.py
@@ -0,0 +1,21 @@
+"""API response object."""
+
+from __future__ import annotations
+from typing import Optional, Generic, Mapping, TypeVar
+from pydantic import Field, StrictInt, StrictBytes, BaseModel
+
+T = TypeVar("T")
+
+class ApiResponse(BaseModel, Generic[T]):
+ """
+ API response object
+ """
+
+ status_code: StrictInt = Field(description="HTTP status code")
+ headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers")
+ data: T = Field(description="Deserialized data given the data type")
+ raw_data: StrictBytes = Field(description="Raw data (HTTP response body)")
+
+ model_config = {
+ "arbitrary_types_allowed": True
+ }
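A minimal usage sketch of this generic wrapper; in practice instances are produced by the generated client, and the field values below are illustrative:

    from cloudharness_cli.volumemanager.api_response import ApiResponse
    from cloudharness_cli.volumemanager.models import PersistentVolumeClaim

    pvc = PersistentVolumeClaim(name="pvc-1", size="1Gi")
    resp = ApiResponse[PersistentVolumeClaim](
        status_code=200,
        headers={"Content-Type": "application/json"},
        data=pvc,                          # deserialized payload
        raw_data=pvc.to_json().encode(),   # raw HTTP response body
    )
    assert resp.data.name == "pvc-1"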
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/__init__.py
deleted file mode 100644
index 7840f7726..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints then import them from
-# tags, paths, or path_to_api, or tag_to_api
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/path_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/path_to_api.py
deleted file mode 100644
index 6d420fcbe..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/path_to_api.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.volumemanager.paths import PathValues
-from cloudharness_cli.volumemanager.apis.paths.pvc import Pvc
-from cloudharness_cli.volumemanager.apis.paths.pvc_name import PvcName
-
-PathToApi = typing_extensions.TypedDict(
- 'PathToApi',
- {
- PathValues.PVC: Pvc,
- PathValues.PVC_NAME: PvcName,
- }
-)
-
-path_to_api = PathToApi(
- {
- PathValues.PVC: Pvc,
- PathValues.PVC_NAME: PvcName,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/__init__.py
deleted file mode 100644
index 5fa427f9e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.volumemanager.apis.path_to_api import path_to_api
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc.py
deleted file mode 100644
index 587cdcfa6..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.volumemanager.paths.pvc.post import ApiForpost
-
-
-class Pvc(
- ApiForpost,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc_name.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc_name.py
deleted file mode 100644
index 623732eff..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/paths/pvc_name.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.volumemanager.paths.pvc_name.get import ApiForget
-
-
-class PvcName(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tag_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tag_to_api.py
deleted file mode 100644
index a665128d9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tag_to_api.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.volumemanager.apis.tags import TagValues
-from cloudharness_cli.volumemanager.apis.tags.rest_api import RestApi
-
-TagToApi = typing_extensions.TypedDict(
- 'TagToApi',
- {
- TagValues.REST: RestApi,
- }
-)
-
-tag_to_api = TagToApi(
- {
- TagValues.REST: RestApi,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/__init__.py
deleted file mode 100644
index d0e842928..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.volumemanager.apis.tag_to_api import tag_to_api
-
-import enum
-
-
-class TagValues(str, enum.Enum):
- REST = "rest"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/rest_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/rest_api.py
deleted file mode 100644
index af13c644f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/apis/tags/rest_api.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.volumemanager.paths.pvc_name.get import PvcNameGet
-from cloudharness_cli.volumemanager.paths.pvc.post import PvcPost
-
-
-class RestApi(
- PvcNameGet,
- PvcPost,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/configuration.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/configuration.py
index deaf2899b..ad9dffaf7 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/configuration.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/configuration.py
@@ -3,69 +3,48 @@
"""
Volumes manager API
- CloudHarness Volumes manager API # noqa: E501
+ CloudHarness Volumes manager API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
import copy
import logging
+from logging import FileHandler
import multiprocessing
import sys
+from typing import Optional
import urllib3
-from http import client as http_client
-from cloudharness_cli.volumemanager.exceptions import ApiValueError
-
+import http.client as httplib
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
- 'minLength', 'pattern', 'maxItems', 'minItems',
- 'uniqueItems', 'maxProperties', 'minProperties',
+ 'minLength', 'pattern', 'maxItems', 'minItems'
}
-class Configuration(object):
- """NOTE: This class is auto generated by OpenAPI Generator
+class Configuration:
+ """This class contains various settings of the API client.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
-
- :param host: Base url
+ :param host: Base url.
+    :param ignore_operation_servers: Boolean to ignore operation servers for the API client.
+        Config will use `host` as the base url regardless of the operation servers.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
- :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
- :param username: Username for HTTP basic authentication
- :param password: Password for HTTP basic authentication
- :param discard_unknown_keys: Boolean value indicating whether to discard
- unknown properties. A server may send a response that includes additional
- properties that are not known by the client in the following scenarios:
- 1. The OpenAPI document is incomplete, i.e. it does not match the server
- implementation.
- 2. The client was generated using an older version of the OpenAPI document
- and the server has been upgraded since then.
- If a schema in the OpenAPI document defines the additionalProperties attribute,
- then all undeclared properties received by the server are injected into the
- additional properties map. In that case, there are undeclared properties, and
- nothing to discard.
- :param disabled_client_side_validations (string): Comma-separated list of
- JSON schema validation keywords to disable JSON schema structural validation
- rules. The following keywords may be specified: multipleOf, maximum,
- exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
- maxItems, minItems.
- By default, the validation is performed for data generated locally by the client
- and data received from the server, independent of any validation performed by
- the server side. If the input data does not satisfy the JSON schema validation
- rules specified in the OpenAPI document, an exception is raised.
- If disabled_client_side_validations is set, structural validation is
- disabled. This can be useful to troubleshoot data validation problem, such as
- when the OpenAPI document validation rules do not match the actual API data
- received by the server.
+ :param username: Username for HTTP basic authentication.
+ :param password: Password for HTTP basic authentication.
+ :param access_token: Access token.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
@@ -74,7 +53,11 @@ class Configuration(object):
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
- The validation of enums is performed for variables with defined enum values before.
+ The validation of enums is performed for variables with defined enum
+      values before substitution.
+ :param ssl_ca_cert: str - the path to a file of concatenated CA certificates
+ in PEM format.
+ :param retries: Number of retries for API requests.
:Example:
"""
@@ -84,11 +67,15 @@ class Configuration(object):
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
username=None, password=None,
- discard_unknown_keys=False,
- disabled_client_side_validations="",
+ access_token=None,
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
- ):
+ ignore_operation_servers=False,
+ ssl_ca_cert=None,
+ retries=None,
+ *,
+ debug: Optional[bool] = None
+ ) -> None:
"""Constructor
"""
self._base_path = "/api" if host is None else host
@@ -102,6 +89,9 @@ def __init__(self, host=None,
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
+ self.ignore_operation_servers = ignore_operation_servers
+ """Ignore operation servers
+ """
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
@@ -125,8 +115,9 @@ def __init__(self, host=None,
self.password = password
"""Password for HTTP basic authentication
"""
- self.discard_unknown_keys = discard_unknown_keys
- self.disabled_client_side_validations = disabled_client_side_validations
+ self.access_token = access_token
+ """Access token
+ """
self.logger = {}
"""Logging Settings
"""
@@ -138,13 +129,16 @@ def __init__(self, host=None,
self.logger_stream_handler = None
"""Log stream handler
"""
- self.logger_file_handler = None
+ self.logger_file_handler: Optional[FileHandler] = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
- self.debug = False
+ if debug is not None:
+ self.debug = debug
+ else:
+ self.__debug = False
"""Debug switch
"""
@@ -153,7 +147,7 @@ def __init__(self, host=None,
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
- self.ssl_ca_cert = None
+ self.ssl_ca_cert = ssl_ca_cert
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
@@ -165,6 +159,10 @@ def __init__(self, host=None,
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
+ self.tls_server_name = None
+ """SSL/TLS Server Name Indication (SNI)
+ Set this to the SNI value expected by the server.
+ """
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
@@ -174,7 +172,7 @@ def __init__(self, host=None,
cpu_count * 5 is used as default value to increase performance.
"""
- self.proxy = None
+ self.proxy: Optional[str] = None
"""Proxy URL
"""
self.proxy_headers = None
@@ -183,14 +181,23 @@ def __init__(self, host=None,
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
- self.retries = None
+ self.retries = retries
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
- # Options to pass down to the underlying urllib3 socket
self.socket_options = None
+ """Options to pass down to the underlying urllib3 socket
+ """
+
+ self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+ """datetime format
+ """
+
+ self.date_format = "%Y-%m-%d"
+ """date format
+ """
def __deepcopy__(self, memo):
cls = self.__class__
@@ -208,13 +215,6 @@ def __deepcopy__(self, memo):
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
- if name == 'disabled_client_side_validations':
- s = set(filter(None, value.split(',')))
- for v in s:
- if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
- raise ApiValueError(
- "Invalid keyword: '{0}''".format(v))
- self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
@@ -225,21 +225,31 @@ def set_default(cls, default):
:param default: object of Configuration
"""
- cls._default = copy.deepcopy(default)
+ cls._default = default
@classmethod
def get_default_copy(cls):
- """Return new instance of configuration.
+ """Deprecated. Please use `get_default` instead.
+
+ Deprecated. Please use `get_default` instead.
+
+ :return: The configuration object.
+ """
+ return cls.get_default()
+
+ @classmethod
+ def get_default(cls):
+ """Return the default configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
- configuration passed by the set_default method.
+ configuration.
:return: The configuration object.
"""
- if cls._default is not None:
- return copy.deepcopy(cls._default)
- return Configuration()
+ if cls._default is None:
+ cls._default = Configuration()
+ return cls._default
@property
def logger_file(self):
@@ -293,15 +303,15 @@ def debug(self, value):
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
- # turn on http_client debug
- http_client.HTTPConnection.debuglevel = 1
+ # turn on httplib debug
+ httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
- # turn off http_client debug
- http_client.HTTPConnection.debuglevel = 0
+ # turn off httplib debug
+ httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
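A minimal sketch of constructing the reworked Configuration with the parameters this change exposes (`ssl_ca_cert`, `retries`, and the keyword-only `debug`); the host and certificate path are illustrative:

    from cloudharness_cli.volumemanager.configuration import Configuration

    config = Configuration(
        host="https://volumemanager.example.com/api",  # illustrative
        ssl_ca_cert="/etc/ssl/certs/ca-bundle.pem",    # illustrative
        retries=3,
        debug=True,  # keyword-only; toggles http.client debug output
    )
    Configuration.set_default(config)       # now stored by reference, no deepcopy
    assert Configuration.get_default() is config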
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/exceptions.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/exceptions.py
index aef8e589c..fd6f058c1 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/exceptions.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/exceptions.py
@@ -3,12 +3,16 @@
"""
Volumes manager API
- CloudHarness Volumes manager API # noqa: E501
+ CloudHarness Volumes manager API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+from typing import Any, Optional
+from typing_extensions import Self
class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
@@ -16,7 +20,7 @@ class OpenApiException(Exception):
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
- key_type=None):
+ key_type=None) -> None:
""" Raises an exception for TypeErrors
Args:
@@ -44,7 +48,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None,
class ApiValueError(OpenApiException, ValueError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -62,7 +66,7 @@ def __init__(self, msg, path_to_item=None):
class ApiAttributeError(OpenApiException, AttributeError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Raised when an attribute reference or assignment fails.
@@ -81,7 +85,7 @@ def __init__(self, msg, path_to_item=None):
class ApiKeyError(OpenApiException, KeyError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -99,17 +103,56 @@ def __init__(self, msg, path_to_item=None):
class ApiException(OpenApiException):
- def __init__(self, status=None, reason=None, api_response: 'cloudharness_cli.volumemanager.api_client.ApiResponse' = None):
- if api_response:
- self.status = api_response.response.status
- self.reason = api_response.response.reason
- self.body = api_response.response.data
- self.headers = api_response.response.getheaders()
- else:
- self.status = status
- self.reason = reason
- self.body = None
- self.headers = None
+ def __init__(
+ self,
+ status=None,
+ reason=None,
+ http_resp=None,
+ *,
+ body: Optional[str] = None,
+ data: Optional[Any] = None,
+ ) -> None:
+ self.status = status
+ self.reason = reason
+ self.body = body
+ self.data = data
+ self.headers = None
+
+ if http_resp:
+ if self.status is None:
+ self.status = http_resp.status
+ if self.reason is None:
+ self.reason = http_resp.reason
+ if self.body is None:
+ try:
+ self.body = http_resp.data.decode('utf-8')
+ except Exception:
+ pass
+ self.headers = http_resp.getheaders()
+
+ @classmethod
+ def from_response(
+ cls,
+ *,
+ http_resp,
+ body: Optional[str],
+ data: Optional[Any],
+ ) -> Self:
+ if http_resp.status == 400:
+ raise BadRequestException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 401:
+ raise UnauthorizedException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 403:
+ raise ForbiddenException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 404:
+ raise NotFoundException(http_resp=http_resp, body=body, data=data)
+
+ if 500 <= http_resp.status <= 599:
+ raise ServiceException(http_resp=http_resp, body=body, data=data)
+ raise ApiException(http_resp=http_resp, body=body, data=data)
def __str__(self):
"""Custom error messages for exception"""
@@ -119,12 +162,32 @@ def __str__(self):
error_message += "HTTP response headers: {0}\n".format(
self.headers)
- if self.body:
- error_message += "HTTP response body: {0}\n".format(self.body)
+ if self.data or self.body:
+ error_message += "HTTP response body: {0}\n".format(self.data or self.body)
return error_message
+class BadRequestException(ApiException):
+ pass
+
+
+class NotFoundException(ApiException):
+ pass
+
+
+class UnauthorizedException(ApiException):
+ pass
+
+
+class ForbiddenException(ApiException):
+ pass
+
+
+class ServiceException(ApiException):
+ pass
+
+
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
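The new `ApiException.from_response` factory maps HTTP status codes to the subclasses introduced above (400, 401, 403, 404, and 5xx). A minimal sketch of that dispatch; `FakeResponse` is a hypothetical stand-in for a urllib3 response:

    from cloudharness_cli.volumemanager.exceptions import ApiException, NotFoundException

    class FakeResponse:  # hypothetical stand-in for urllib3.HTTPResponse
        status = 404
        reason = "Not Found"
        data = b'{"detail": "no such pvc"}'
        def getheaders(self):
            return {"Content-Type": "application/json"}

    try:
        ApiException.from_response(http_resp=FakeResponse(), body=None, data=None)
    except NotFoundException as e:
        assert e.status == 404 and e.reason == "Not Found"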
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/__init__.py
deleted file mode 100644
index ba634dd07..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we can not import model classes here because that would create a circular
-# reference which would not work in python2
-# do not import all models into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all models from one package, import them with
-# from cloudharness_cli.volumemanager.models import ModelA, ModelB
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.py
deleted file mode 100644
index 70f7f43f1..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-
-class PersistentVolumeClaim(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- name = schemas.StrSchema
- namespace = schemas.StrSchema
- accessmode = schemas.StrSchema
- size = schemas.StrSchema
- __annotations__ = {
- "name": name,
- "namespace": namespace,
- "accessmode": accessmode,
- "size": size,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["namespace"]) -> MetaOapg.properties.namespace: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["accessmode"]) -> MetaOapg.properties.accessmode: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["size"]) -> MetaOapg.properties.size: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["name", "namespace", "accessmode", "size", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["namespace"]) -> typing.Union[MetaOapg.properties.namespace, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["accessmode"]) -> typing.Union[MetaOapg.properties.accessmode, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["size"]) -> typing.Union[MetaOapg.properties.size, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["name", "namespace", "accessmode", "size", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- namespace: typing.Union[MetaOapg.properties.namespace, str, schemas.Unset] = schemas.unset,
- accessmode: typing.Union[MetaOapg.properties.accessmode, str, schemas.Unset] = schemas.unset,
- size: typing.Union[MetaOapg.properties.size, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'PersistentVolumeClaim':
- return super().__new__(
- cls,
- *args,
- name=name,
- namespace=namespace,
- accessmode=accessmode,
- size=size,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.pyi
deleted file mode 100644
index 70f7f43f1..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim.pyi
+++ /dev/null
@@ -1,108 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-
-class PersistentVolumeClaim(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- name = schemas.StrSchema
- namespace = schemas.StrSchema
- accessmode = schemas.StrSchema
- size = schemas.StrSchema
- __annotations__ = {
- "name": name,
- "namespace": namespace,
- "accessmode": accessmode,
- "size": size,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["namespace"]) -> MetaOapg.properties.namespace: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["accessmode"]) -> MetaOapg.properties.accessmode: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["size"]) -> MetaOapg.properties.size: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["name", "namespace", "accessmode", "size", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["namespace"]) -> typing.Union[MetaOapg.properties.namespace, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["accessmode"]) -> typing.Union[MetaOapg.properties.accessmode, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["size"]) -> typing.Union[MetaOapg.properties.size, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["name", "namespace", "accessmode", "size", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- namespace: typing.Union[MetaOapg.properties.namespace, str, schemas.Unset] = schemas.unset,
- accessmode: typing.Union[MetaOapg.properties.accessmode, str, schemas.Unset] = schemas.unset,
- size: typing.Union[MetaOapg.properties.size, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'PersistentVolumeClaim':
- return super().__new__(
- cls,
- *args,
- name=name,
- namespace=namespace,
- accessmode=accessmode,
- size=size,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.py
deleted file mode 100644
index d9c1eb7b4..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-
-class PersistentVolumeClaimCreate(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- name = schemas.StrSchema
- size = schemas.StrSchema
- __annotations__ = {
- "name": name,
- "size": size,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["size"]) -> MetaOapg.properties.size: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["name", "size", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["size"]) -> typing.Union[MetaOapg.properties.size, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["name", "size", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- size: typing.Union[MetaOapg.properties.size, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'PersistentVolumeClaimCreate':
- return super().__new__(
- cls,
- *args,
- name=name,
- size=size,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.pyi
deleted file mode 100644
index d9c1eb7b4..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/model/persistent_volume_claim_create.pyi
+++ /dev/null
@@ -1,88 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-
-class PersistentVolumeClaimCreate(
- schemas.DictSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
-
- class properties:
- name = schemas.StrSchema
- size = schemas.StrSchema
- __annotations__ = {
- "name": name,
- "size": size,
- }
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["size"]) -> MetaOapg.properties.size: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["name", "size", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["size"]) -> typing.Union[MetaOapg.properties.size, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["name", "size", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, ],
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- size: typing.Union[MetaOapg.properties.size, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'PersistentVolumeClaimCreate':
- return super().__new__(
- cls,
- *args,
- name=name,
- size=size,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/__init__.py
index 564a51baf..677a3502c 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/__init__.py
@@ -1,15 +1,18 @@
# coding: utf-8
# flake8: noqa
+"""
+ Volumes manager API
-# import all models into this package
-# if you have many models here with many references from one model to another this may
-# raise a RecursionError
-# to avoid this, import only the models that you directly need like:
-# from from cloudharness_cli.volumemanager.model.pet import Pet
-# or import this package, but before doing it, use:
-# import sys
-# sys.setrecursionlimit(n)
-
-from cloudharness_cli.volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from cloudharness_cli.volumemanager.model.persistent_volume_claim_create import PersistentVolumeClaimCreate
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+# import models into model package
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim.py
new file mode 100644
index 000000000..ca2b7fc5b
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing import Optional, Set
+from typing_extensions import Self
+
+class PersistentVolumeClaim(BaseModel):
+ """
+ PersistentVolumeClaim
+ """ # noqa: E501
+    name: Optional[StrictStr] = Field(default=None, description="Unique name for the Persistent Volume Claim")
+    namespace: Optional[StrictStr] = Field(default=None, description="The namespace where the Persistent Volume Claim resides")
+ accessmode: Optional[StrictStr] = Field(default=None, description="The accessmode of the Persistent Volume Claim")
+ size: Optional[StrictStr] = Field(default=None, description="The size of the Persistent Volume Claim.")
+ __properties: ClassVar[List[str]] = ["name", "namespace", "accessmode", "size"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of PersistentVolumeClaim from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of PersistentVolumeClaim from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "name": obj.get("name"),
+ "namespace": obj.get("namespace"),
+ "accessmode": obj.get("accessmode"),
+ "size": obj.get("size")
+ })
+ return _obj
+
+
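A minimal round-trip sketch for the model above; the payload values are illustrative:

    from cloudharness_cli.volumemanager.models import PersistentVolumeClaim

    payload = '{"name": "pvc-1", "namespace": "default", "accessmode": "ReadWriteOnce", "size": "1Gi"}'
    pvc = PersistentVolumeClaim.from_json(payload)
    assert pvc is not None and pvc.size == "1Gi"
    # to_dict() omits fields left as None, so set fields round-trip unchanged:
    assert pvc.to_dict() == {"name": "pvc-1", "namespace": "default",
                             "accessmode": "ReadWriteOnce", "size": "1Gi"}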
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim_create.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim_create.py
new file mode 100644
index 000000000..1450db94a
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/models/persistent_volume_claim_create.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing import Optional, Set
+from typing_extensions import Self
+
+class PersistentVolumeClaimCreate(BaseModel):
+ """
+ PersistentVolumeClaimCreate
+ """ # noqa: E501
+    name: Optional[StrictStr] = Field(default=None, description="Unique name for the Persistent Volume Claim to create.")
+ size: Optional[StrictStr] = Field(default=None, description="The size of the Persistent Volume Claim to create.")
+ __properties: ClassVar[List[str]] = ["name", "size"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of PersistentVolumeClaimCreate from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of PersistentVolumeClaimCreate from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "name": obj.get("name"),
+ "size": obj.get("size")
+ })
+ return _obj
+
+
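A minimal sketch of building a request body with the model above; the values are illustrative:

    from cloudharness_cli.volumemanager.models import PersistentVolumeClaimCreate

    create = PersistentVolumeClaimCreate(name="pvc-1", size="1Gi")
    # Fields left unset are dropped by to_dict(), keeping the POST body minimal:
    assert create.to_json() == '{"name": "pvc-1", "size": "1Gi"}'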
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/__init__.py
deleted file mode 100644
index 4ae0da2f9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.volumemanager.apis.path_to_api import path_to_api
-
-import enum
-
-
-class PathValues(str, enum.Enum):
- PVC = "/pvc"
- PVC_NAME = "/pvc/{name}"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/__init__.py
deleted file mode 100644
index 62c450d50..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.volumemanager.paths.pvc import Api
-
-from cloudharness_cli.volumemanager.paths import PathValues
-
-path = PathValues.PVC
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.py
deleted file mode 100644
index 4dcc22ffc..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.py
+++ /dev/null
@@ -1,347 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.volumemanager import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-from cloudharness_cli/volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from cloudharness_cli/volumemanager.model.persistent_volume_claim_create import PersistentVolumeClaimCreate
-
-from . import path
-
-# body param
-SchemaForRequestBodyApplicationJson = PersistentVolumeClaimCreate
-
-
-request_body_persistent_volume_claim_create = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-_auth = [
- 'bearerAuth',
-]
-SchemaFor201ResponseBodyApplicationJson = PersistentVolumeClaim
-
-
-@dataclass
-class ApiResponseFor201(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor201ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_201 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor201,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor201ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-_status_code_to_response = {
- '201': _response_for_201,
- '400': _response_for_400,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Create a Persistent Volume Claim in Kubernetes
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_persistent_volume_claim_create.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='post'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class PvcPost(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_post_oapg(
- body=body,
- content_type=content_type,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForpost(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_post_oapg(
- body=body,
- content_type=content_type,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
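For context, a minimal sketch of how the operation module deleted above was typically driven. Only names that appear in that module (PvcPost, pvc_post, the api_client and Configuration imports) are used; the host, token, and body fields are illustrative assumptions, not taken from this patch.

# Usage sketch for the deleted pvc POST module; endpoint values are assumed.
from cloudharness_cli.volumemanager import api_client
from cloudharness_cli.volumemanager.configuration import Configuration
from cloudharness_cli.volumemanager.paths.pvc.post import PvcPost

configuration = Configuration(host="http://volumemanager.cloudharness.local")  # assumed host
configuration.access_token = "<bearer token>"  # the operation declares the 'bearerAuth' scheme

with api_client.ApiClient(configuration) as client:
    api = PvcPost(client)
    # Object schemas accept plain dicts; 'name' and 'size' are assumed
    # PersistentVolumeClaimCreate fields, not confirmed by this patch.
    api_response = api.pvc_post(body={"name": "mypvc", "size": "1Gi"})
    print(api_response.body)  # deserialized PersistentVolumeClaim on HTTP 201
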
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.pyi
deleted file mode 100644
index 67697b9ec..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc/post.pyi
+++ /dev/null
@@ -1,338 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.volumemanager import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-from cloudharness_cli.volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from cloudharness_cli.volumemanager.model.persistent_volume_claim_create import PersistentVolumeClaimCreate
-
-# body param
-SchemaForRequestBodyApplicationJson = PersistentVolumeClaimCreate
-
-
-request_body_persistent_volume_claim_create = api_client.RequestBody(
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaForRequestBodyApplicationJson),
- },
- required=True,
-)
-SchemaFor201ResponseBodyApplicationJson = PersistentVolumeClaim
-
-
-@dataclass
-class ApiResponseFor201(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor201ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_201 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor201,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor201ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _pvc_post_oapg(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Create a Persistent Volume Claim in Kubernetes
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- used_path = path.value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- if body is schemas.unset:
- raise exceptions.ApiValueError(
- 'The required body parameter has an invalid value of: unset. Set a valid value instead')
- _fields = None
- _body = None
- serialized_data = request_body_persistent_volume_claim_create.serialize(body, content_type)
- _headers.add('Content-Type', content_type)
- if 'fields' in serialized_data:
- _fields = serialized_data['fields']
- elif 'body' in serialized_data:
- _body = serialized_data['body']
- response = self.api_client.call_api(
- resource_path=used_path,
- method='post'.upper(),
- headers=_headers,
- fields=_fields,
- body=_body,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class PvcPost(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def pvc_post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_post_oapg(
- body=body,
- content_type=content_type,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForpost(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: typing_extensions.Literal["application/json"] = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- ]: ...
-
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- skip_deserialization: typing_extensions.Literal[True],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = ...,
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor201,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def post(
- self,
- body: typing.Union[SchemaForRequestBodyApplicationJson,],
- content_type: str = 'application/json',
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_post_oapg(
- body=body,
- content_type=content_type,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
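The stub above mirrors its runtime counterpart so that type checkers can resolve the skip_deserialization overloads without importing the implementation. A pared-down, purely illustrative sketch of that pattern, with Raw and Parsed standing in for ApiResponseWithoutDeserialization and ApiResponseFor201:

import typing
import typing_extensions


class Raw: ...     # stands in for ApiResponseWithoutDeserialization
class Parsed: ...  # stands in for ApiResponseFor201


class Api:
    @typing.overload
    def call(self, skip_deserialization: typing_extensions.Literal[True]) -> Raw: ...

    @typing.overload
    def call(self, skip_deserialization: typing_extensions.Literal[False] = ...) -> Parsed: ...

    def call(self, skip_deserialization: bool = False):
        # A checker now infers Raw for call(skip_deserialization=True) and
        # Parsed for the default call, mirroring the stubs deleted above.
        return Raw() if skip_deserialization else Parsed()
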
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/__init__.py
deleted file mode 100644
index 2a282a50a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.volumemanager.paths.pvc_name import Api
-
-from cloudharness_cli.volumemanager.paths import PathValues
-
-path = PathValues.PVC_NAME
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.py
deleted file mode 100644
index f912ab6c2..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.py
+++ /dev/null
@@ -1,306 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.volumemanager import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-from cloudharness_cli.volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-
-from . import path
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-_auth = [
- 'bearerAuth',
-]
-SchemaFor200ResponseBodyApplicationJson = PersistentVolumeClaim
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '404': _response_for_404,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Retrieve a Persistent Volume Claim from the Kubernetes repository.
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class PvcNameGet(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def pvc_name_get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_name_get_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_name_get_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
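The same calling convention applies to the GET-by-name module deleted above; path_params is the 'name'-keyed TypedDict defined in that module, while the host and token remain placeholder assumptions.

from cloudharness_cli.volumemanager import api_client
from cloudharness_cli.volumemanager.configuration import Configuration
from cloudharness_cli.volumemanager.paths.pvc_name.get import PvcNameGet

configuration = Configuration(host="http://volumemanager.cloudharness.local")  # assumed host
configuration.access_token = "<bearer token>"

with api_client.ApiClient(configuration) as client:
    api = PvcNameGet(client)
    api_response = api.pvc_name_get(path_params={"name": "mypvc"})
    print(api_response.body)  # PersistentVolumeClaim on 200; ApiException is raised on 404
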
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.pyi
deleted file mode 100644
index d75717b1a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/paths/pvc_name/get.pyi
+++ /dev/null
@@ -1,297 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.volumemanager import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.volumemanager import schemas # noqa: F401
-
-from cloudharness_cli.volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-SchemaFor200ResponseBodyApplicationJson = PersistentVolumeClaim
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _pvc_name_get_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- Retrieve a Persistent Volume Claim from the Kubernetes repository.
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- auth_settings=_auth,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class PvcNameGet(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def pvc_name_get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def pvc_name_get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_name_get_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._pvc_name_get_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/py.typed
similarity index 100%
rename from libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/__init__.py
rename to libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/py.typed
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/rest.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/rest.py
index afef65444..656d76131 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/rest.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/rest.py
@@ -3,34 +3,66 @@
"""
Volumes manager API
- CloudHarness Volumes manager API # noqa: E501
+ CloudHarness Volumes manager API
The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
-import logging
+import io
+import json
+import re
import ssl
-from urllib.parse import urlencode
-import typing
-import certifi
import urllib3
-from urllib3._collections import HTTPHeaderDict
from cloudharness_cli.volumemanager.exceptions import ApiException, ApiValueError
+SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
+RESTResponseType = urllib3.HTTPResponse
+
+
+def is_socks_proxy_url(url):
+ if url is None:
+ return False
+ split_section = url.split("://")
+ if len(split_section) < 2:
+ return False
+ else:
+ return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
+
+
+class RESTResponse(io.IOBase):
+
+ def __init__(self, resp) -> None:
+ self.response = resp
+ self.status = resp.status
+ self.reason = resp.reason
+ self.data = None
+
+ def read(self):
+ if self.data is None:
+ self.data = self.response.data
+ return self.data
+
+ def getheaders(self):
+ """Returns a dictionary of the response headers."""
+ return self.response.headers
-logger = logging.getLogger(__name__)
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.response.headers.get(name, default)
-class RESTClientObject(object):
+class RESTClientObject:
- def __init__(self, configuration, pools_size=4, maxsize=None):
+ def __init__(self, configuration) -> None:
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
- # maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
@@ -39,140 +71,167 @@ def __init__(self, configuration, pools_size=4, maxsize=None):
else:
cert_reqs = ssl.CERT_NONE
- # ca_certs
- if configuration.ssl_ca_cert:
- ca_certs = configuration.ssl_ca_cert
- else:
- # if not set certificate file, use Mozilla's root certificates.
- ca_certs = certifi.where()
-
- addition_pool_args = {}
+ pool_args = {
+ "cert_reqs": cert_reqs,
+ "ca_certs": configuration.ssl_ca_cert,
+ "cert_file": configuration.cert_file,
+ "key_file": configuration.key_file,
+ }
if configuration.assert_hostname is not None:
- addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
+ pool_args['assert_hostname'] = (
+ configuration.assert_hostname
+ )
if configuration.retries is not None:
- addition_pool_args['retries'] = configuration.retries
+ pool_args['retries'] = configuration.retries
+
+ if configuration.tls_server_name:
+ pool_args['server_hostname'] = configuration.tls_server_name
+
if configuration.socket_options is not None:
- addition_pool_args['socket_options'] = configuration.socket_options
+ pool_args['socket_options'] = configuration.socket_options
- if maxsize is None:
- if configuration.connection_pool_maxsize is not None:
- maxsize = configuration.connection_pool_maxsize
- else:
- maxsize = 4
+ if configuration.connection_pool_maxsize is not None:
+ pool_args['maxsize'] = configuration.connection_pool_maxsize
# https pool manager
+ self.pool_manager: urllib3.PoolManager
+
if configuration.proxy:
- self.pool_manager = urllib3.ProxyManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- proxy_url=configuration.proxy,
- proxy_headers=configuration.proxy_headers,
- **addition_pool_args
- )
+ if is_socks_proxy_url(configuration.proxy):
+ from urllib3.contrib.socks import SOCKSProxyManager
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["headers"] = configuration.proxy_headers
+ self.pool_manager = SOCKSProxyManager(**pool_args)
+ else:
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["proxy_headers"] = configuration.proxy_headers
+ self.pool_manager = urllib3.ProxyManager(**pool_args)
else:
- self.pool_manager = urllib3.PoolManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- **addition_pool_args
- )
+ self.pool_manager = urllib3.PoolManager(**pool_args)
def request(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
+ method,
+ url,
+ headers=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ):
"""Perform requests.
:param method: http request method
:param url: http request url
:param headers: http request headers
- :param body: request body, for other types
- :param fields: request parameters for
- `application/x-www-form-urlencoded`
- or `multipart/form-data`
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Default is False.
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
+ :param body: request json body, for `application/json`
+ :param post_params: request post parameters,
+ `application/x-www-form-urlencoded`
+ and `multipart/form-data`
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
"""
method = method.upper()
- assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
- 'PATCH', 'OPTIONS']
-
- if fields and body:
+ assert method in [
+ 'GET',
+ 'HEAD',
+ 'DELETE',
+ 'POST',
+ 'PUT',
+ 'PATCH',
+ 'OPTIONS'
+ ]
+
+ if post_params and body:
raise ApiValueError(
- "body parameter cannot be used with fields parameter."
+ "body parameter cannot be used with post_params parameter."
)
- fields = fields or {}
+ post_params = post_params or {}
headers = headers or {}
- if timeout:
- if isinstance(timeout, (int, float)): # noqa: E501,F821
- timeout = urllib3.Timeout(total=timeout)
- elif (isinstance(timeout, tuple) and
- len(timeout) == 2):
- timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1])
+ timeout = None
+ if _request_timeout:
+ if isinstance(_request_timeout, (int, float)):
+ timeout = urllib3.Timeout(total=_request_timeout)
+ elif (
+ isinstance(_request_timeout, tuple)
+ and len(_request_timeout) == 2
+ ):
+ timeout = urllib3.Timeout(
+ connect=_request_timeout[0],
+ read=_request_timeout[1]
+ )
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
- if 'Content-Type' not in headers and body is None:
+
+ # no content type provided or payload is json
+ content_type = headers.get('Content-Type')
+ if (
+ not content_type
+ or re.search('json', content_type, re.IGNORECASE)
+ ):
+ request_body = None
+ if body is not None:
+ request_body = json.dumps(body)
r = self.pool_manager.request(
method,
url,
- preload_content=not stream,
+ body=request_body,
timeout=timeout,
- headers=headers
+ headers=headers,
+ preload_content=False
)
- elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
+ elif content_type == 'application/x-www-form-urlencoded':
r = self.pool_manager.request(
- method, url,
- body=body,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=False,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
- elif headers['Content-Type'] == 'multipart/form-data':
+ headers=headers,
+ preload_content=False
+ )
+ elif content_type == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
+ # Ensures that dict objects are serialized
+ post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params]
r = self.pool_manager.request(
- method, url,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=True,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
+ headers=headers,
+ preload_content=False
+ )
# Pass a `string` parameter directly in the body to support
- # other content types than Json when `body` argument is
- # provided in serialized form
+ # other content types than JSON when `body` argument is
+ # provided in serialized form.
elif isinstance(body, str) or isinstance(body, bytes):
- request_body = body
r = self.pool_manager.request(
- method, url,
+ method,
+ url,
+ body=body,
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
+ elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool):
+ request_body = "true" if body else "false"
+ r = self.pool_manager.request(
+ method,
+ url,
body=request_body,
- preload_content=not stream,
+ preload_content=False,
timeout=timeout,
headers=headers)
else:
@@ -183,72 +242,16 @@ def request(
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
- r = self.pool_manager.request(method, url,
- preload_content=not stream,
- timeout=timeout,
- headers=headers)
+ r = self.pool_manager.request(
+ method,
+ url,
+ fields={},
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
except urllib3.exceptions.SSLError as e:
- msg = "{0}\n{1}".format(type(e).__name__, str(e))
+ msg = "\n".join([type(e).__name__, str(e)])
raise ApiException(status=0, reason=msg)
- if not stream:
- # log response body
- logger.debug("response body: %s", r.data)
-
- return r
-
- def GET(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("GET", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def HEAD(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("HEAD", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def OPTIONS(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("OPTIONS", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def DELETE(self, url, headers=None, body=None,
- stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("DELETE", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def POST(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("POST", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PUT(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PUT", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PATCH(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PATCH", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
+ return RESTResponse(r)
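A short sketch of the replacement request flow introduced in this hunk: the rewritten client always passes preload_content=False and wraps urllib3's response in RESTResponse, whose read() fetches and caches the body. The keyword names follow the new request() signature above; the host and URL are placeholder assumptions.

from cloudharness_cli.volumemanager.configuration import Configuration
from cloudharness_cli.volumemanager.rest import RESTClientObject

rest = RESTClientObject(Configuration(host="http://volumemanager.cloudharness.local"))  # assumed host
resp = rest.request(
    "GET",
    "http://volumemanager.cloudharness.local/pvc/mypvc",  # assumed endpoint URL
    headers={"Accept": "application/json"},
    _request_timeout=(5, 30),  # a (connect, read) pair, handled by the code above
)
print(resp.status, resp.read())  # read() pulls the raw bytes once and caches them
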
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/schemas.py b/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/schemas.py
deleted file mode 100644
index 1d49822a8..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/volumemanager/schemas.py
+++ /dev/null
@@ -1,2462 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-from collections import defaultdict
-from datetime import date, datetime, timedelta # noqa: F401
-import functools
-import decimal
-import io
-import re
-import types
-import typing
-import uuid
-
-from dateutil.parser.isoparser import isoparser, _takes_ascii
-import frozendict
-
-from cloudharness_cli.volumemanager.exceptions import (
- ApiTypeError,
- ApiValueError,
-)
-from cloudharness_cli.volumemanager.configuration import (
- Configuration,
-)
-
-
-class Unset(object):
- """
- An instance of this class is set as the default value for object type(dict) properties that are optional
- When a property has an unset value, that property will not be assigned in the dict
- """
- pass
-
-unset = Unset()
-
-none_type = type(None)
-file_type = io.IOBase
-
-
-class FileIO(io.FileIO):
- """
- A class for storing files
- Note: this class is not immutable
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]):
- if isinstance(arg, (io.FileIO, io.BufferedReader)):
- if arg.closed:
- raise ApiValueError('Invalid file state; file is closed and must be open')
- arg.close()
- inst = super(FileIO, cls).__new__(cls, arg.name)
- super(FileIO, inst).__init__(arg.name)
- return inst
- raise ApiValueError('FileIO must be passed arg which contains the open file')
-
- def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]):
- pass
-
-
-def update(d: dict, u: dict):
- """
- Adds u to d
- Where each dict is defaultdict(set)
- """
- if not u:
- return d
- for k, v in u.items():
- if k not in d:
- d[k] = v
- else:
- d[k] = d[k] | v
-
-
-class ValidationMetadata(frozendict.frozendict):
- """
- A class storing metadata that is needed to validate OpenApi Schema payloads
- """
- def __new__(
- cls,
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
- from_server: bool = False,
- configuration: typing.Optional[Configuration] = None,
- seen_classes: typing.FrozenSet[typing.Type] = frozenset(),
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]] = frozendict.frozendict()
- ):
- """
- Args:
- path_to_item: the path to the current data being instantiated.
- For {'a': [1]} if the code is handling, 1, then the path is ('args[0]', 'a', 0)
- This changes from location to location
- from_server: whether or not this data came form the server
- True when receiving server data
- False when instantiating model with client side data not form the server
- This does not change from location to location
- configuration: the Configuration instance to use
- This is needed because in Configuration:
- - one can disable validation checking
- This does not change from location to location
- seen_classes: when deserializing data that matches multiple schemas, this is used to store
- the schemas that have been traversed. This is used to stop processing when a cycle is seen.
- This changes from location to location
- validated_path_to_schemas: stores the already validated schema classes for a given path location
- This does not change from location to location
- """
- return super().__new__(
- cls,
- path_to_item=path_to_item,
- from_server=from_server,
- configuration=configuration,
- seen_classes=seen_classes,
- validated_path_to_schemas=validated_path_to_schemas
- )
-
- def validation_ran_earlier(self, cls: type) -> bool:
- validated_schemas = self.validated_path_to_schemas.get(self.path_to_item, set())
- validation_ran_earlier = validated_schemas and cls in validated_schemas
- if validation_ran_earlier:
- return True
- if cls in self.seen_classes:
- return True
- return False
-
- @property
- def path_to_item(self) -> typing.Tuple[typing.Union[str, int], ...]:
- return self.get('path_to_item')
-
- @property
- def from_server(self) -> bool:
- return self.get('from_server')
-
- @property
- def configuration(self) -> typing.Optional[Configuration]:
- return self.get('configuration')
-
- @property
- def seen_classes(self) -> typing.FrozenSet[typing.Type]:
- return self.get('seen_classes')
-
- @property
- def validated_path_to_schemas(self) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]]:
- return self.get('validated_path_to_schemas')
-
-
-class Singleton:
- """
- Enums and singletons are the same
- The same instance is returned for a given key of (cls, arg)
- """
- _instances = {}
-
- def __new__(cls, arg: typing.Any, **kwargs):
- """
- cls base classes: BoolClass, NoneClass, str, decimal.Decimal
- The 3rd key is used in the tuple below for a corner case where an enum contains integer 1
- However 1.0 can also be ingested into that enum schema because 1.0 == 1 and
- Decimal('1.0') == Decimal('1')
- But if we omitted the 3rd value in the key, then Decimal('1.0') would be stored as Decimal('1')
- and json serializing that instance would be '1' rather than the expected '1.0'
- Adding the 3rd value, the str of arg ensures that 1.0 -> Decimal('1.0') which is serialized as 1.0
- """
- key = (cls, arg, str(arg))
- if key not in cls._instances:
- if isinstance(arg, (none_type, bool, BoolClass, NoneClass)):
- inst = super().__new__(cls)
- cls._instances[key] = inst
- else:
- cls._instances[key] = super().__new__(cls, arg)
- return cls._instances[key]
-
- def __repr__(self):
- if isinstance(self, NoneClass):
- return f'<{self.__class__.__name__}: None>'
- elif isinstance(self, BoolClass):
- if bool(self):
- return f'<{self.__class__.__name__}: True>'
- return f'<{self.__class__.__name__}: False>'
- return f'<{self.__class__.__name__}: {super().__repr__()}>'
-
-
-class classproperty:
-
- def __init__(self, fget):
- self.fget = fget
-
- def __get__(self, owner_self, owner_cls):
- return self.fget(owner_cls)
-
-
-class NoneClass(Singleton):
- @classproperty
- def NONE(cls):
- return cls(None)
-
- def __bool__(self) -> bool:
- return False
-
-
-class BoolClass(Singleton):
- @classproperty
- def TRUE(cls):
- return cls(True)
-
- @classproperty
- def FALSE(cls):
- return cls(False)
-
- @functools.lru_cache()
- def __bool__(self) -> bool:
- for key, instance in self._instances.items():
- if self is instance:
- return bool(key[1])
- raise ValueError('Unable to find the boolean value of this instance')
-
-
-class MetaOapgTyped:
- exclusive_maximum: typing.Union[int, float]
- inclusive_maximum: typing.Union[int, float]
- exclusive_minimum: typing.Union[int, float]
- inclusive_minimum: typing.Union[int, float]
- max_items: int
- min_items: int
- discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]]
-
-
- class properties:
- # to hold object properties
- pass
-
- additional_properties: typing.Optional[typing.Type['Schema']]
- max_properties: int
- min_properties: int
- all_of: typing.List[typing.Type['Schema']]
- one_of: typing.List[typing.Type['Schema']]
- any_of: typing.List[typing.Type['Schema']]
- not_schema: typing.Type['Schema']
- max_length: int
- min_length: int
- items: typing.Type['Schema']
-
-
-class Schema:
- """
- the base class of all swagger/openapi schemas/models
- """
- __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass}
- _types: typing.Set[typing.Type]
- MetaOapg = MetaOapgTyped
-
- @staticmethod
- def __get_valid_classes_phrase(input_classes):
- """Returns a string phrase describing what types are allowed"""
- all_classes = list(input_classes)
- all_classes = sorted(all_classes, key=lambda cls: cls.__name__)
- all_class_names = [cls.__name__ for cls in all_classes]
- if len(all_class_names) == 1:
- return "is {0}".format(all_class_names[0])
- return "is one of [{0}]".format(", ".join(all_class_names))
-
- @staticmethod
- def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']:
- if isinstance(item_cls, types.FunctionType):
- # referenced schema
- return item_cls()
- elif isinstance(item_cls, staticmethod):
- # referenced schema
- return item_cls.__func__()
- return item_cls
-
- @classmethod
- def __type_error_message(
- cls, var_value=None, var_name=None, valid_classes=None, key_type=None
- ):
- """
- Keyword Args:
- var_value (any): the variable which has the type_error
-            var_name (str): the name of the variable which has the type error
- valid_classes (tuple): the accepted classes for current_item's
- value
- key_type (bool): False if our value is a value in a dict
- True if it is a key in a dict
- False if our item is an item in a tuple
- """
- key_or_value = "value"
- if key_type:
- key_or_value = "key"
- valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes)
- msg = "Invalid type. Required {1} type {2} and " "passed type was {3}".format(
- var_name,
- key_or_value,
- valid_classes_phrase,
- type(var_value).__name__,
- )
- return msg
-
- @classmethod
- def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False):
- error_msg = cls.__type_error_message(
- var_name=path_to_item[-1],
- var_value=var_value,
- valid_classes=valid_classes,
- key_type=key_type,
- )
- return ApiTypeError(
- error_msg,
- path_to_item=path_to_item,
- valid_classes=valid_classes,
- key_type=key_type,
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- Schema _validate_oapg
- All keyword validation except for type checking was done in calling stack frames
- If those validations passed, the validated classes are collected in path_to_schemas
-
- Returns:
- path_to_schemas: a map of path to schemas
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- base_class = type(arg)
- if base_class not in cls._types:
- raise cls.__get_type_error(
- arg,
- validation_metadata.path_to_item,
- cls._types,
- key_type=False,
- )
-
- path_to_schemas = {validation_metadata.path_to_item: set()}
- path_to_schemas[validation_metadata.path_to_item].add(cls)
- path_to_schemas[validation_metadata.path_to_item].add(base_class)
- return path_to_schemas
-
- @staticmethod
- def _process_schema_classes_oapg(
- schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]
- ):
- """
- Processes and mutates schema_classes
- If a SomeSchema is a subclass of DictSchema then remove DictSchema because it is already included
- """
- if len(schema_classes) < 2:
- return
- if len(schema_classes) > 2 and UnsetAnyTypeSchema in schema_classes:
- schema_classes.remove(UnsetAnyTypeSchema)
- x_schema = schema_type_classes & schema_classes
- if not x_schema:
- return
- x_schema = x_schema.pop()
- if any(c is not x_schema and issubclass(c, x_schema) for c in schema_classes):
- # needed to not have a mro error in get_new_class
- schema_classes.remove(x_schema)
-
- @classmethod
- def __get_new_cls(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]:
- """
-        Make a new dynamic class for each validated path
-        Instead of instantiating cls directly, we manufacture a new class,
-        new_cls, whose bases are chosen dynamically and include cls,
-        and return a mapping from each path to its manufactured class
-
- Dict property + List Item Assignment Use cases:
- 1. value is NOT an instance of the required schema class
- the value is validated by _validate_oapg
- _validate_oapg returns a key value pair
- where the key is the path to the item, and the value will be the required manufactured class
- made out of the matching schemas
-        2. value is an instance of the correct schema type
- the value is NOT validated by _validate_oapg, _validate_oapg only checks that the instance is of the correct schema type
- for this value, _validate_oapg does NOT return an entry for it in _path_to_schemas
- and in list/dict _get_items_oapg,_get_properties_oapg the value will be directly assigned
- because value is of the correct type, and validation was run earlier when the instance was created
- """
- _path_to_schemas = {}
- if validation_metadata.validated_path_to_schemas:
- update(_path_to_schemas, validation_metadata.validated_path_to_schemas)
- if not validation_metadata.validation_ran_earlier(cls):
- other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- # loop through it make a new class for each entry
- # do not modify the returned result because it is cached and we would be modifying the cached value
- path_to_schemas = {}
- for path, schema_classes in _path_to_schemas.items():
- """
- Use cases
-            1. N number of schema classes + enum + type != bool/None, classes in path_to_schemas: tuple/frozendict.frozendict/str/Decimal/bytes/FileIO
-                needs Singleton added
-            2. N number of schema classes + enum + type == bool/None, classes in path_to_schemas: BoolClass/NoneClass
-                Singleton already added
-            3. N number of schema classes, classes in path_to_schemas: BoolClass/NoneClass/tuple/frozendict.frozendict/str/Decimal/bytes/FileIO
- """
- cls._process_schema_classes_oapg(schema_classes)
- enum_schema = any(
- issubclass(this_cls, EnumBase) for this_cls in schema_classes)
- inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set)
- chosen_schema_classes = schema_classes - inheritable_primitive_type
- suffix = tuple(inheritable_primitive_type)
- if enum_schema and suffix[0] not in {NoneClass, BoolClass}:
- suffix = (Singleton,) + suffix
-
- used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix
- mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes)
- path_to_schemas[path] = mfg_cls
-
- return path_to_schemas
-
- @classmethod
- def _get_new_instance_without_conversion_oapg(
- cls,
- arg: typing.Any,
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- # We have a Dynamic class and we are making an instance of it
- if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase):
- properties = cls._get_properties_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, properties)
- elif issubclass(cls, tuple) and issubclass(cls, ListBase):
- items = cls._get_items_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, items)
- """
- str = openapi str, date, and datetime
- decimal.Decimal = openapi int and float
- FileIO = openapi binary type and the user inputs a file
- bytes = openapi binary type and the user inputs bytes
- """
- return super(Schema, cls).__new__(cls, arg)
-
- @classmethod
- def from_openapi_data_oapg(
- cls,
- arg: typing.Union[
- str,
- date,
- datetime,
- int,
- float,
- decimal.Decimal,
- bool,
- None,
- 'Schema',
- dict,
- frozendict.frozendict,
- tuple,
- list,
- io.FileIO,
- io.BufferedReader,
- bytes
- ],
- _configuration: typing.Optional[Configuration]
- ):
- """
- Schema from_openapi_data_oapg
- """
- from_server = True
- validated_path_to_schemas = {}
- arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas)
- validation_metadata = ValidationMetadata(
- from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas)
- path_to_schemas = cls.__get_new_cls(arg, validation_metadata)
- new_cls = path_to_schemas[validation_metadata.path_to_item]
- new_inst = new_cls._get_new_instance_without_conversion_oapg(
- arg,
- validation_metadata.path_to_item,
- path_to_schemas
- )
- return new_inst
-
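-    # Sketch of the server-payload flow above; `SomeSchema` is a hypothetical
-    # generated subclass. The payload is cast with from_server=True, validated,
-    # and an instance of a dynamically manufactured subclass is returned.
-    #
-    #   inst = SomeSchema.from_openapi_data_oapg({'a': 1}, _configuration=None)
-    #   assert isinstance(inst, SomeSchema)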
- @staticmethod
- def __get_input_dict(*args, **kwargs) -> frozendict.frozendict:
- input_dict = {}
- if args and isinstance(args[0], (dict, frozendict.frozendict)):
- input_dict.update(args[0])
- if kwargs:
- input_dict.update(kwargs)
- return frozendict.frozendict(input_dict)
-
- @staticmethod
- def __remove_unsets(kwargs):
- return {key: val for key, val in kwargs.items() if val is not unset}
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
- """
- Schema __new__
-
- Args:
- args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): the value
- kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): dict values
- _configuration: contains the Configuration that enables json schema validation keywords
- like minItems, minLength etc
-
- Note: double underscores are used here because pycharm thinks that these variables
- are instance properties if they are named normally :(
- """
- __kwargs = cls.__remove_unsets(kwargs)
- if not args and not __kwargs:
- raise TypeError(
- 'No input given. args or kwargs must be given.'
- )
- if not __kwargs and args and not isinstance(args[0], dict):
- __arg = args[0]
- else:
- __arg = cls.__get_input_dict(*args, **__kwargs)
- __from_server = False
- __validated_path_to_schemas = {}
- __arg = cast_to_allowed_types(
- __arg, __from_server, __validated_path_to_schemas)
- __validation_metadata = ValidationMetadata(
- configuration=_configuration, from_server=__from_server, validated_path_to_schemas=__validated_path_to_schemas)
- __path_to_schemas = cls.__get_new_cls(__arg, __validation_metadata)
- __new_cls = __path_to_schemas[__validation_metadata.path_to_item]
- return __new_cls._get_new_instance_without_conversion_oapg(
- __arg,
- __validation_metadata.path_to_item,
- __path_to_schemas
- )
-
- def __init__(
- self,
- *args: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
- _configuration: typing.Optional[Configuration] = None,
- **kwargs: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
- ]
- ):
- """
-        this is needed to fix the 'Unexpected argument' warning in pycharm
-        this code does nothing because all Schema instances are immutable
-        all input data is passed into and used in __new__; after the new
-        instance is made, no attributes are assigned and __init__ is unused
- """
- pass
-
-"""
-import itertools
-data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool')
-type_to_cls = {
- 'None': 'NoneClass',
- 'FrozenDict': 'frozendict.frozendict',
- 'Tuple': 'tuple',
- 'Str': 'str',
- 'Decimal': 'decimal.Decimal',
- 'Bool': 'BoolClass'
-}
-cls_tuples = [v for v in itertools.combinations(data_types, 5)]
-typed_classes = [f"class {''.join(cls_tuple)}Mixin({', '.join(type_to_cls[typ] for typ in cls_tuple)}):\n pass" for cls_tuple in cls_tuples]
-for cls in typed_classes:
- print(cls)
-object_classes = [f"{''.join(cls_tuple)}Mixin = object" for cls_tuple in cls_tuples]
-for cls in object_classes:
- print(cls)
-"""
-if typing.TYPE_CHECKING:
- # qty 1
- NoneMixin = NoneClass
- FrozenDictMixin = frozendict.frozendict
- TupleMixin = tuple
- StrMixin = str
- DecimalMixin = decimal.Decimal
- BoolMixin = BoolClass
- BytesMixin = bytes
- FileMixin = FileIO
- # qty 2
- class BinaryMixin(bytes, FileIO):
- pass
- class NoneFrozenDictMixin(NoneClass, frozendict.frozendict):
- pass
- class NoneTupleMixin(NoneClass, tuple):
- pass
- class NoneStrMixin(NoneClass, str):
- pass
- class NoneDecimalMixin(NoneClass, decimal.Decimal):
- pass
- class NoneBoolMixin(NoneClass, BoolClass):
- pass
- class FrozenDictTupleMixin(frozendict.frozendict, tuple):
- pass
- class FrozenDictStrMixin(frozendict.frozendict, str):
- pass
- class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal):
- pass
- class FrozenDictBoolMixin(frozendict.frozendict, BoolClass):
- pass
- class TupleStrMixin(tuple, str):
- pass
- class TupleDecimalMixin(tuple, decimal.Decimal):
- pass
- class TupleBoolMixin(tuple, BoolClass):
- pass
- class StrDecimalMixin(str, decimal.Decimal):
- pass
- class StrBoolMixin(str, BoolClass):
- pass
- class DecimalBoolMixin(decimal.Decimal, BoolClass):
- pass
- # qty 3
- class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple):
- pass
- class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str):
- pass
- class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal):
- pass
- class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass):
- pass
- class NoneTupleStrMixin(NoneClass, tuple, str):
- pass
- class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal):
- pass
- class NoneTupleBoolMixin(NoneClass, tuple, BoolClass):
- pass
- class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal):
- pass
- class NoneStrBoolMixin(NoneClass, str, BoolClass):
- pass
- class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str):
- pass
- class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass):
- pass
- class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal):
- pass
- class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass):
- pass
- class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalMixin(tuple, str, decimal.Decimal):
- pass
- class TupleStrBoolMixin(tuple, str, BoolClass):
- pass
- class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass):
- pass
- class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass):
- pass
- # qty 4
- class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str):
- pass
- class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass):
- pass
- class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal):
- pass
- class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass):
- pass
- class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal):
- pass
- class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass):
- pass
- class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass):
- pass
- class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass):
- pass
- class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes):
- pass
-else:
- # qty 1
- class NoneMixin:
- _types = {NoneClass}
- class FrozenDictMixin:
- _types = {frozendict.frozendict}
- class TupleMixin:
- _types = {tuple}
- class StrMixin:
- _types = {str}
- class DecimalMixin:
- _types = {decimal.Decimal}
- class BoolMixin:
- _types = {BoolClass}
- class BytesMixin:
- _types = {bytes}
- class FileMixin:
- _types = {FileIO}
- # qty 2
- class BinaryMixin:
- _types = {bytes, FileIO}
- class NoneFrozenDictMixin:
- _types = {NoneClass, frozendict.frozendict}
- class NoneTupleMixin:
- _types = {NoneClass, tuple}
- class NoneStrMixin:
- _types = {NoneClass, str}
- class NoneDecimalMixin:
- _types = {NoneClass, decimal.Decimal}
- class NoneBoolMixin:
- _types = {NoneClass, BoolClass}
- class FrozenDictTupleMixin:
- _types = {frozendict.frozendict, tuple}
- class FrozenDictStrMixin:
- _types = {frozendict.frozendict, str}
- class FrozenDictDecimalMixin:
- _types = {frozendict.frozendict, decimal.Decimal}
- class FrozenDictBoolMixin:
- _types = {frozendict.frozendict, BoolClass}
- class TupleStrMixin:
- _types = {tuple, str}
- class TupleDecimalMixin:
- _types = {tuple, decimal.Decimal}
- class TupleBoolMixin:
- _types = {tuple, BoolClass}
- class StrDecimalMixin:
- _types = {str, decimal.Decimal}
- class StrBoolMixin:
- _types = {str, BoolClass}
- class DecimalBoolMixin:
- _types = {decimal.Decimal, BoolClass}
- # qty 3
- class NoneFrozenDictTupleMixin:
- _types = {NoneClass, frozendict.frozendict, tuple}
- class NoneFrozenDictStrMixin:
- _types = {NoneClass, frozendict.frozendict, str}
- class NoneFrozenDictDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal}
- class NoneFrozenDictBoolMixin:
- _types = {NoneClass, frozendict.frozendict, BoolClass}
- class NoneTupleStrMixin:
- _types = {NoneClass, tuple, str}
- class NoneTupleDecimalMixin:
- _types = {NoneClass, tuple, decimal.Decimal}
- class NoneTupleBoolMixin:
- _types = {NoneClass, tuple, BoolClass}
- class NoneStrDecimalMixin:
- _types = {NoneClass, str, decimal.Decimal}
- class NoneStrBoolMixin:
- _types = {NoneClass, str, BoolClass}
- class NoneDecimalBoolMixin:
- _types = {NoneClass, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrMixin:
- _types = {frozendict.frozendict, tuple, str}
- class FrozenDictTupleDecimalMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal}
- class FrozenDictTupleBoolMixin:
- _types = {frozendict.frozendict, tuple, BoolClass}
- class FrozenDictStrDecimalMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal}
- class FrozenDictStrBoolMixin:
- _types = {frozendict.frozendict, str, BoolClass}
- class FrozenDictDecimalBoolMixin:
- _types = {frozendict.frozendict, decimal.Decimal, BoolClass}
- class TupleStrDecimalMixin:
- _types = {tuple, str, decimal.Decimal}
- class TupleStrBoolMixin:
- _types = {tuple, str, BoolClass}
- class TupleDecimalBoolMixin:
- _types = {tuple, decimal.Decimal, BoolClass}
- class StrDecimalBoolMixin:
- _types = {str, decimal.Decimal, BoolClass}
- # qty 4
- class NoneFrozenDictTupleStrMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str}
- class NoneFrozenDictTupleDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal}
- class NoneFrozenDictTupleBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, BoolClass}
- class NoneFrozenDictStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal}
- class NoneFrozenDictStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, BoolClass}
- class NoneFrozenDictDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal}
- class NoneTupleStrBoolMixin:
- _types = {NoneClass, tuple, str, BoolClass}
- class NoneTupleDecimalBoolMixin:
- _types = {NoneClass, tuple, decimal.Decimal, BoolClass}
- class NoneStrDecimalBoolMixin:
- _types = {NoneClass, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal}
- class FrozenDictTupleStrBoolMixin:
- _types = {frozendict.frozendict, tuple, str, BoolClass}
- class FrozenDictTupleDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class FrozenDictStrDecimalBoolMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class TupleStrDecimalBoolMixin:
- _types = {tuple, str, decimal.Decimal, BoolClass}
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal}
- class NoneFrozenDictTupleStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass}
- class NoneFrozenDictTupleDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class NoneFrozenDictStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalBoolMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes}
-
-
-class ValidatorBase:
- @staticmethod
- def _is_json_validation_enabled_oapg(schema_keyword, configuration=None):
- """Returns true if JSON schema validation is enabled for the specified
- validation keyword. This can be used to skip JSON schema structural validation
- as requested in the configuration.
- Note: the suffix _oapg stands for openapi python (experimental) generator and
- it has been added to prevent collisions with other methods and properties
-
- Args:
- schema_keyword (string): the name of a JSON schema validation keyword.
- configuration (Configuration): the configuration class.
- """
-
- return (configuration is None or
- not hasattr(configuration, '_disabled_client_side_validations') or
- schema_keyword not in configuration._disabled_client_side_validations)
-
- @staticmethod
- def _raise_validation_errror_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""):
- raise ApiValueError(
- "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format(
- value=value,
- constraint_msg=constraint_msg,
- constraint_value=constraint_value,
- additional_txt=additional_txt,
- path_to_item=path_to_item,
- )
- )
-
-
-class EnumBase:
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- EnumBase _validate_oapg
- Validates that arg is in the enum's allowed values
- """
- try:
- cls.MetaOapg.enum_value_to_name[arg]
- except KeyError:
- raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys()))
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class BoolBase:
- def is_true_oapg(self) -> bool:
- """
- A replacement for x is True
- True if the instance is a BoolClass True Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self)
-
- def is_false_oapg(self) -> bool:
- """
- A replacement for x is False
- True if the instance is a BoolClass False Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self) is False
-
-
-class NoneBase:
- def is_none_oapg(self) -> bool:
- """
- A replacement for x is None
- True if the instance is a NoneClass None Singleton
- """
- if issubclass(self.__class__, NoneClass):
- return True
- return False
-
-
-class StrBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_str_oapg(self) -> str:
- return self
-
-    @property
-    def as_date_oapg(self) -> date:
-        raise NotImplementedError('implemented in format-specific subclasses (e.g. DateBase)')
-
-    @property
-    def as_datetime_oapg(self) -> datetime:
-        raise NotImplementedError('implemented in format-specific subclasses (e.g. DateTimeBase)')
-
-    @property
-    def as_decimal_oapg(self) -> decimal.Decimal:
-        raise NotImplementedError('implemented in format-specific subclasses (e.g. DecimalBase)')
-
-    @property
-    def as_uuid_oapg(self) -> uuid.UUID:
-        raise NotImplementedError('implemented in format-specific subclasses (e.g. UUIDBase)')
-
- @classmethod
- def __check_str_validations(
- cls,
- arg: str,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_length') and
- len(arg) > cls.MetaOapg.max_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be less than or equal to",
- constraint_value=cls.MetaOapg.max_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_length') and
- len(arg) < cls.MetaOapg.min_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'regex')):
- for regex_dict in cls.MetaOapg.regex:
- flags = regex_dict.get('flags', 0)
- if not re.search(regex_dict['pattern'], arg, flags=flags):
- if flags != 0:
-                        # Include the regex flags in the error message only
-                        # when they were specified in the OAS document.
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item,
- additional_txt=" with flags=`{}`".format(flags)
- )
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- StrBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, str):
- cls.__check_str_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class UUIDBase:
- @property
- @functools.lru_cache()
- def as_uuid_oapg(self) -> uuid.UUID:
- return uuid.UUID(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- uuid.UUID(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- UUIDBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class CustomIsoparser(isoparser):
-
- @_takes_ascii
- def parse_isodatetime(self, dt_str):
- components, pos = self._parse_isodate(dt_str)
- if len(dt_str) > pos:
- if self._sep is None or dt_str[pos:pos + 1] == self._sep:
- components += self._parse_isotime(dt_str[pos + 1:])
- else:
- raise ValueError('String contains unknown ISO components')
-
- if len(components) > 3 and components[3] == 24:
- components[3] = 0
- return datetime(*components) + timedelta(days=1)
-
- if len(components) <= 3:
- raise ValueError('Value is not a datetime')
-
- return datetime(*components)
-
- @_takes_ascii
- def parse_isodate(self, datestr):
- components, pos = self._parse_isodate(datestr)
-
- if len(datestr) > pos:
- raise ValueError('String contains invalid time components')
-
- if len(components) > 3:
- raise ValueError('String contains invalid time components')
-
- return date(*components)
-
-
-DEFAULT_ISOPARSER = CustomIsoparser()
-
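-# Sketch of DEFAULT_ISOPARSER behavior: unlike the stock isoparser.isoparse,
-# parse_isodatetime rejects bare dates, and parse_isodate rejects trailing
-# time components; a 24:00 time rolls over to the next day.
-#
-#   DEFAULT_ISOPARSER.parse_isodate('2020-01-31')            # date(2020, 1, 31)
-#   DEFAULT_ISOPARSER.parse_isodatetime('2020-01-31T10:00')  # datetime(2020, 1, 31, 10, 0)
-#   DEFAULT_ISOPARSER.parse_isodatetime('2020-01-31')        # raises ValueError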
-
-class DateBase:
- @property
- @functools.lru_cache()
- def as_date_oapg(self) -> date:
- return DEFAULT_ISOPARSER.parse_isodate(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodate(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 date format. "
- "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- DateBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DateTimeBase:
- @property
- @functools.lru_cache()
- def as_datetime_oapg(self) -> datetime:
- return DEFAULT_ISOPARSER.parse_isodatetime(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodatetime(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 datetime format. "
- "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DateTimeBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DecimalBase:
- """
- A class for storing decimals that are sent over the wire as strings
- These schemas must remain based on StrBase rather than NumberBase
- because picking base classes must be deterministic
- """
-
- @property
- @functools.lru_cache()
- def as_decimal_oapg(self) -> decimal.Decimal:
- return decimal.Decimal(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- decimal.Decimal(arg)
- return True
- except decimal.InvalidOperation:
- raise ApiValueError(
- "Value cannot be converted to a decimal. "
- "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DecimalBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class NumberBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- """
- Note: for some numbers like 9.0 they could be represented as an
- integer but our code chooses to store them as
- >>> Decimal('9.0').as_tuple()
- DecimalTuple(sign=0, digits=(9, 0), exponent=-1)
- so we can tell that the value came from a float and convert it back to a float
- during later serialization
- """
- if self.as_tuple().exponent < 0:
- # this could be represented as an integer but should be represented as a float
- # because that's what it was serialized from
- raise ApiValueError(f'{self} is not an integer')
- self._as_int = int(self)
- return self._as_int
-
- @property
- def as_float_oapg(self) -> float:
- try:
- return self._as_float
- except AttributeError:
- if self.as_tuple().exponent >= 0:
-                raise ApiValueError(f'{self} is not a float')
- self._as_float = float(self)
- return self._as_float
-
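-    # Sketch: the Decimal exponent preserves whether the wire value was an int
-    # or a float, so as_int_oapg/as_float_oapg can reject the other kind.
-    #
-    #   decimal.Decimal('9').as_tuple().exponent    # 0  -> as_int_oapg works
-    #   decimal.Decimal('9.0').as_tuple().exponent  # -1 -> as_float_oapg works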
- @classmethod
- def __check_numeric_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if cls._is_json_validation_enabled_oapg('multipleOf',
- validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
- multiple_of_value = cls.MetaOapg.multiple_of
- if (not (float(arg) / multiple_of_value).is_integer()):
-                # Note: 'multipleOf' is only as accurate as floating point arithmetic allows.
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="value must be a multiple of",
- constraint_value=multiple_of_value,
- path_to_item=validation_metadata.path_to_item
- )
-
- checking_max_or_min_values = any(
- hasattr(cls.MetaOapg, validation_key) for validation_key in {
- 'exclusive_maximum',
- 'inclusive_maximum',
- 'exclusive_minimum',
- 'inclusive_minimum',
- }
- )
- if not checking_max_or_min_values:
- return
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_maximum') and
- arg >= cls.MetaOapg.exclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than",
- constraint_value=cls.MetaOapg.exclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_maximum') and
- arg > cls.MetaOapg.inclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than or equal to",
- constraint_value=cls.MetaOapg.inclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_minimum') and
- arg <= cls.MetaOapg.exclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than",
-                constraint_value=cls.MetaOapg.exclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_minimum') and
- arg < cls.MetaOapg.inclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than or equal to",
- constraint_value=cls.MetaOapg.inclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- NumberBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, decimal.Decimal):
- cls.__check_numeric_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class ListBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def __validate_items(cls, list_items, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for items are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- list_items: the input list of items
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
-
- # if we have definitions for an items schema, use it
- # otherwise accept anything
- item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema)
- item_cls = cls._get_class_oapg(item_cls)
- path_to_schemas = {}
- for i, value in enumerate(list_items):
- item_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=validation_metadata.path_to_item+(i,),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if item_validation_metadata.validation_ran_earlier(item_cls):
- continue
- other_path_to_schemas = item_cls._validate_oapg(
- value, validation_metadata=item_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_tuple_validations(
- cls, arg,
- validation_metadata: ValidationMetadata):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_items') and
- len(arg) > cls.MetaOapg.max_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be less than or equal to",
- constraint_value=cls.MetaOapg.max_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_items') and
- len(arg) < cls.MetaOapg.min_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
- unique_items = set(arg)
- if len(arg) > len(unique_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="duplicate items were found, and the tuple must not contain duplicates because",
- constraint_value='unique_items==True',
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- ListBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
-        - the returned instance is a serializable type (except for None, True, and False, which are returned as NoneClass/BoolClass singletons)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, tuple):
- cls.__check_tuple_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, tuple):
- return _path_to_schemas
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_items_oapg(
- cls: 'Schema',
- arg: typing.List[typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- '''
- ListBase _get_items_oapg
- '''
- cast_items = []
-
- for i, value in enumerate(arg):
- item_path_to_item = path_to_item + (i,)
- item_cls = path_to_schemas[item_path_to_item]
- new_value = item_cls._get_new_instance_without_conversion_oapg(
- value,
- item_path_to_item,
- path_to_schemas
- )
- cast_items.append(new_value)
-
- return cast_items
-
-
-class Discriminable:
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args):
- if not args or args and disc_property_name not in args[0]:
- # The input data does not contain the discriminator property
- raise ApiValueError(
- "Cannot deserialize input data due to missing discriminator. "
- "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item)
- )
-
- @classmethod
- def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str):
- """
- Used in schemas with discriminators
- """
- if not hasattr(cls.MetaOapg, 'discriminator'):
- return None
- disc = cls.MetaOapg.discriminator()
- if disc_property_name not in disc:
- return None
- discriminated_cls = disc[disc_property_name].get(disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if not hasattr(cls, 'MetaOapg'):
- return None
- elif not (
- hasattr(cls.MetaOapg, 'all_of') or
- hasattr(cls.MetaOapg, 'one_of') or
- hasattr(cls.MetaOapg, 'any_of')
- ):
- return None
-        # TODO: stop traversing if a cycle is hit
- if hasattr(cls.MetaOapg, 'all_of'):
- for allof_cls in cls.MetaOapg.all_of():
- discriminated_cls = allof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'one_of'):
- for oneof_cls in cls.MetaOapg.one_of():
- discriminated_cls = oneof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'any_of'):
- for anyof_cls in cls.MetaOapg.any_of():
- discriminated_cls = anyof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- return None
-
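-# Sketch of discriminator lookup with hypothetical Pet/Cat/Dog schemas whose
-# MetaOapg declares a discriminator mapping:
-#
-#   class Pet(ComposedBase, DictSchema):
-#       class MetaOapg:
-#           @staticmethod
-#           def discriminator():
-#               return {'petType': {'cat': Cat, 'dog': Dog}}
-#
-#   assert Pet.get_discriminated_class_oapg('petType', 'cat') is Cat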
-
-class DictBase(Discriminable, ValidatorBase):
-
- @classmethod
- def __validate_arg_presence(cls, arg):
- """
- Ensures that:
- - all required arguments are passed in
- - the input variable names are valid
- - present in properties or
- - accepted because additionalProperties exists
- Exceptions will be raised if:
- - invalid arguments were passed in
- - a var_name is invalid if additional_properties == NotAnyTypeSchema
- and var_name not in properties.__annotations__
- - required properties were not passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- seen_required_properties = set()
- invalid_arguments = []
- required_property_names = getattr(cls.MetaOapg, 'required', set())
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name in arg:
- if property_name in required_property_names:
- seen_required_properties.add(property_name)
- elif property_name in property_annotations:
- continue
- elif additional_properties is not NotAnyTypeSchema:
- continue
- else:
- invalid_arguments.append(property_name)
- missing_required_arguments = list(required_property_names - seen_required_properties)
- if missing_required_arguments:
- missing_required_arguments.sort()
- raise ApiTypeError(
- "{} is missing {} required argument{}: {}".format(
- cls.__name__,
- len(missing_required_arguments),
- "s" if len(missing_required_arguments) > 1 else "",
- missing_required_arguments
- )
- )
- if invalid_arguments:
- invalid_arguments.sort()
- raise ApiTypeError(
- "{} was passed {} invalid argument{}: {}".format(
- cls.__name__,
- len(invalid_arguments),
- "s" if len(invalid_arguments) > 1 else "",
- invalid_arguments
- )
- )
-
- @classmethod
- def __validate_args(cls, arg, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for properties are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- path_to_schemas = {}
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name, value in arg.items():
- path_to_item = validation_metadata.path_to_item+(property_name,)
- if property_name in property_annotations:
- schema = property_annotations[property_name]
- elif additional_properties is not NotAnyTypeSchema:
- if additional_properties is UnsetAnyTypeSchema:
- """
- If additionalProperties is unset and this path_to_item does not yet have
- any validations on it, validate it.
- If it already has validations on it, skip this validation.
- """
- if path_to_item in path_to_schemas:
- continue
- schema = additional_properties
- else:
- raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format(
- value, cls, validation_metadata.path_to_item+(property_name,)
- ))
- schema = cls._get_class_oapg(schema)
- arg_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=path_to_item,
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if arg_validation_metadata.validation_ran_earlier(schema):
- continue
- other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_dict_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_properties') and
- len(arg) > cls.MetaOapg.max_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be less than or equal to",
- constraint_value=cls.MetaOapg.max_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_properties') and
- len(arg) < cls.MetaOapg.min_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DictBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
-        - the returned instance is a serializable type (except for None, True, and False, which are returned as NoneClass/BoolClass singletons)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, frozendict.frozendict):
- cls.__check_dict_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, frozendict.frozendict):
- return _path_to_schemas
- cls.__validate_arg_presence(arg)
- other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- try:
- discriminator = cls.MetaOapg.discriminator()
- except AttributeError:
- return _path_to_schemas
- # discriminator exists
- disc_prop_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg)
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
- cls.__name__,
- disc_prop_name,
- list(discriminator[disc_prop_name].keys()),
- validation_metadata.path_to_item + (disc_prop_name,)
- )
- )
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if updated_vm.validation_ran_earlier(discriminated_cls):
- return _path_to_schemas
- other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_properties_oapg(
- cls,
- arg: typing.Dict[str, typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- """
- DictBase _get_properties_oapg, this is how properties are set
- These values already passed validation
- """
- dict_items = {}
-
- for property_name_js, value in arg.items():
- property_path_to_item = path_to_item + (property_name_js,)
- property_cls = path_to_schemas[property_path_to_item]
- new_value = property_cls._get_new_instance_without_conversion_oapg(
- value,
- property_path_to_item,
- path_to_schemas
- )
- dict_items[property_name_js] = new_value
-
- return dict_items
-
- def __setattr__(self, name: str, value: typing.Any):
- if not isinstance(self, FileIO):
- raise AttributeError('property setting not supported on immutable instances')
-
- def __getattr__(self, name: str):
- """
- for instance.name access
- Properties are only type hinted for required properties
- so that hasattr(instance, 'optionalProp') is False when that key is not present
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- if name not in self.__class__.__annotations__:
- raise AttributeError(f"{self} has no attribute '{name}'")
- try:
- value = self[name]
- return value
- except KeyError as ex:
- raise AttributeError(str(ex))
-
- def __getitem__(self, name: str):
- """
- dict_instance[name] accessor
-        raises KeyError when the key is absent
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- return super().__getitem__(name)
-
- def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]:
- # dict_instance[name] accessor
- if not isinstance(self, frozendict.frozendict):
- raise NotImplementedError()
- try:
- return super().__getitem__(name)
- except KeyError:
- return unset
-
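-    # Sketch of access on an immutable dict instance `inst` (hypothetical):
-    # attribute access resolves only annotated (required) properties, item
-    # access raises KeyError, and get_item_oapg returns the `unset` sentinel.
-    #
-    #   inst.requiredProp            # ok for required/annotated properties
-    #   inst['missing']              # raises KeyError
-    #   inst.get_item_oapg('maybe')  # returns unset when absent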
-
-def cast_to_allowed_types(
- arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader],
- from_server: bool,
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]],
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
-) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]:
- """
- Casts the input payload arg into the allowed types
- The input validated_path_to_schemas is mutated by running this function
-
- When from_server is False then
- - date/datetime is cast to str
- - int/float is cast to Decimal
-
-    If a Schema instance is passed in, it is converted back to a primitive instance because
-    one may need to validate that data against the original Schema class AND additional different classes;
-    those additional classes will need to be added to the new manufactured class for that payload
-    If the code didn't do this and kept the payload as a Schema instance, it would fail to validate against other
-    Schema classes and the code wouldn't be able to manufacture a new class that includes all valid schemas
- TODO: store the validated schema classes in validation_metadata
-
- Args:
- arg: the payload
- from_server: whether this payload came from the server or not
- validated_path_to_schemas: a dict that stores the validated classes at any path location in the payload
- """
- if isinstance(arg, Schema):
- # store the already run validations
- schema_classes = set()
- source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__
- if not source_schema_was_unset:
- """
- Do not include UnsetAnyTypeSchema and its base class because
- it did not exist in the original spec schema definition
- It was added to ensure that all instances are of type Schema and the allowed base types
- """
- for cls in arg.__class__.__bases__:
- if cls is Singleton:
- # Skip Singleton
- continue
- schema_classes.add(cls)
- validated_path_to_schemas[path_to_item] = schema_classes
-
- type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}")
- if isinstance(arg, str):
- return str(arg)
- elif isinstance(arg, (dict, frozendict.frozendict)):
- return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()})
- elif isinstance(arg, (bool, BoolClass)):
- """
- this check must come before isinstance(arg, (int, float))
- because isinstance(True, int) is True
- """
- if arg:
- return BoolClass.TRUE
- return BoolClass.FALSE
- elif isinstance(arg, int):
- return decimal.Decimal(arg)
- elif isinstance(arg, float):
- decimal_from_float = decimal.Decimal(arg)
- if decimal_from_float.as_integer_ratio()[1] == 1:
- # 9.0 -> Decimal('9.0')
- # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
- return decimal.Decimal(str(decimal_from_float)+'.0')
- return decimal_from_float
- elif isinstance(arg, (tuple, list)):
- return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)])
- elif isinstance(arg, (none_type, NoneClass)):
- return NoneClass.NONE
- elif isinstance(arg, (date, datetime)):
- if not from_server:
- return arg.isoformat()
- raise type_error
- elif isinstance(arg, uuid.UUID):
- if not from_server:
- return str(arg)
- raise type_error
- elif isinstance(arg, decimal.Decimal):
- return decimal.Decimal(arg)
- elif isinstance(arg, bytes):
- return bytes(arg)
- elif isinstance(arg, (io.FileIO, io.BufferedReader)):
- return FileIO(arg)
- raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg)))
-
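-# Sketch of a client-side cast (from_server=False): primitives are normalized
-# to the immutable types used as bases for the dynamic classes.
-#
-#   out = cast_to_allowed_types({'n': 1, 'ok': True, 'xs': [1.5]}, False, {})
-#   # frozendict({'n': Decimal('1'), 'ok': BoolClass.TRUE,
-#   #             'xs': (Decimal('1.5'),)})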
-
-class ComposedBase(Discriminable):
-
- @classmethod
- def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
- path_to_schemas = defaultdict(set)
- for allof_cls in cls.MetaOapg.all_of():
- if validation_metadata.validation_ran_earlier(allof_cls):
- continue
- other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __get_oneof_class(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata,
- ):
- oneof_classes = []
- path_to_schemas = defaultdict(set)
- for oneof_cls in cls.MetaOapg.one_of():
- if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
- oneof_classes.append(oneof_cls)
- continue
- if validation_metadata.validation_ran_earlier(oneof_cls):
- oneof_classes.append(oneof_cls)
- continue
- try:
- path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and oneof_cls is discriminated_cls:
- raise ex
- continue
- oneof_classes.append(oneof_cls)
- if not oneof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the oneOf schemas matched the input data.".format(cls)
- )
- elif len(oneof_classes) > 1:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. Multiple "
- "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
- )
- # exactly one class matches
- return path_to_schemas
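# Editor's sketch of the exactly-one contract enforced above; ApiValueError is the
# module's own exception, and the helper below is purely illustrative.
def one_of_outcome(matching_classes):
    if not matching_classes:                  # zero matches -> error
        raise ApiValueError("none of the oneOf schemas matched the input data")
    if len(matching_classes) > 1:             # several matches -> error
        raise ApiValueError("multiple oneOf schemas matched: {}".format(matching_classes))
    return matching_classes[0]                # exactly one match wins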
-
- @classmethod
- def __get_anyof_classes(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata
- ):
- anyof_classes = []
- path_to_schemas = defaultdict(set)
- for anyof_cls in cls.MetaOapg.any_of():
- if validation_metadata.validation_ran_earlier(anyof_cls):
- anyof_classes.append(anyof_cls)
- continue
-
- try:
- other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and anyof_cls is discriminated_cls:
- raise ex
- continue
- anyof_classes.append(anyof_cls)
- update(path_to_schemas, other_path_to_schemas)
- if not anyof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the anyOf schemas matched the input data.".format(cls)
- )
- return path_to_schemas
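# Editor's note, mirroring the contrast with oneOf above: anyOf requires at least
# one matching branch and merges results from every branch that validated.
# Illustrative helper only.
def any_of_outcome(matching_classes):
    if not matching_classes:                  # zero matches -> error
        raise ApiValueError("none of the anyOf schemas matched the input data")
    return matching_classes                   # all matching schemas contribute bases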
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- ComposedBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False, which are enums)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- # validation checking on types, validations, and enums
- path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
-
- # process composed schema
- discriminator = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'):
- discriminator = cls.MetaOapg.discriminator()
- discriminated_cls = None
- if discriminator and arg and isinstance(arg, frozendict.frozendict):
- disc_property_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg)
- # get discriminated_cls by looking at the dict in the current class
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
- arg[disc_property_name],
- cls.__name__,
- disc_property_name,
- list(discriminator[disc_property_name].keys()),
- updated_vm.path_to_item + (disc_property_name,)
- )
- )
-
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
- other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
- other_path_to_schemas = cls.__get_oneof_class(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
- other_path_to_schemas = cls.__get_anyof_classes(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- not_cls = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'):
- not_cls = cls.MetaOapg.not_schema
- not_cls = cls._get_class_oapg(not_cls)
- if not_cls:
- other_path_to_schemas = None
- not_exception = ApiValueError(
- "Invalid value '{}' was passed in to {}. Value is invalid because it is disallowed by {}".format(
- arg,
- cls.__name__,
- not_cls.__name__,
- )
- )
- if updated_vm.validation_ran_earlier(not_cls):
- raise not_exception
-
- try:
- other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm)
- except (ApiValueError, ApiTypeError):
- pass
- if other_path_to_schemas:
- raise not_exception
-
- if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls):
- # TODO use an exception from this package here
- assert discriminated_cls in path_to_schemas[updated_vm.path_to_item]
- return path_to_schemas
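# Editor's illustration of the discriminator dict consumed above; CatSchema and
# DogSchema are hypothetical schema classes.
discriminator = {"pet_type": {"Cat": CatSchema, "Dog": DogSchema}}
disc_property_name = list(discriminator.keys())[0]          # "pet_type"
allowed = list(discriminator[disc_property_name].keys())    # ["Cat", "Dog"]
# A payload {"pet_type": "Dog"} resolves to DogSchema; {"pet_type": "Hamster"}
# raises the ApiValueError built above because "Hamster" is not in `allowed`.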
-
-
-# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
-class ComposedSchema(
- ComposedBase,
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs):
- if not args:
- if not kwargs:
- raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
- args = (kwargs, )
- return super().from_openapi_data_oapg(args[0], _configuration=_configuration)
-
-
-class ListSchema(
- ListBase,
- Schema,
- TupleMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NoneSchema(
- NoneBase,
- Schema,
- NoneMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: None, **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NumberSchema(
- NumberBase,
- Schema,
- DecimalMixin
-):
- """
- This is used for type: number with no format
- Both integers AND floats are accepted
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
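# Editor's usage sketch: NumberSchema accepts int, float, or Decimal on the client
# side; in this generator's design instances subclass decimal.Decimal.
n_int = NumberSchema(3)
n_float = NumberSchema(1.25)
assert isinstance(n_int, decimal.Decimal) and isinstance(n_float, decimal.Decimal)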
-
-
-class IntBase:
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- self._as_int = int(self)
- return self._as_int
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
-
- denominator = arg.as_integer_ratio()[-1]
- if denominator != 1:
- raise ApiValueError(
- "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- IntBase _validate_oapg
- TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class IntSchema(IntBase, NumberSchema):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class Int32Base:
- __inclusive_minimum = decimal.Decimal(-2147483648)
- __inclusive_maximum = decimal.Decimal(2147483647)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int32Schema(
- Int32Base,
- IntSchema
-):
- pass
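# Editor's sketch of the int32 range guard above: integral values outside
# [-2**31, 2**31 - 1] are rejected with ApiValueError.
Int32Schema(2147483647)          # inclusive maximum, accepted
try:
    Int32Schema(2147483648)      # one past the maximum
except ApiValueError:
    pass                         # raised by Int32Base.__validate_format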
-
-
-class Int64Base:
- __inclusive_minimum = decimal.Decimal(-9223372036854775808)
- __inclusive_maximum = decimal.Decimal(9223372036854775807)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int64Schema(
- Int64Base,
- IntSchema
-):
- pass
-
-
-class Float32Base:
- __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
- __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Float32Schema(
- Float32Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class Float64Base:
- __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
- __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-class Float64Schema(
- Float64Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- # todo check format
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class StrSchema(
- StrBase,
- Schema,
- StrMixin
-):
- """
- date + datetime string types must inherit from this class
- That is because one can validate a str payload as both:
- - type: string (format unset)
- - type: string, format: date
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema':
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class UUIDSchema(UUIDBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateSchema(DateBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, date], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
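# Editor's sketch of the StrSchema note above: DateSchema subclasses StrSchema, so
# the same payload may be given as a str or a date; cast_to_allowed_types
# serializes the date via isoformat() before validation.
d_str = DateSchema("2024-02-06")
d_obj = DateSchema(date(2024, 2, 6))
assert str(d_str) == str(d_obj) == "2024-02-06"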
-
-
-class DateTimeSchema(DateTimeBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, datetime], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DecimalSchema(DecimalBase, StrSchema):
-
- def __new__(cls, arg: str, **kwargs: Configuration):
- """
- Note: Decimal instances may not be passed in because cast_to_allowed_types is only invoked once for payloads,
- which can be simple (str) or complex (dicts or lists with nested values).
- Because casting is only done once and recursively casts all values prior to validation, if Decimal were
- accepted as an input to DecimalSchema there would be no way to know whether a client-side Decimal was meant
- for a StrSchema (where it should be cast to str) or for a NumberSchema (where it should stay as Decimal).
- """
- return super().__new__(cls, arg, **kwargs)
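# Editor's sketch of the note above: the value is always passed as a str (a
# string-typed schema carrying a number); as_decimal_oapg is assumed here to be
# the DecimalBase accessor for the parsed value.
price = DecimalSchema("19.99")
parsed = price.as_decimal_oapg   # -> decimal.Decimal("19.99") (accessor name is an assumption)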
-
-
-class BytesSchema(
- Schema,
- BytesMixin
-):
- """
- This class subclasses bytes and is immutable
- """
- def __new__(cls, arg: bytes, **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class FileSchema(
- Schema,
- FileMixin
-):
- """
- This class is NOT immutable
- Dynamic classes are built using it, for example when AnyType accepts binary data
- All other schema classes ARE immutable
- If one wanted to make this immutable one could make this a DictSchema with required properties:
- - data = BytesSchema (which would be an immutable bytes based schema)
- - file_name = StrSchema
- and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
- The downside would be that data would be stored in memory which one may not want to do for very large files
-
- The developer is responsible for closing this file and deleting it
-
- This class was kept as mutable:
- - to allow file reading and writing to disk
- - to be able to preserve file name info
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class BinaryBase:
- pass
-
-
-class BinarySchema(
- ComposedBase,
- BinaryBase,
- Schema,
- BinaryMixin
-):
- class MetaOapg:
- @staticmethod
- def one_of():
- return [
- BytesSchema,
- FileSchema,
- ]
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration):
- return super().__new__(cls, arg)
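# Editor's note: BinarySchema is a concrete use of the oneOf machinery above;
# bytes input matches the BytesSchema branch and an open binary file handle
# matches the FileSchema branch, so exactly one branch accepts any given input.
blob = BinarySchema(b"\x00\x01")                    # -> BytesSchema branch
# handle = BinarySchema(open("payload.bin", "rb"))  # -> FileSchema branch; caller must close it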
-
-
-class BoolSchema(
- BoolBase,
- Schema,
- BoolMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: bool, **kwargs: ValidationMetadata):
- return super().__new__(cls, arg, **kwargs)
-
-
-class AnyTypeSchema(
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolFileBytesMixin
-):
- # Python representation of a schema defined as true or {}
- pass
-
-
-class UnsetAnyTypeSchema(AnyTypeSchema):
- # Used when additionalProperties/items was not explicitly defined and a defining schema is needed
- pass
-
-
-class NotAnyTypeSchema(
- ComposedSchema,
-):
- """
- Python representation of a schema defined as false or {'not': {}}
- Does not allow inputs of AnyType
- Note: validations on this class are never run because the code knows that no inputs will ever validate
- """
-
- class MetaOapg:
- not_schema = AnyTypeSchema
-
- def __new__(
- cls,
- *args,
- _configuration: typing.Optional[Configuration] = None,
- ) -> 'NotAnyTypeSchema':
- return super().__new__(
- cls,
- *args,
- _configuration=_configuration,
- )
-
-
-class DictSchema(
- DictBase,
- Schema,
- FrozenDictMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]):
- return super().__new__(cls, *args, **kwargs)
-
-
-schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema}
-
-
-@functools.lru_cache()
-def get_new_class(
- class_name: str,
- bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]
-) -> typing.Type[Schema]:
- """
- Returns a new class that is made with the subclass bases
- """
- new_cls: typing.Type[Schema] = type(class_name, bases, {})
- return new_cls
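# Editor's sketch: get_new_class builds the dynamic classes returned from
# validation and memoizes them via lru_cache, so identical (name, bases)
# requests reuse one class object.
DynamicStr = get_new_class("DynamicStr", (StrSchema, Schema))
assert get_new_class("DynamicStr", (StrSchema, Schema)) is DynamicStr  # cache hit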
-
-
-LOG_CACHE_USAGE = False
-
-
-def log_cache_usage(cache_fn):
- if LOG_CACHE_USAGE:
- print(cache_fn.__name__, cache_fn.cache_info())
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py
index 7f19a3f9a..2ee03a01a 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py
@@ -5,25 +5,34 @@
"""
Workflows API
- Workflows API # noqa: E501
+ Workflows API
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
__version__ = "1.0.0"
+# import apis into sdk package
+from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi
+
# import ApiClient
+from cloudharness_cli.workflows.api_response import ApiResponse
from cloudharness_cli.workflows.api_client import ApiClient
-
-# import Configuration
from cloudharness_cli.workflows.configuration import Configuration
-
-# import exceptions
from cloudharness_cli.workflows.exceptions import OpenApiException
-from cloudharness_cli.workflows.exceptions import ApiAttributeError
from cloudharness_cli.workflows.exceptions import ApiTypeError
from cloudharness_cli.workflows.exceptions import ApiValueError
from cloudharness_cli.workflows.exceptions import ApiKeyError
+from cloudharness_cli.workflows.exceptions import ApiAttributeError
from cloudharness_cli.workflows.exceptions import ApiException
+
+# import models into sdk package
+from cloudharness_cli.workflows.models.operation import Operation
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+from cloudharness_cli.workflows.models.search_result_data import SearchResultData
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py
new file mode 100644
index 000000000..3f8eef111
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py
@@ -0,0 +1,5 @@
+# flake8: noqa
+
+# import apis into api package
+from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi
+
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py
new file mode 100644
index 000000000..c7fa472f4
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py
@@ -0,0 +1,1120 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+import warnings
+from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt
+from typing import Any, Dict, List, Optional, Tuple, Union
+from typing_extensions import Annotated
+
+from pydantic import Field, StrictStr, field_validator
+from typing import Optional
+from typing_extensions import Annotated
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+
+from cloudharness_cli.workflows.api_client import ApiClient, RequestSerialized
+from cloudharness_cli.workflows.api_response import ApiResponse
+from cloudharness_cli.workflows.rest import RESTResponseType
+
+
+class CreateAndAccessApi:
+ """NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, api_client=None) -> None:
+ if api_client is None:
+ api_client = ApiClient.get_default()
+ self.api_client = api_client
+
+
+ @validate_call
+ def delete_operation(
+ self,
+ name: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> None:
+ """deletes operation by name
+
+ delete operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def delete_operation_with_http_info(
+ self,
+ name: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[None]:
+ """deletes operation by name
+
+ delete operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def delete_operation_without_preload_content(
+ self,
+ name: StrictStr,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """deletes operation by name
+
+ delete operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._delete_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _delete_operation_serialize(
+ self,
+ name,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if name is not None:
+ _path_params['name'] = name
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='DELETE',
+ resource_path='/operations/{name}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def get_operation(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """get operation by name
+
+ retrieves an operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def get_operation_with_http_info(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """get operation by name
+
+ retrieves an operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def get_operation_without_preload_content(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """get operation by name
+
+ retrieves an operation by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._get_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _get_operation_serialize(
+ self,
+ name,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if name is not None:
+ _path_params['name'] = name
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operations/{name}',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def list_operations(
+ self,
+ status: Annotated[Optional[OperationStatus], Field(description="filter by status")] = None,
+ previous_search_token: Annotated[Optional[StrictStr], Field(description="continue previous search (pagination chunks)")] = None,
+ limit: Annotated[Optional[Annotated[int, Field(le=50, strict=True, ge=1)]], Field(description="maximum number of records to return per page")] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> OperationSearchResult:
+ """lists operations
+
+ see all operations for the user
+
+ :param status: filter by status
+ :type status: OperationStatus
+ :param previous_search_token: continue previous search (pagination chunks)
+ :type previous_search_token: str
+ :param limit: maximum number of records to return per page
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._list_operations_serialize(
+ status=status,
+ previous_search_token=previous_search_token,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "OperationSearchResult",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def list_operations_with_http_info(
+ self,
+ status: Annotated[Optional[OperationStatus], Field(description="filter by status")] = None,
+ previous_search_token: Annotated[Optional[StrictStr], Field(description="continue previous search (pagination chunks)")] = None,
+ limit: Annotated[Optional[Annotated[int, Field(le=50, strict=True, ge=1)]], Field(description="maximum number of records to return per page")] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[OperationSearchResult]:
+ """lists operations
+
+ see all operations for the user
+
+ :param status: filter by status
+ :type status: OperationStatus
+ :param previous_search_token: continue previous search (pagination chunks)
+ :type previous_search_token: str
+ :param limit: maximum number of records to return per page
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._list_operations_serialize(
+ status=status,
+ previous_search_token=previous_search_token,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "OperationSearchResult",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def list_operations_without_preload_content(
+ self,
+ status: Annotated[Optional[OperationStatus], Field(description="filter by status")] = None,
+ previous_search_token: Annotated[Optional[StrictStr], Field(description="continue previous search (pagination chunks)")] = None,
+ limit: Annotated[Optional[Annotated[int, Field(le=50, strict=True, ge=1)]], Field(description="maximum number of records to return per page")] = None,
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """lists operations
+
+ see all operations for the user
+
+ :param status: filter by status
+ :type status: OperationStatus
+ :param previous_search_token: continue previous search (pagination chunks)
+ :type previous_search_token: str
+ :param limit: maximum number of records to return per page
+ :type limit: int
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._list_operations_serialize(
+ status=status,
+ previous_search_token=previous_search_token,
+ limit=limit,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "OperationSearchResult",
+ '400': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _list_operations_serialize(
+ self,
+ status,
+ previous_search_token,
+ limit,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ # process the query parameters
+ if status is not None:
+
+ _query_params.append(('status', status.value))
+
+ if previous_search_token is not None:
+
+ _query_params.append(('previous_search_token', previous_search_token))
+
+ if limit is not None:
+
+ _query_params.append(('limit', limit))
+
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'application/json'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operations',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
+
+
+ @validate_call
+ def log_operation(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> str:
+ """get operation by name
+
+ retrieves an operation log by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ ).data
+
+
+ @validate_call
+ def log_operation_with_http_info(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> ApiResponse[str]:
+ """get operation by name
+
+ retrieves an operation log by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ response_data.read()
+ return self.api_client.response_deserialize(
+ response_data=response_data,
+ response_types_map=_response_types_map,
+ )
+
+
+ @validate_call
+ def log_operation_without_preload_content(
+ self,
+ name: Annotated[str, Field(strict=True)],
+ _request_timeout: Union[
+ None,
+ Annotated[StrictFloat, Field(gt=0)],
+ Tuple[
+ Annotated[StrictFloat, Field(gt=0)],
+ Annotated[StrictFloat, Field(gt=0)]
+ ]
+ ] = None,
+ _request_auth: Optional[Dict[StrictStr, Any]] = None,
+ _content_type: Optional[StrictStr] = None,
+ _headers: Optional[Dict[StrictStr, Any]] = None,
+ _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+ ) -> RESTResponseType:
+ """get operation by name
+
+ retrieves an operation log by its name
+
+ :param name: (required)
+ :type name: str
+ :param _request_timeout: timeout setting for this request. If a single
+ number is provided, it is the total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
+ :type _request_timeout: int, tuple(int, int), optional
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the
+ authentication in the spec for a single request.
+ :type _request_auth: dict, optional
+ :param _content_type: force content-type for the request.
+ :type _content_type: str, optional
+ :param _headers: set to override the headers for a single
+ request; this effectively ignores the headers
+ in the spec for a single request.
+ :type _headers: dict, optional
+ :param _host_index: set to override the host_index for a single
+ request; this effectively ignores the host_index
+ in the spec for a single request.
+ :type _host_index: int, optional
+ :return: Returns the result object.
+ """ # noqa: E501
+
+ _param = self._log_operation_serialize(
+ name=name,
+ _request_auth=_request_auth,
+ _content_type=_content_type,
+ _headers=_headers,
+ _host_index=_host_index
+ )
+
+ _response_types_map: Dict[str, Optional[str]] = {
+ '200': "str",
+ '400': None,
+ '404': None,
+ }
+ response_data = self.api_client.call_api(
+ *_param,
+ _request_timeout=_request_timeout
+ )
+ return response_data.response
+
+
+ def _log_operation_serialize(
+ self,
+ name,
+ _request_auth,
+ _content_type,
+ _headers,
+ _host_index,
+ ) -> RequestSerialized:
+
+ _host = None
+
+ _collection_formats: Dict[str, str] = {
+ }
+
+ _path_params: Dict[str, str] = {}
+ _query_params: List[Tuple[str, str]] = []
+ _header_params: Dict[str, Optional[str]] = _headers or {}
+ _form_params: List[Tuple[str, str]] = []
+ _files: Dict[str, Union[str, bytes]] = {}
+ _body_params: Optional[bytes] = None
+
+ # process the path parameters
+ if name is not None:
+ _path_params['name'] = name
+ # process the query parameters
+ # process the header parameters
+ # process the form parameters
+ # process the body parameter
+
+
+ # set the HTTP header `Accept`
+ if 'Accept' not in _header_params:
+ _header_params['Accept'] = self.api_client.select_header_accept(
+ [
+ 'text/plain'
+ ]
+ )
+
+
+ # authentication setting
+ _auth_settings: List[str] = [
+ ]
+
+ return self.api_client.param_serialize(
+ method='GET',
+ resource_path='/operations/{name}/logs',
+ path_params=_path_params,
+ query_params=_query_params,
+ header_params=_header_params,
+ body=_body_params,
+ post_params=_form_params,
+ files=_files,
+ auth_settings=_auth_settings,
+ collection_formats=_collection_formats,
+ _host=_host,
+ _request_auth=_request_auth
+ )
+
+
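# Editor's usage sketch for the regenerated, pydantic-based client above.
# The host URL is a hypothetical assumption; the three method variants shown
# are the ones generated in this file.
from cloudharness_cli.workflows import ApiClient, Configuration
from cloudharness_cli.workflows.api import CreateAndAccessApi

configuration = Configuration(host="http://localhost:8080")  # hypothetical host
with ApiClient(configuration) as api_client:
    api = CreateAndAccessApi(api_client)
    result = api.list_operations(limit=10)                       # deserialized OperationSearchResult
    full = api.list_operations_with_http_info(limit=10)          # ApiResponse wrapper: .data, .status_code, .headers
    raw = api.list_operations_without_preload_content(limit=10)  # raw response, body not read yet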
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py
index d06655e2a..7c3ba6fd0 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py
@@ -1,1500 +1,782 @@
# coding: utf-8
+
"""
Workflows API
- Workflows API # noqa: E501
+ Workflows API
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
-from dataclasses import dataclass
-from decimal import Decimal
-import enum
-import email
+
+import datetime
+from dateutil.parser import parse
+from enum import Enum
import json
+import mimetypes
import os
-import io
-import atexit
-from multiprocessing.pool import ThreadPool
import re
import tempfile
-import typing
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-from urllib.parse import urlparse, quote
-from urllib3.fields import RequestField as RequestFieldBase
-import frozendict
+from urllib.parse import quote
+from typing import Tuple, Optional, List, Dict, Union
+from pydantic import SecretStr
-from cloudharness_cli.workflows import rest
from cloudharness_cli.workflows.configuration import Configuration
-from cloudharness_cli.workflows.exceptions import ApiTypeError, ApiValueError
-from cloudharness_cli.workflows.schemas import (
- NoneClass,
- BoolClass,
- Schema,
- FileIO,
- BinarySchema,
- date,
- datetime,
- none_type,
- Unset,
- unset,
+from cloudharness_cli.workflows.api_response import ApiResponse, T as ApiResponseT
+import cloudharness_cli.workflows.models
+from cloudharness_cli.workflows import rest
+from cloudharness_cli.workflows.exceptions import (
+ ApiValueError,
+ ApiException,
+ BadRequestException,
+ UnauthorizedException,
+ ForbiddenException,
+ NotFoundException,
+ ServiceException
)
+RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]]
-class RequestField(RequestFieldBase):
- def __eq__(self, other):
- if not isinstance(other, RequestField):
- return False
- return self.__dict__ == other.__dict__
-
-
-class JSONEncoder(json.JSONEncoder):
- compact_separators = (',', ':')
-
- def default(self, obj):
- if isinstance(obj, str):
- return str(obj)
- elif isinstance(obj, float):
- return float(obj)
- elif isinstance(obj, int):
- return int(obj)
- elif isinstance(obj, Decimal):
- if obj.as_tuple().exponent >= 0:
- return int(obj)
- return float(obj)
- elif isinstance(obj, NoneClass):
- return None
- elif isinstance(obj, BoolClass):
- return bool(obj)
- elif isinstance(obj, (dict, frozendict.frozendict)):
- return {key: self.default(val) for key, val in obj.items()}
- elif isinstance(obj, (list, tuple)):
- return [self.default(item) for item in obj]
- raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__))
-
-
-class ParameterInType(enum.Enum):
- QUERY = 'query'
- HEADER = 'header'
- PATH = 'path'
- COOKIE = 'cookie'
-
-
-class ParameterStyle(enum.Enum):
- MATRIX = 'matrix'
- LABEL = 'label'
- FORM = 'form'
- SIMPLE = 'simple'
- SPACE_DELIMITED = 'spaceDelimited'
- PIPE_DELIMITED = 'pipeDelimited'
- DEEP_OBJECT = 'deepObject'
-
-
-class PrefixSeparatorIterator:
- # A class to store prefixes and separators for rfc6570 expansions
-
- def __init__(self, prefix: str, separator: str):
- self.prefix = prefix
- self.separator = separator
- self.first = True
- if separator in {'.', '|', '%20'}:
- item_separator = separator
- else:
- item_separator = ','
- self.item_separator = item_separator
-
- def __iter__(self):
- return self
-
- def __next__(self):
- if self.first:
- self.first = False
- return self.prefix
- return self.separator
-
-
-class ParameterSerializerBase:
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- return False
-
- @staticmethod
- def __ref6570_item_value(in_data: typing.Any, percent_encode: bool):
- """
- Get representation if str/float/int/None/items in list/ values in dict
- None is returned if an item is undefined, use cases are value=
- - None
- - []
- - {}
- - [None, None None]
- - {'a': None, 'b': None}
- """
- if type(in_data) in {str, float, int}:
- if percent_encode:
- return quote(str(in_data))
- return str(in_data)
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, list) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- elif isinstance(in_data, dict) and not in_data:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return None
- raise ApiValueError('Unable to generate a ref6570 item representation of {}'.format(in_data))
+class ApiClient:
+ """Generic API client for OpenAPI client library builds.
- @staticmethod
- def _to_dict(name: str, value: str):
- return {name: value}
+ OpenAPI generic API client. This client handles the client-
+ server communication, and is invariant across implementations. Specifics of
+ the methods and models for each application are generated from the OpenAPI
+ templates.
- @classmethod
- def __ref6570_str_float_int_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_value = cls.__ref6570_item_value(in_data, percent_encode)
- if item_value is None or (item_value == '' and prefix_separator_iterator.separator == ';'):
- return next(prefix_separator_iterator) + var_name_piece
- value_pair_equals = '=' if named_parameter_expansion else ''
- return next(prefix_separator_iterator) + var_name_piece + value_pair_equals + item_value
+ :param configuration: .Configuration object for this client
+ :param header_name: a header to pass when making calls to the API.
+ :param header_value: a header value to pass when making calls to
+ the API.
+ :param cookie: a cookie to include in the header when making calls
+ to the API
+ """
- @classmethod
- def __ref6570_list_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- item_values = [cls.__ref6570_item_value(v, percent_encode) for v in in_data]
- item_values = [v for v in item_values if v is not None]
- if not item_values:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece +
- value_pair_equals +
- prefix_separator_iterator.item_separator.join(item_values)
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [var_name_piece + value_pair_equals + val for val in item_values]
- )
+ PRIMITIVE_TYPES = (float, bool, bytes, str, int)
+ NATIVE_TYPES_MAPPING = {
+ 'int': int,
+ 'long': int, # TODO remove as only py3 is supported?
+ 'float': float,
+ 'str': str,
+ 'bool': bool,
+ 'date': datetime.date,
+ 'datetime': datetime.datetime,
+ 'object': object,
+ }
+ _pool = None
- @classmethod
- def __ref6570_dict_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator,
- var_name_piece: str,
- named_parameter_expansion: bool
- ) -> str:
- in_data_transformed = {key: cls.__ref6570_item_value(val, percent_encode) for key, val in in_data.items()}
- in_data_transformed = {key: val for key, val in in_data_transformed.items() if val is not None}
- if not in_data_transformed:
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- value_pair_equals = '=' if named_parameter_expansion else ''
- if not explode:
- return (
- next(prefix_separator_iterator) +
- var_name_piece + value_pair_equals +
- prefix_separator_iterator.item_separator.join(
- prefix_separator_iterator.item_separator.join(
- item_pair
- ) for item_pair in in_data_transformed.items()
- )
- )
- # exploded
- return next(prefix_separator_iterator) + next(prefix_separator_iterator).join(
- [key + '=' + val for key, val in in_data_transformed.items()]
- )
+ def __init__(
+ self,
+ configuration=None,
+ header_name=None,
+ header_value=None,
+ cookie=None
+ ) -> None:
+ # use default configuration if none is provided
+ if configuration is None:
+ configuration = Configuration.get_default()
+ self.configuration = configuration
- @classmethod
- def _ref6570_expansion(
- cls,
- variable_name: str,
- in_data: typing.Any,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: PrefixSeparatorIterator
- ) -> str:
- """
- Separator is for separate variables like dict with explode true, not for array item separation
- """
- named_parameter_expansion = prefix_separator_iterator.separator in {'&', ';'}
- var_name_piece = variable_name if named_parameter_expansion else ''
- if type(in_data) in {str, float, int}:
- return cls.__ref6570_str_float_int_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, none_type):
- # ignored by the expansion process https://datatracker.ietf.org/doc/html/rfc6570#section-3.2.1
- return ""
- elif isinstance(in_data, list):
- return cls.__ref6570_list_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- elif isinstance(in_data, dict):
- return cls.__ref6570_dict_expansion(
- variable_name,
- in_data,
- explode,
- percent_encode,
- prefix_separator_iterator,
- var_name_piece,
- named_parameter_expansion
- )
- # bool, bytes, etc
- raise ApiValueError('Unable to generate a ref6570 representation of {}'.format(in_data))
+ self.rest_client = rest.RESTClientObject(configuration)
+ self.default_headers = {}
+ if header_name is not None:
+ self.default_headers[header_name] = header_value
+ self.cookie = cookie
+ # Set default User-Agent.
+ self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ self.client_side_validation = configuration.client_side_validation
+ def __enter__(self):
+ return self
-class StyleFormSerializer(ParameterSerializerBase):
- @classmethod
- def _get_default_explode(cls, style: ParameterStyle) -> bool:
- if style is ParameterStyle.FORM:
- return True
- return super()._get_default_explode(style)
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
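+ # Example usage as a context manager (a sketch; host and header values
+ # are assumptions, not part of the generated client):
+ #
+ #     config = Configuration(host="http://localhost/api")
+ #     with ApiClient(configuration=config) as api_client:
+ #         api_client.set_default_header("X-Request-Source", "docs-example")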
- def _serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool,
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> str:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = PrefixSeparatorIterator('', '&')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
+ @property
+ def user_agent(self):
+ """User agent for this API client"""
+ return self.default_headers['User-Agent']
+ @user_agent.setter
+ def user_agent(self, value):
+ self.default_headers['User-Agent'] = value
-class StyleSimpleSerializer(ParameterSerializerBase):
+ def set_default_header(self, header_name, header_value):
+ self.default_headers[header_name] = header_value
- def _serialize_simple(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- name: str,
- explode: bool,
- percent_encode: bool
- ) -> str:
- prefix_separator_iterator = PrefixSeparatorIterator('', ',')
- return self._ref6570_expansion(
- variable_name=name,
- in_data=in_data,
- explode=explode,
- percent_encode=percent_encode,
- prefix_separator_iterator=prefix_separator_iterator
- )
-
-class JSONDetector:
- """
- Works for:
- application/json
- application/json; charset=UTF-8
- application/json-patch+json
- application/geo+json
- """
- __json_content_type_pattern = re.compile("application/[^+]*[+]?(json);?.*")
-
- @classmethod
- def _content_type_is_json(cls, content_type: str) -> bool:
- if cls.__json_content_type_pattern.match(content_type):
- return True
- return False
-
-
-@dataclass
-class ParameterBase(JSONDetector):
- name: str
- in_type: ParameterInType
- required: bool
- style: typing.Optional[ParameterStyle]
- explode: typing.Optional[bool]
- allow_reserved: typing.Optional[bool]
- schema: typing.Optional[typing.Type[Schema]]
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]]
-
- __style_to_in_type = {
- ParameterStyle.MATRIX: {ParameterInType.PATH},
- ParameterStyle.LABEL: {ParameterInType.PATH},
- ParameterStyle.FORM: {ParameterInType.QUERY, ParameterInType.COOKIE},
- ParameterStyle.SIMPLE: {ParameterInType.PATH, ParameterInType.HEADER},
- ParameterStyle.SPACE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.PIPE_DELIMITED: {ParameterInType.QUERY},
- ParameterStyle.DEEP_OBJECT: {ParameterInType.QUERY},
- }
- __in_type_to_default_style = {
- ParameterInType.QUERY: ParameterStyle.FORM,
- ParameterInType.PATH: ParameterStyle.SIMPLE,
- ParameterInType.HEADER: ParameterStyle.SIMPLE,
- ParameterInType.COOKIE: ParameterStyle.FORM,
- }
- __disallowed_header_names = {'Accept', 'Content-Type', 'Authorization'}
- _json_encoder = JSONEncoder()
+ _default = None
@classmethod
- def __verify_style_to_in_type(cls, style: typing.Optional[ParameterStyle], in_type: ParameterInType):
- if style is None:
- return
- in_type_set = cls.__style_to_in_type[style]
- if in_type not in in_type_set:
- raise ValueError(
- 'Invalid style and in_type combination. For style={} only in_type={} are allowed'.format(
- style, in_type_set
- )
- )
-
- def __init__(
- self,
- name: str,
- in_type: ParameterInType,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- if schema is None and content is None:
- raise ValueError('Value missing; Pass in either schema or content')
- if schema and content:
- raise ValueError('Too many values provided. Both schema and content were provided. Only one may be input')
- if name in self.__disallowed_header_names and in_type is ParameterInType.HEADER:
- raise ValueError('Invalid name, name may not be one of {}'.format(self.__disallowed_header_names))
- self.__verify_style_to_in_type(style, in_type)
- if content is None and style is None:
- style = self.__in_type_to_default_style[in_type]
- if content is not None and in_type in self.__in_type_to_default_style and len(content) != 1:
- raise ValueError('Invalid content length, content length must equal 1')
- self.in_type = in_type
- self.name = name
- self.required = required
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
- self.schema = schema
- self.content = content
-
- def _serialize_json(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- eliminate_whitespace: bool = False
- ) -> str:
- if eliminate_whitespace:
- return json.dumps(in_data, separators=self._json_encoder.compact_separators)
- return json.dumps(in_data)
+ def get_default(cls):
+ """Return new instance of ApiClient.
+ This method returns newly created, based on default constructor,
+ object of ApiClient class or returns a copy of default
+ ApiClient.
-class PathParameter(ParameterBase, StyleSimpleSerializer):
+ :return: The ApiClient object.
+ """
+ if cls._default is None:
+ cls._default = ApiClient()
+ return cls._default
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.PATH,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ @classmethod
+ def set_default(cls, default):
+ """Set default instance of ApiClient.
- def __serialize_label(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator('.', '.')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ It stores default ApiClient.
- def __serialize_matrix(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list]
- ) -> typing.Dict[str, str]:
- prefix_separator_iterator = PrefixSeparatorIterator(';', ';')
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ :param default: object of ApiClient.
+ """
+ cls._default = default
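+ # Sketch: installing a process-wide default client (the host value is
+ # an assumption):
+ #
+ #     ApiClient.set_default(ApiClient(Configuration(host="http://localhost/api")))
+ #     client = ApiClient.get_default()  # returns the instance set above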
- def __serialize_simple(
+ def param_serialize(
self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- ) -> typing.Dict[str, str]:
- value = self._serialize_simple(
- in_data=in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True
- )
- return self._to_dict(self.name, value)
+ method,
+ resource_path,
+ path_params=None,
+ query_params=None,
+ header_params=None,
+ body=None,
+ post_params=None,
+ files=None,
+ auth_settings=None,
+ collection_formats=None,
+ _host=None,
+ _request_auth=None
+ ) -> RequestSerialized:
+
+ """Builds the HTTP request params needed by the request.
+ :param method: Method to call.
+ :param resource_path: Path to method endpoint.
+ :param path_params: Path parameters in the url.
+ :param query_params: Query parameters in the url.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param post_params: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param auth_settings: Auth Settings names for the request.
+ :param files: key -> filename, value -> filepath,
+ for `multipart/form-data`.
+ :param collection_formats: dict of collection formats for path, query,
+ header, and post parameters.
+ :param _request_auth: set to override the auth_settings for a single
+ request; this effectively ignores the authentication
+ in the spec for a single request.
+ :return: tuple of form (method, url, header_params,
+ body, post_params)
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> path
- path:
- returns path_params: dict
- label -> path
- returns path_params
- matrix -> path
- returns path_params
- """
- if self.style:
- if self.style is ParameterStyle.SIMPLE:
- return self.__serialize_simple(cast_in_data)
- elif self.style is ParameterStyle.LABEL:
- return self.__serialize_label(cast_in_data)
- elif self.style is ParameterStyle.MATRIX:
- return self.__serialize_matrix(cast_in_data)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class QueryParameter(ParameterBase, StyleFormSerializer):
+ config = self.configuration
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.QUERY,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
+ # header parameters
+ header_params = header_params or {}
+ header_params.update(self.default_headers)
+ if self.cookie:
+ header_params['Cookie'] = self.cookie
+ if header_params:
+ header_params = self.sanitize_for_serialization(header_params)
+ header_params = dict(
+ self.parameters_to_tuples(header_params, collection_formats)
+ )
- def __serialize_space_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # path parameters
+ if path_params:
+ path_params = self.sanitize_for_serialization(path_params)
+ path_params = self.parameters_to_tuples(
+ path_params,
+ collection_formats
+ )
+ for k, v in path_params:
+ # specified safe chars, encode everything
+ resource_path = resource_path.replace(
+ '{%s}' % k,
+ quote(str(v), safe=config.safe_chars_for_path_param)
+ )
- def __serialize_pipe_delimited(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._ref6570_expansion(
- variable_name=self.name,
- in_data=in_data,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
- )
- return self._to_dict(self.name, value)
+ # post parameters
+ if post_params or files:
+ post_params = post_params if post_params else []
+ post_params = self.sanitize_for_serialization(post_params)
+ post_params = self.parameters_to_tuples(
+ post_params,
+ collection_formats
+ )
+ if files:
+ post_params.extend(self.files_parameters(files))
- def __serialize_form(
- self,
- in_data: typing.Union[None, int, float, str, bool, dict, list],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator]
- ) -> typing.Dict[str, str]:
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- value = self._serialize_form(
- in_data,
- name=self.name,
- explode=self.explode,
- percent_encode=True,
- prefix_separator_iterator=prefix_separator_iterator
+ # auth setting
+ self.update_params_for_auth(
+ header_params,
+ query_params,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=_request_auth
)
- return self._to_dict(self.name, value)
- def get_prefix_separator_iterator(self) -> typing.Optional[PrefixSeparatorIterator]:
- if self.style is ParameterStyle.FORM:
- return PrefixSeparatorIterator('?', '&')
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return PrefixSeparatorIterator('', '%20')
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return PrefixSeparatorIterator('', '|')
-
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict],
- prefix_separator_iterator: typing.Optional[PrefixSeparatorIterator] = None
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> query
- query:
- - GET/HEAD/DELETE: could use fields
- - PUT/POST: must use urlencode to send parameters
- returns fields: tuple
- spaceDelimited -> query
- returns fields
- pipeDelimited -> query
- returns fields
- deepObject -> query, https://github.com/OAI/OpenAPI-Specification/issues/1706
- returns fields
- """
- if self.style:
- # TODO update query ones to omit setting values when [] {} or None is input
- if self.style is ParameterStyle.FORM:
- return self.__serialize_form(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.SPACE_DELIMITED:
- return self.__serialize_space_delimited(cast_in_data, prefix_separator_iterator)
- elif self.style is ParameterStyle.PIPE_DELIMITED:
- return self.__serialize_pipe_delimited(cast_in_data, prefix_separator_iterator)
- # self.content will be length one
- if prefix_separator_iterator is None:
- prefix_separator_iterator = self.get_prefix_separator_iterator()
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data, eliminate_whitespace=True)
- return self._to_dict(
- self.name,
- next(prefix_separator_iterator) + self.name + '=' + quote(value)
- )
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
+ # body
+ if body:
+ body = self.sanitize_for_serialization(body)
+ # request url
+ if _host is None or self.configuration.ignore_operation_servers:
+ url = self.configuration.host + resource_path
+ else:
+ # use server/host defined in path or operation instead
+ url = _host + resource_path
+
+ # query parameters
+ if query_params:
+ query_params = self.sanitize_for_serialization(query_params)
+ url_query = self.parameters_to_url_query(
+ query_params,
+ collection_formats
+ )
+ url += "?" + url_query
-class CookieParameter(ParameterBase, StyleFormSerializer):
+ return method, url, header_params, body, post_params
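+ # For example (hypothetical values), path_params={'name': 'op 1'} turns
+ # resource_path '/operations/{name}/logs' into '/operations/op%201/logs'
+ # via quote() before the configured host is prepended.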
- def __init__(
- self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: typing.Optional[bool] = None,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- used_style = ParameterStyle.FORM if style is None and content is None and schema else style
- used_explode = self._get_default_explode(used_style) if explode is None else explode
-
- super().__init__(
- name,
- in_type=ParameterInType.COOKIE,
- required=required,
- style=used_style,
- explode=used_explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> typing.Dict[str, str]:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- form -> cookie
- returns fields: tuple
- """
- if self.style:
- """
- TODO add escaping of comma, space, equals
- or turn encoding on
- """
- value = self._serialize_form(
- cast_in_data,
- explode=self.explode,
- name=self.name,
- percent_encode=False,
- prefix_separator_iterator=PrefixSeparatorIterator('', '&')
- )
- return self._to_dict(self.name, value)
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self._to_dict(self.name, value)
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class HeaderParameter(ParameterBase, StyleSimpleSerializer):
- def __init__(
+ def call_api(
self,
- name: str,
- required: bool = False,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: typing.Optional[bool] = None,
- schema: typing.Optional[typing.Type[Schema]] = None,
- content: typing.Optional[typing.Dict[str, typing.Type[Schema]]] = None
- ):
- super().__init__(
- name,
- in_type=ParameterInType.HEADER,
- required=required,
- style=style,
- explode=explode,
- allow_reserved=allow_reserved,
- schema=schema,
- content=content
- )
-
- @staticmethod
- def __to_headers(in_data: typing.Tuple[typing.Tuple[str, str], ...]) -> HTTPHeaderDict:
- data = tuple(t for t in in_data if t)
- headers = HTTPHeaderDict()
- if not data:
- return headers
- headers.extend(data)
- return headers
+ method,
+ url,
+ header_params=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ) -> rest.RESTResponse:
+ """Makes the HTTP request (synchronous)
+ :param method: Method to call.
+ :param url: Path to method endpoint.
+ :param header_params: Header parameters to be
+ placed in the request header.
+ :param body: Request body.
+ :param post_params: Request post form parameters,
+ for `application/x-www-form-urlencoded`, `multipart/form-data`.
+ :param _request_timeout: timeout setting for this request.
+ :return: RESTResponse
+ """
- def serialize(
- self,
- in_data: typing.Union[
- Schema, Decimal, int, float, str, date, datetime, None, bool, list, tuple, dict, frozendict.frozendict]
- ) -> HTTPHeaderDict:
- if self.schema:
- cast_in_data = self.schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- """
- simple -> header
- headers: PoolManager needs a mapping, tuple is close
- returns headers: dict
- """
- if self.style:
- value = self._serialize_simple(cast_in_data, self.name, self.explode, False)
- return self.__to_headers(((self.name, value),))
- # self.content will be length one
- for content_type, schema in self.content.items():
- cast_in_data = schema(in_data)
- cast_in_data = self._json_encoder.default(cast_in_data)
- if self._content_type_is_json(content_type):
- value = self._serialize_json(cast_in_data)
- return self.__to_headers(((self.name, value),))
- raise NotImplementedError('Serialization of {} has not yet been implemented'.format(content_type))
-
-
-class Encoding:
- def __init__(
- self,
- content_type: str,
- headers: typing.Optional[typing.Dict[str, HeaderParameter]] = None,
- style: typing.Optional[ParameterStyle] = None,
- explode: bool = False,
- allow_reserved: bool = False,
- ):
- self.content_type = content_type
- self.headers = headers
- self.style = style
- self.explode = explode
- self.allow_reserved = allow_reserved
-
-
-@dataclass
-class MediaType:
- """
- Used to store request and response body schema information
- encoding:
- A map between a property name and its encoding information.
- The key, being the property name, MUST exist in the schema as a property.
- The encoding object SHALL only apply to requestBody objects when the media type is
- multipart or application/x-www-form-urlencoded.
- """
- schema: typing.Optional[typing.Type[Schema]] = None
- encoding: typing.Optional[typing.Dict[str, Encoding]] = None
+ try:
+ # perform request and return response
+ response_data = self.rest_client.request(
+ method, url,
+ headers=header_params,
+ body=body, post_params=post_params,
+ _request_timeout=_request_timeout
+ )
+ except ApiException as e:
+ raise e
-@dataclass
-class ApiResponse:
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, Schema]
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
+ return response_data
- def __init__(
+ def response_deserialize(
self,
- response: urllib3.HTTPResponse,
- body: typing.Union[Unset, typing.Type[Schema]],
- headers: typing.Union[Unset, typing.List[HeaderParameter]]
- ):
- """
- pycharm needs this to prevent 'Unexpected argument' warnings
+ response_data: rest.RESTResponse,
+ response_types_map: Optional[Dict[str, ApiResponseT]]=None
+ ) -> ApiResponse[ApiResponseT]:
+ """Deserializes response into an object.
+ :param response_data: RESTResponse object to be deserialized.
+ :param response_types_map: dict of response types.
+ :return: ApiResponse
"""
- self.response = response
- self.body = body
- self.headers = headers
+ msg = "RESTResponse.read() must be called before passing it to response_deserialize()"
+ assert response_data.data is not None, msg
-@dataclass
-class ApiResponseWithoutDeserialization(ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[Unset, typing.Type[Schema]] = unset
- headers: typing.Union[Unset, typing.List[HeaderParameter]] = unset
+ response_type = response_types_map.get(str(response_data.status), None)
+ if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599:
+ # if not found, look for '1XX', '2XX', etc.
+ response_type = response_types_map.get(str(response_data.status)[0] + "XX", None)
+ # deserialize response data
+ response_text = None
+ return_data = None
+ try:
+ if response_type == "bytearray":
+ return_data = response_data.data
+ elif response_type == "file":
+ return_data = self.__deserialize_file(response_data)
+ elif response_type is not None:
+ match = None
+ content_type = response_data.getheader('content-type')
+ if content_type is not None:
+ match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
+ encoding = match.group(1) if match else "utf-8"
+ response_text = response_data.data.decode(encoding)
+ return_data = self.deserialize(response_text, response_type, content_type)
+ finally:
+ if not 200 <= response_data.status <= 299:
+ raise ApiException.from_response(
+ http_resp=response_data,
+ body=response_text,
+ data=return_data,
+ )
-class OpenApiResponse(JSONDetector):
- __filename_content_disposition_pattern = re.compile('filename="(.+?)"')
+ return ApiResponse(
+ status_code = response_data.status,
+ data = return_data,
+ headers = response_data.getheaders(),
+ raw_data = response_data.data
+ )
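+ # Sketch of the read-then-deserialize contract (assumed usage; the
+ # response_types_map value is illustrative):
+ #
+ #     response_data = api_client.call_api(method, url, header_params=headers)
+ #     response_data.read()  # populate .data before deserializing
+ #     api_response = api_client.response_deserialize(
+ #         response_data, response_types_map={'200': 'str'})
+ #     print(api_response.status_code, api_response.data)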
- def __init__(
- self,
- response_cls: typing.Type[ApiResponse] = ApiResponse,
- content: typing.Optional[typing.Dict[str, MediaType]] = None,
- headers: typing.Optional[typing.List[HeaderParameter]] = None,
- ):
- self.headers = headers
- if content is not None and len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
- self.response_cls = response_cls
-
- @staticmethod
- def __deserialize_json(response: urllib3.HTTPResponse) -> typing.Any:
- # python must be >= 3.9 so we can pass in bytes into json.loads
- return json.loads(response.data)
-
- @staticmethod
- def __file_name_from_response_url(response_url: typing.Optional[str]) -> typing.Optional[str]:
- if response_url is None:
- return None
- url_path = urlparse(response_url).path
- if url_path:
- path_basename = os.path.basename(url_path)
- if path_basename:
- _filename, ext = os.path.splitext(path_basename)
- if ext:
- return path_basename
- return None
+ def sanitize_for_serialization(self, obj):
+ """Builds a JSON POST object.
- @classmethod
- def __file_name_from_content_disposition(cls, content_disposition: typing.Optional[str]) -> typing.Optional[str]:
- if content_disposition is None:
- return None
- match = cls.__filename_content_disposition_pattern.search(content_disposition)
- if not match:
- return None
- return match.group(1)
+ If obj is None, return None.
+ If obj is SecretStr, return obj.get_secret_value()
+ If obj is str, int, long, float, bool, return directly.
+ If obj is datetime.datetime, datetime.date
+ convert to string in iso8601 format.
+ If obj is list, sanitize each element in the list.
+ If obj is dict, return the dict.
+ If obj is OpenAPI model, return the properties dict.
- def __deserialize_application_octet_stream(
- self, response: urllib3.HTTPResponse
- ) -> typing.Union[bytes, io.BufferedReader]:
- """
- urllib3 use cases:
- 1. when preload_content=True (stream=False) then supports_chunked_reads is False and bytes are returned
- 2. when preload_content=False (stream=True) then supports_chunked_reads is True and
- a file will be written and returned
+ :param obj: The data to serialize.
+ :return: The serialized form of data.
"""
- if response.supports_chunked_reads():
- file_name = (
- self.__file_name_from_content_disposition(response.headers.get('content-disposition'))
- or self.__file_name_from_response_url(response.geturl())
+ if obj is None:
+ return None
+ elif isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, SecretStr):
+ return obj.get_secret_value()
+ elif isinstance(obj, self.PRIMITIVE_TYPES):
+ return obj
+ elif isinstance(obj, list):
+ return [
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
+ ]
+ elif isinstance(obj, tuple):
+ return tuple(
+ self.sanitize_for_serialization(sub_obj) for sub_obj in obj
)
+ elif isinstance(obj, (datetime.datetime, datetime.date)):
+ return obj.isoformat()
- if file_name is None:
- _fd, path = tempfile.mkstemp()
- else:
- path = os.path.join(tempfile.gettempdir(), file_name)
-
- with open(path, 'wb') as new_file:
- chunk_size = 1024
- while True:
- data = response.read(chunk_size)
- if not data:
- break
- new_file.write(data)
- # release_conn is needed for streaming connections only
- response.release_conn()
- new_file = open(path, 'rb')
- return new_file
+ elif isinstance(obj, dict):
+ obj_dict = obj
else:
- return response.data
+ # Convert model obj to dict except
+ # attributes `openapi_types`, `attribute_map`
+ # and attributes which value is not None.
+ # Convert attribute name to json key in
+ # model definition for request.
+ if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')):
+ obj_dict = obj.to_dict()
+ else:
+ obj_dict = obj.__dict__
- @staticmethod
- def __deserialize_multipart_form_data(
- response: urllib3.HTTPResponse
- ) -> typing.Dict[str, typing.Any]:
- msg = email.message_from_bytes(response.data)
return {
- part.get_param("name", header="Content-Disposition"): part.get_payload(
- decode=True
- ).decode(part.get_content_charset())
- if part.get_content_charset()
- else part.get_payload()
- for part in msg.get_payload()
+ key: self.sanitize_for_serialization(val)
+ for key, val in obj_dict.items()
}
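+ # For instance (illustrative values):
+ #     sanitize_for_serialization({'when': datetime.date(2024, 2, 6),
+ #                                 'tags': ['a', 'b']})
+ # yields {'when': '2024-02-06', 'tags': ['a', 'b']}.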
- def deserialize(self, response: urllib3.HTTPResponse, configuration: Configuration) -> ApiResponse:
- content_type = response.getheader('content-type')
- deserialized_body = unset
- streamed = response.supports_chunked_reads()
-
- deserialized_headers = unset
- if self.headers is not None:
- # TODO add header deserialiation here
- pass
-
- if self.content is not None:
- if content_type not in self.content:
- raise ApiValueError(
- f"Invalid content_type returned. Content_type='{content_type}' was returned "
- f"when only {str(set(self.content))} are defined for status_code={str(response.status)}"
- )
- body_schema = self.content[content_type].schema
- if body_schema is None:
- # some specs do not define response content media type schemas
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=unset
- )
+ def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]):
+ """Deserializes response into an object.
+
+ :param response_text: response body text to be deserialized.
+ :param response_type: class literal for
+ deserialized object, or string of class name.
+ :param content_type: content type of response.
+
+ :return: deserialized object.
+ """
- if self._content_type_is_json(content_type):
- body_data = self.__deserialize_json(response)
- elif content_type == 'application/octet-stream':
- body_data = self.__deserialize_application_octet_stream(response)
- elif content_type.startswith('multipart/form-data'):
- body_data = self.__deserialize_multipart_form_data(response)
- content_type = 'multipart/form-data'
+ # fetch data from response object
+ if content_type is None:
+ try:
+ data = json.loads(response_text)
+ except ValueError:
+ data = response_text
+ elif content_type.startswith("application/json"):
+ if response_text == "":
+ data = ""
else:
- raise NotImplementedError('Deserialization of {} has not yet been implemented'.format(content_type))
- deserialized_body = body_schema.from_openapi_data_oapg(
- body_data, _configuration=configuration)
- elif streamed:
- response.release_conn()
-
- return self.response_cls(
- response=response,
- headers=deserialized_headers,
- body=deserialized_body
- )
+ data = json.loads(response_text)
+ elif content_type.startswith("text/plain"):
+ data = response_text
+ else:
+ raise ApiException(
+ status=0,
+ reason="Unsupported content type: {0}".format(content_type)
+ )
+ return self.__deserialize(data, response_type)
-class ApiClient:
- """Generic API client for OpenAPI client library builds.
+ def __deserialize(self, data, klass):
+ """Deserializes dict, list, str into an object.
- OpenAPI generic API client. This client handles the client-
- server communication, and is invariant across implementations. Specifics of
- the methods and models for each application are generated from the OpenAPI
- templates.
+ :param data: dict, list or str.
+ :param klass: class literal, or string of class name.
- NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
+ :return: object.
+ """
+ if data is None:
+ return None
- :param configuration: .Configuration object for this client
- :param header_name: a header to pass when making calls to the API.
- :param header_value: a header value to pass when making calls to
- the API.
- :param cookie: a cookie to include in the header when making calls
- to the API
- :param pool_threads: The number of threads to use for async requests
- to the API. More threads means more concurrent API requests.
- """
+ if isinstance(klass, str):
+ if klass.startswith('List['):
+ m = re.match(r'List\[(.*)]', klass)
+ assert m is not None, "Malformed List type definition"
+ sub_kls = m.group(1)
+ return [self.__deserialize(sub_data, sub_kls)
+ for sub_data in data]
+
+ if klass.startswith('Dict['):
+ m = re.match(r'Dict\[([^,]*), (.*)]', klass)
+ assert m is not None, "Malformed Dict type definition"
+ sub_kls = m.group(2)
+ return {k: self.__deserialize(v, sub_kls)
+ for k, v in data.items()}
+
+ # convert str to class
+ if klass in self.NATIVE_TYPES_MAPPING:
+ klass = self.NATIVE_TYPES_MAPPING[klass]
+ else:
+ klass = getattr(cloudharness_cli.workflows.models, klass)
+
+ if klass in self.PRIMITIVE_TYPES:
+ return self.__deserialize_primitive(data, klass)
+ elif klass == object:
+ return self.__deserialize_object(data)
+ elif klass == datetime.date:
+ return self.__deserialize_date(data)
+ elif klass == datetime.datetime:
+ return self.__deserialize_datetime(data)
+ elif issubclass(klass, Enum):
+ return self.__deserialize_enum(data, klass)
+ else:
+ return self.__deserialize_model(data, klass)
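+ # e.g. (illustrative): __deserialize(['1', '2'], 'List[int]') -> [1, 2]
+ # and __deserialize('2024-02-06', 'date') -> datetime.date(2024, 2, 6).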
- _pool = None
+ def parameters_to_tuples(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __init__(
- self,
- configuration: typing.Optional[Configuration] = None,
- header_name: typing.Optional[str] = None,
- header_value: typing.Optional[str] = None,
- cookie: typing.Optional[str] = None,
- pool_threads: int = 1
- ):
- if configuration is None:
- configuration = Configuration()
- self.configuration = configuration
- self.pool_threads = pool_threads
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: Parameters as list of tuples, collections formatted
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, value) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(str(value) for value in v)))
+ else:
+ new_params.append((k, v))
+ return new_params
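+ # e.g. (illustrative): with collection_formats={'ids': 'csv'},
+ # {'ids': [1, 2, 3]} becomes [('ids', '1,2,3')]; with 'multi' it becomes
+ # [('ids', 1), ('ids', 2), ('ids', 3)].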
- self.rest_client = rest.RESTClientObject(configuration)
- self.default_headers = HTTPHeaderDict()
- if header_name is not None:
- self.default_headers[header_name] = header_value
- self.cookie = cookie
- # Set default User-Agent.
- self.user_agent = 'OpenAPI-Generator/1.0.0/python'
+ def parameters_to_url_query(self, params, collection_formats):
+ """Get parameters as list of tuples, formatting collections.
- def __enter__(self):
- return self
+ :param params: Parameters as dict or list of two-tuples
+ :param dict collection_formats: Parameter collection formats
+ :return: URL query string (e.g. a=Hello%20World&b=123)
+ """
+ new_params: List[Tuple[str, str]] = []
+ if collection_formats is None:
+ collection_formats = {}
+ for k, v in params.items() if isinstance(params, dict) else params:
+ if isinstance(v, bool):
+ v = str(v).lower()
+ if isinstance(v, (int, float)):
+ v = str(v)
+ if isinstance(v, dict):
+ v = json.dumps(v)
+
+ if k in collection_formats:
+ collection_format = collection_formats[k]
+ if collection_format == 'multi':
+ new_params.extend((k, str(value)) for value in v)
+ else:
+ if collection_format == 'ssv':
+ delimiter = ' '
+ elif collection_format == 'tsv':
+ delimiter = '\t'
+ elif collection_format == 'pipes':
+ delimiter = '|'
+ else: # csv is the default
+ delimiter = ','
+ new_params.append(
+ (k, delimiter.join(quote(str(value)) for value in v))
+ )
+ else:
+ new_params.append((k, quote(str(v))))
- def __exit__(self, exc_type, exc_value, traceback):
- self.close()
+ return "&".join(["=".join(map(str, item)) for item in new_params])
- def close(self):
- if self._pool:
- self._pool.close()
- self._pool.join()
- self._pool = None
- if hasattr(atexit, 'unregister'):
- atexit.unregister(self.close)
+ def files_parameters(self, files: Dict[str, Union[str, bytes]]):
+ """Builds form parameters.
- @property
- def pool(self):
- """Create thread pool on first request
- avoids instantiating unused threadpool for blocking clients.
+ :param files: File parameters.
+ :return: Form parameters with files.
"""
- if self._pool is None:
- atexit.register(self.close)
- self._pool = ThreadPool(self.pool_threads)
- return self._pool
-
- @property
- def user_agent(self):
- """User agent for this API client"""
- return self.default_headers['User-Agent']
+ params = []
+ for k, v in files.items():
+ if isinstance(v, str):
+ with open(v, 'rb') as f:
+ filename = os.path.basename(f.name)
+ filedata = f.read()
+ elif isinstance(v, bytes):
+ filename = k
+ filedata = v
+ else:
+ raise ValueError("Unsupported file value")
+ mimetype = (
+ mimetypes.guess_type(filename)[0]
+ or 'application/octet-stream'
+ )
+ params.append(
+ tuple([k, tuple([filename, filedata, mimetype])])
+ )
+ return params
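+ # e.g. (illustrative): files={'report': b'raw-bytes'} yields
+ # [('report', ('report', b'raw-bytes', 'application/octet-stream'))];
+ # a str value is treated as a file path and read from disk instead.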
- @user_agent.setter
- def user_agent(self, value):
- self.default_headers['User-Agent'] = value
+ def select_header_accept(self, accepts: List[str]) -> Optional[str]:
+ """Returns `Accept` based on an array of accepts provided.
- def set_default_header(self, header_name, header_value):
- self.default_headers[header_name] = header_value
+ :param accepts: List of headers.
+ :return: Accept (e.g. application/json).
+ """
+ if not accepts:
+ return None
- def __call_api(
- self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
+ for accept in accepts:
+ if re.search('json', accept, re.IGNORECASE):
+ return accept
- # header parameters
- used_headers = HTTPHeaderDict(self.default_headers)
- if self.cookie:
- headers['Cookie'] = self.cookie
+ return accepts[0]
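+ # e.g.: select_header_accept(['text/plain', 'application/json']) returns
+ # 'application/json'; when no entry matches 'json', the first one wins.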
- # auth setting
- self.update_params_for_auth(used_headers,
- auth_settings, resource_path, method, body)
+ def select_header_content_type(self, content_types):
+ """Returns `Content-Type` based on an array of content_types provided.
- # must happen after cookie setting and auth setting in case user is overriding those
- if headers:
- used_headers.update(headers)
+ :param content_types: List of content-types.
+ :return: Content-Type (e.g. application/json).
+ """
+ if not content_types:
+ return None
- # request url
- if host is None:
- url = self.configuration.host + resource_path
- else:
- # use server/host defined in path or operation instead
- url = host + resource_path
+ for content_type in content_types:
+ if re.search('json', content_type, re.IGNORECASE):
+ return content_type
- # perform request and return response
- response = self.request(
- method,
- url,
- headers=used_headers,
- fields=fields,
- body=body,
- stream=stream,
- timeout=timeout,
- )
- return response
+ return content_types[0]
- def call_api(
+ def update_params_for_auth(
self,
- resource_path: str,
- method: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- auth_settings: typing.Optional[typing.List[str]] = None,
- async_req: typing.Optional[bool] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- host: typing.Optional[str] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request (synchronous) and returns deserialized data.
-
- To make an async_req request, set the async_req parameter.
+ headers,
+ queries,
+ auth_settings,
+ resource_path,
+ method,
+ body,
+ request_auth=None
+ ) -> None:
+ """Updates header and query params based on authentication setting.
- :param resource_path: Path to method endpoint.
- :param method: Method to call.
- :param headers: Header parameters to be
- placed in the request header.
- :param body: Request body.
- :param fields: Request post form parameters,
- for `application/x-www-form-urlencoded`, `multipart/form-data`.
- :param auth_settings: Auth Settings names for the request.
- :param async_req: execute request asynchronously
- :type async_req: bool, optional TODO remove, unused
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Also when True, if the openapi spec describes a file download,
- the data will be written to a local filesystme file and the BinarySchema
- instance will also inherit from FileSchema and FileIO
- Default is False.
- :type stream: bool, optional
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
- :param host: api endpoint host
- :return:
- If async_req parameter is True,
- the request will be called asynchronously.
- The method will return the request thread.
- If parameter async_req is False or missing,
- then the method will return the response directly.
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param auth_settings: Authentication setting identifiers list.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param request_auth: if set, the provided settings will
+ override the token in the configuration.
"""
+ if not auth_settings:
+ return
- if not async_req:
- return self.__call_api(
- resource_path,
- method,
+ if request_auth:
+ self._apply_auth_params(
headers,
- body,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
- )
-
- return self.pool.apply_async(
- self.__call_api,
- (
+ queries,
resource_path,
method,
- headers,
body,
- json,
- fields,
- auth_settings,
- stream,
- timeout,
- host,
+ request_auth
)
- )
-
- def request(
+ else:
+ for auth in auth_settings:
+ auth_setting = self.configuration.auth_settings().get(auth)
+ if auth_setting:
+ self._apply_auth_params(
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ )
+
+ def _apply_auth_params(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, str], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
- """Makes the HTTP request using RESTClient."""
- if method == "GET":
- return self.rest_client.GET(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "HEAD":
- return self.rest_client.HEAD(url,
- stream=stream,
- timeout=timeout,
- headers=headers)
- elif method == "OPTIONS":
- return self.rest_client.OPTIONS(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "POST":
- return self.rest_client.POST(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PUT":
- return self.rest_client.PUT(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "PATCH":
- return self.rest_client.PATCH(url,
- headers=headers,
- fields=fields,
- stream=stream,
- timeout=timeout,
- body=body)
- elif method == "DELETE":
- return self.rest_client.DELETE(url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body)
+ headers,
+ queries,
+ resource_path,
+ method,
+ body,
+ auth_setting
+ ) -> None:
+ """Updates the request parameters based on a single auth_setting
+
+ :param headers: Header parameters dict to be updated.
+ :param queries: Query parameters tuple list to be updated.
+ :param resource_path: A string representation of the HTTP request resource path.
+ :param method: A string representation of the HTTP request method.
+ :param body: An object representing the body of the HTTP request.
+ The object type is the return value of sanitize_for_serialization().
+ :param auth_setting: auth settings for the endpoint
+ """
+ if auth_setting['in'] == 'cookie':
+ headers['Cookie'] = auth_setting['value']
+ elif auth_setting['in'] == 'header':
+ if auth_setting['type'] != 'http-signature':
+ headers[auth_setting['key']] = auth_setting['value']
+ elif auth_setting['in'] == 'query':
+ queries.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
- "http method must be `GET`, `HEAD`, `OPTIONS`,"
- " `POST`, `PATCH`, `PUT` or `DELETE`."
+ 'Authentication token must be in `query` or `header`'
)
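+ # An auth_setting, as returned by Configuration.auth_settings(), is a
+ # dict of the form (values illustrative):
+ # {'in': 'header', 'type': 'bearer', 'key': 'Authorization',
+ #  'value': 'Bearer <token>'}.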
- def update_params_for_auth(self, headers, auth_settings,
- resource_path, method, body):
- """Updates header and query params based on authentication setting.
+ def __deserialize_file(self, response):
+ """Deserializes body to file
- :param headers: Header parameters dict to be updated.
- :param auth_settings: Authentication setting identifiers list.
- :param resource_path: A string representation of the HTTP request resource path.
- :param method: A string representation of the HTTP request method.
- :param body: A object representing the body of the HTTP request.
- The object type is the return value of _encoder.default().
- """
- if not auth_settings:
- return
+ Saves response body into a file in a temporary folder,
+ using the filename from the `Content-Disposition` header if provided.
- for auth in auth_settings:
- auth_setting = self.configuration.auth_settings().get(auth)
- if not auth_setting:
- continue
- if auth_setting['in'] == 'cookie':
- headers.add('Cookie', auth_setting['value'])
- elif auth_setting['in'] == 'header':
- if auth_setting['type'] != 'http-signature':
- headers.add(auth_setting['key'], auth_setting['value'])
- elif auth_setting['in'] == 'query':
- """ TODO implement auth in query
- need to pass in prefix_separator_iterator
- and need to output resource_path with query params added
- """
- raise ApiValueError("Auth in query not yet implemented")
- else:
- raise ApiValueError(
- 'Authentication token must be in `query` or `header`'
- )
+ :param response: RESTResponse.
+ :return: file path.
+ """
+ fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
+ os.close(fd)
+ os.remove(path)
+
+ content_disposition = response.getheader("Content-Disposition")
+ if content_disposition:
+ m = re.search(
+ r'filename=[\'"]?([^\'"\s]+)[\'"]?',
+ content_disposition
+ )
+ assert m is not None, "Unexpected 'content-disposition' header value"
+ filename = m.group(1)
+ path = os.path.join(os.path.dirname(path), filename)
-class Api:
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
+ with open(path, "wb") as f:
+ f.write(response.data)
- Do not edit the class manually.
- """
+ return path
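+ # e.g. (illustrative): a response carrying
+ # 'Content-Disposition: attachment; filename="logs.txt"' is written to
+ # logs.txt under the configured temp folder (or the system default)
+ # and that path is returned.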
+
+ def __deserialize_primitive(self, data, klass):
+ """Deserializes string to primitive type.
- def __init__(self, api_client: typing.Optional[ApiClient] = None):
- if api_client is None:
- api_client = ApiClient()
- self.api_client = api_client
+ :param data: str.
+ :param klass: class literal.
- @staticmethod
- def _verify_typed_dict_inputs_oapg(cls: typing.Type[typing_extensions.TypedDict], data: typing.Dict[str, typing.Any]):
+ :return: int, long, float, str, bool.
"""
- Ensures that:
- - required keys are present
- - additional properties are not input
- - value stored under required keys do not have the value unset
- Note: detailed value checking is done in schema classes
+ try:
+ return klass(data)
+ except UnicodeEncodeError:
+ return str(data)
+ except TypeError:
+ return data
+
+ def __deserialize_object(self, value):
+ """Return an original value.
+
+ :return: object.
"""
- missing_required_keys = []
- required_keys_with_unset_values = []
- for required_key in cls.__required_keys__:
- if required_key not in data:
- missing_required_keys.append(required_key)
- continue
- value = data[required_key]
- if value is unset:
- required_keys_with_unset_values.append(required_key)
- if missing_required_keys:
- raise ApiTypeError(
- '{} missing {} required arguments: {}'.format(
- cls.__name__, len(missing_required_keys), missing_required_keys
- )
- )
- if required_keys_with_unset_values:
- raise ApiValueError(
- '{} contains invalid unset values for {} required keys: {}'.format(
- cls.__name__, len(required_keys_with_unset_values), required_keys_with_unset_values
- )
- )
+ return value
- disallowed_additional_keys = []
- for key in data:
- if key in cls.__required_keys__ or key in cls.__optional_keys__:
- continue
- disallowed_additional_keys.append(key)
- if disallowed_additional_keys:
- raise ApiTypeError(
- '{} got {} unexpected keyword arguments: {}'.format(
- cls.__name__, len(disallowed_additional_keys), disallowed_additional_keys
- )
- )
+ def __deserialize_date(self, string):
+ """Deserializes string to date.
- def _get_host_oapg(
- self,
- operation_id: str,
- servers: typing.Tuple[typing.Dict[str, str], ...] = tuple(),
- host_index: typing.Optional[int] = None
- ) -> typing.Optional[str]:
- configuration = self.api_client.configuration
+ :param string: str.
+ :return: date.
+ """
try:
- if host_index is None:
- index = configuration.server_operation_index.get(
- operation_id, configuration.server_index
- )
- else:
- index = host_index
- server_variables = configuration.server_operation_variables.get(
- operation_id, configuration.server_variables
- )
- host = configuration.get_host_from_settings(
- index, variables=server_variables, servers=servers
+ return parse(string).date()
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason="Failed to parse `{0}` as date object".format(string)
)
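A minimal sketch of the parsing used above, assuming python-dateutil (the source of `parse`) is installed:

    from dateutil.parser import parse

    print(parse("2024-02-06").date())  # 2024-02-06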
- except IndexError:
- if servers:
- raise ApiValueError(
- "Invalid host index. Must be 0 <= index < %s" %
- len(servers)
- )
- host = None
- return host
-
-
-class SerializedRequestBody(typing_extensions.TypedDict, total=False):
- body: typing.Union[str, bytes]
- fields: typing.Tuple[typing.Union[RequestField, typing.Tuple[str, str]], ...]
+ def __deserialize_datetime(self, string):
+ """Deserializes string to datetime.
-class RequestBody(StyleFormSerializer, JSONDetector):
- """
- A request body parameter
- content: content_type to MediaType Schema info
- """
- __json_encoder = JSONEncoder()
+ The string should be in iso8601 datetime format.
- def __init__(
- self,
- content: typing.Dict[str, MediaType],
- required: bool = False,
- ):
- self.required = required
- if len(content) == 0:
- raise ValueError('Invalid value for content, the content dict must have >= 1 entry')
- self.content = content
-
- def __serialize_json(
- self,
- in_data: typing.Any
- ) -> typing.Dict[str, bytes]:
- in_data = self.__json_encoder.default(in_data)
- json_str = json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode(
- "utf-8"
- )
- return dict(body=json_str)
-
- @staticmethod
- def __serialize_text_plain(in_data: typing.Any) -> typing.Dict[str, str]:
- if isinstance(in_data, frozendict.frozendict):
- raise ValueError('Unable to serialize type frozendict.frozendict to text/plain')
- elif isinstance(in_data, tuple):
- raise ValueError('Unable to serialize type tuple to text/plain')
- elif isinstance(in_data, NoneClass):
- raise ValueError('Unable to serialize type NoneClass to text/plain')
- elif isinstance(in_data, BoolClass):
- raise ValueError('Unable to serialize type BoolClass to text/plain')
- return dict(body=str(in_data))
-
- def __multipart_json_item(self, key: str, value: Schema) -> RequestField:
- json_value = self.__json_encoder.default(value)
- return RequestField(name=key, data=json.dumps(json_value), headers={'Content-Type': 'application/json'})
-
- def __multipart_form_item(self, key: str, value: Schema) -> RequestField:
- if isinstance(value, str):
- return RequestField(name=key, data=str(value), headers={'Content-Type': 'text/plain'})
- elif isinstance(value, bytes):
- return RequestField(name=key, data=value, headers={'Content-Type': 'application/octet-stream'})
- elif isinstance(value, FileIO):
- request_field = RequestField(
- name=key,
- data=value.read(),
- filename=os.path.basename(value.name),
- headers={'Content-Type': 'application/octet-stream'}
+ :param string: str.
+ :return: datetime.
+ """
+ try:
+ return parse(string)
+ except ImportError:
+ return string
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as datetime object"
+ .format(string)
+ )
)
- value.close()
- return request_field
- else:
- return self.__multipart_json_item(key=key, value=value)
- def __serialize_multipart_form_data(
- self, in_data: Schema
- ) -> typing.Dict[str, typing.Tuple[RequestField, ...]]:
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(f'Unable to serialize {in_data} to multipart/form-data because it is not a dict of data')
+ def __deserialize_enum(self, data, klass):
+ """Deserializes primitive type to enum.
+
+ :param data: primitive type.
+ :param klass: class literal.
+ :return: enum value.
"""
- In a multipart/form-data request body, each schema property, or each element of a schema array property,
- takes a section in the payload with an internal header as defined by RFC7578. The serialization strategy
- for each property of a multipart/form-data request body can be specified in an associated Encoding Object.
+ try:
+ return klass(data)
+ except ValueError:
+ raise rest.ApiException(
+ status=0,
+ reason=(
+ "Failed to parse `{0}` as `{1}`"
+ .format(data, klass)
+ )
+ )
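Sketch of the enum path above; the member value is assumed to match the old OperationStatus model deleted later in this patch:

    from cloudharness_cli.workflows.models.operation_status import OperationStatus

    status = OperationStatus("Running")  # assumed valid enum value
    print(status.value)                  # Running
    # An unknown value such as "Bogus" raises ValueError, re-raised as ApiException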
- When passing in multipart types, boundaries MAY be used to separate sections of the content being
- transferred – thus, the following default Content-Types are defined for multipart:
+ def __deserialize_model(self, data, klass):
+ """Deserializes list or dict to model.
- If the (object) property is a primitive, or an array of primitive values, the default Content-Type is text/plain
- If the property is complex, or an array of complex values, the default Content-Type is application/json
- Question: how is the array of primitives encoded?
- If the property is a type: string with a contentEncoding, the default Content-Type is application/octet-stream
- """
- fields = []
- for key, value in in_data.items():
- if isinstance(value, tuple):
- if value:
- # values use explode = True, so the code makes a RequestField for each item with name=key
- for item in value:
- request_field = self.__multipart_form_item(key=key, value=item)
- fields.append(request_field)
- else:
- # send an empty array as json because exploding will not send it
- request_field = self.__multipart_json_item(key=key, value=value)
- fields.append(request_field)
- else:
- request_field = self.__multipart_form_item(key=key, value=value)
- fields.append(request_field)
-
- return dict(fields=tuple(fields))
-
- def __serialize_application_octet_stream(self, in_data: BinarySchema) -> typing.Dict[str, bytes]:
- if isinstance(in_data, bytes):
- return dict(body=in_data)
- # FileIO type
- result = dict(body=in_data.read())
- in_data.close()
- return result
-
- def __serialize_application_x_www_form_data(
- self, in_data: typing.Any
- ) -> SerializedRequestBody:
+ :param data: dict, list.
+ :param klass: class literal.
+ :return: model object.
"""
- POST submission of form data in body
- """
- if not isinstance(in_data, frozendict.frozendict):
- raise ValueError(
- f'Unable to serialize {in_data} to application/x-www-form-urlencoded because it is not a dict of data')
- cast_in_data = self.__json_encoder.default(in_data)
- value = self._serialize_form(cast_in_data, name='', explode=True, percent_encode=True)
- return dict(body=value)
-
- def serialize(
- self, in_data: typing.Any, content_type: str
- ) -> SerializedRequestBody:
- """
- If a str is returned then the result will be assigned to data when making the request
- If a tuple is returned then the result will be used as fields input in encode_multipart_formdata
- Return a tuple of
- The key of the return dict is
- - body for application/json
- - encode_multipart and fields for multipart/form-data
- """
- media_type = self.content[content_type]
- if isinstance(in_data, media_type.schema):
- cast_in_data = in_data
- elif isinstance(in_data, (dict, frozendict.frozendict)) and in_data:
- cast_in_data = media_type.schema(**in_data)
- else:
- cast_in_data = media_type.schema(in_data)
- # TODO check for and use encoding if it exists
- # and content_type is multipart or application/x-www-form-urlencoded
- if self._content_type_is_json(content_type):
- return self.__serialize_json(cast_in_data)
- elif content_type == 'text/plain':
- return self.__serialize_text_plain(cast_in_data)
- elif content_type == 'multipart/form-data':
- return self.__serialize_multipart_form_data(cast_in_data)
- elif content_type == 'application/x-www-form-urlencoded':
- return self.__serialize_application_x_www_form_data(cast_in_data)
- elif content_type == 'application/octet-stream':
- return self.__serialize_application_octet_stream(cast_in_data)
- raise NotImplementedError('Serialization has not yet been implemented for {}'.format(content_type))
+ return klass.from_dict(data)
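Hedged usage sketch of the model path above, using the new pydantic-based Operation model added later in this patch; the payload is illustrative:

    from cloudharness_cli.workflows.models.operation import Operation

    op = Operation.from_dict({"name": "op-1", "status": "Running"})  # assumed payload
    print(op.name)  # op-1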
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_response.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_response.py
new file mode 100644
index 000000000..9bc7c11f6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/api_response.py
@@ -0,0 +1,21 @@
+"""API response object."""
+
+from __future__ import annotations
+from typing import Optional, Generic, Mapping, TypeVar
+from pydantic import Field, StrictInt, StrictBytes, BaseModel
+
+T = TypeVar("T")
+
+class ApiResponse(BaseModel, Generic[T]):
+ """
+ API response object
+ """
+
+ status_code: StrictInt = Field(description="HTTP status code")
+ headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers")
+ data: T = Field(description="Deserialized data given the data type")
+ raw_data: StrictBytes = Field(description="Raw data (HTTP response body)")
+
+ model_config = {
+ "arbitrary_types_allowed": True
+ }
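A small usage sketch of the wrapper above; the values are illustrative:

    from cloudharness_cli.workflows.api_response import ApiResponse

    resp = ApiResponse[str](
        status_code=200,
        headers={"Content-Type": "text/plain"},  # assumed header
        data="ok",
        raw_data=b"ok",
    )
    print(resp.status_code, resp.data)  # 200 ok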
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/__init__.py
deleted file mode 100644
index 7840f7726..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints then import them from
-# tags, paths, or path_to_api, or tag_to_api
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/path_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/path_to_api.py
deleted file mode 100644
index c453deff7..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/path_to_api.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.workflows.paths import PathValues
-from cloudharness_cli.workflows.apis.paths.operations import Operations
-from cloudharness_cli.workflows.apis.paths.operations_name import OperationsName
-from cloudharness_cli.workflows.apis.paths.operations_name_logs import OperationsNameLogs
-
-PathToApi = typing_extensions.TypedDict(
- 'PathToApi',
- {
- PathValues.OPERATIONS: Operations,
- PathValues.OPERATIONS_NAME: OperationsName,
- PathValues.OPERATIONS_NAME_LOGS: OperationsNameLogs,
- }
-)
-
-path_to_api = PathToApi(
- {
- PathValues.OPERATIONS: Operations,
- PathValues.OPERATIONS_NAME: OperationsName,
- PathValues.OPERATIONS_NAME_LOGS: OperationsNameLogs,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/__init__.py
deleted file mode 100644
index e0642734f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.apis.path_to_api import path_to_api
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations.py
deleted file mode 100644
index 317fd596c..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.workflows.paths.operations.get import ApiForget
-
-
-class Operations(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name.py
deleted file mode 100644
index 698ea5884..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from cloudharness_cli.workflows.paths.operations_name.get import ApiForget
-from cloudharness_cli.workflows.paths.operations_name.delete import ApiFordelete
-
-
-class OperationsName(
- ApiForget,
- ApiFordelete,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name_logs.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name_logs.py
deleted file mode 100644
index beaf0d74e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/paths/operations_name_logs.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from cloudharness_cli.workflows.paths.operations_name_logs.get import ApiForget
-
-
-class OperationsNameLogs(
- ApiForget,
-):
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tag_to_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tag_to_api.py
deleted file mode 100644
index 25c14c70c..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tag_to_api.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import typing_extensions
-
-from cloudharness_cli.workflows.apis.tags import TagValues
-from cloudharness_cli.workflows.apis.tags.create_and_access_api import CreateAndAccessApi
-
-TagToApi = typing_extensions.TypedDict(
- 'TagToApi',
- {
- TagValues.CREATE_AND_ACCESS: CreateAndAccessApi,
- }
-)
-
-tag_to_api = TagToApi(
- {
- TagValues.CREATE_AND_ACCESS: CreateAndAccessApi,
- }
-)
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/__init__.py
deleted file mode 100644
index dfc112ae3..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.apis.tag_to_api import tag_to_api
-
-import enum
-
-
-class TagValues(str, enum.Enum):
- CREATE_AND_ACCESS = "Create and Access"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/create_and_access_api.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/create_and_access_api.py
deleted file mode 100644
index a1ffaffb5..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/apis/tags/create_and_access_api.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from cloudharness_cli.workflows.paths.operations_name.delete import DeleteOperation
-from cloudharness_cli.workflows.paths.operations_name.get import GetOperation
-from cloudharness_cli.workflows.paths.operations.get import ListOperations
-from cloudharness_cli.workflows.paths.operations_name_logs.get import LogOperation
-
-
-class CreateAndAccessApi(
- DeleteOperation,
- GetOperation,
- ListOperations,
- LogOperation,
-):
- """NOTE: This class is auto generated by OpenAPI Generator
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
- pass
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py
index 690e7a3c2..a82a00124 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py
@@ -3,70 +3,49 @@
"""
Workflows API
- Workflows API # noqa: E501
+ Workflows API
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
import copy
import logging
+from logging import FileHandler
import multiprocessing
import sys
+from typing import Optional
import urllib3
-from http import client as http_client
-from cloudharness_cli.workflows.exceptions import ApiValueError
-
+import http.client as httplib
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
- 'minLength', 'pattern', 'maxItems', 'minItems',
- 'uniqueItems', 'maxProperties', 'minProperties',
+ 'minLength', 'pattern', 'maxItems', 'minItems'
}
-class Configuration(object):
- """NOTE: This class is auto generated by OpenAPI Generator
+class Configuration:
+ """This class contains various settings of the API client.
- Ref: https://openapi-generator.tech
- Do not edit the class manually.
-
- :param host: Base url
+ :param host: Base url.
+    :param ignore_operation_servers:
+ Boolean to ignore operation servers for the API client.
+ Config will use `host` as the base url regardless of the operation servers.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
- :param api_key_prefix: Dict to store API prefix (e.g. Bearer)
+ :param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
- :param username: Username for HTTP basic authentication
- :param password: Password for HTTP basic authentication
- :param discard_unknown_keys: Boolean value indicating whether to discard
- unknown properties. A server may send a response that includes additional
- properties that are not known by the client in the following scenarios:
- 1. The OpenAPI document is incomplete, i.e. it does not match the server
- implementation.
- 2. The client was generated using an older version of the OpenAPI document
- and the server has been upgraded since then.
- If a schema in the OpenAPI document defines the additionalProperties attribute,
- then all undeclared properties received by the server are injected into the
- additional properties map. In that case, there are undeclared properties, and
- nothing to discard.
- :param disabled_client_side_validations (string): Comma-separated list of
- JSON schema validation keywords to disable JSON schema structural validation
- rules. The following keywords may be specified: multipleOf, maximum,
- exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
- maxItems, minItems.
- By default, the validation is performed for data generated locally by the client
- and data received from the server, independent of any validation performed by
- the server side. If the input data does not satisfy the JSON schema validation
- rules specified in the OpenAPI document, an exception is raised.
- If disabled_client_side_validations is set, structural validation is
- disabled. This can be useful to troubleshoot data validation problem, such as
- when the OpenAPI document validation rules do not match the actual API data
- received by the server.
+ :param username: Username for HTTP basic authentication.
+ :param password: Password for HTTP basic authentication.
+ :param access_token: Access token.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
@@ -75,7 +54,11 @@ class Configuration(object):
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
- The validation of enums is performed for variables with defined enum values before.
+ The validation of enums is performed for variables with defined enum
+ values before.
+ :param ssl_ca_cert: str - the path to a file of concatenated CA certificates
+ in PEM format.
+ :param retries: Number of retries for API requests.
"""
@@ -84,14 +67,18 @@ class Configuration(object):
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
username=None, password=None,
- discard_unknown_keys=False,
- disabled_client_side_validations="",
+ access_token=None,
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
- ):
+ ignore_operation_servers=False,
+ ssl_ca_cert=None,
+ retries=None,
+ *,
+ debug: Optional[bool] = None
+ ) -> None:
"""Constructor
"""
- self._base_path = "https://workflows.cloudharness.metacell.us/api" if host is None else host
+ self._base_path = "/api" if host is None else host
"""Default Base url
"""
self.server_index = 0 if server_index is None and host is None else server_index
@@ -102,6 +89,9 @@ def __init__(self, host=None,
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
+ self.ignore_operation_servers = ignore_operation_servers
+ """Ignore operation servers
+ """
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
@@ -125,8 +115,9 @@ def __init__(self, host=None,
self.password = password
"""Password for HTTP basic authentication
"""
- self.discard_unknown_keys = discard_unknown_keys
- self.disabled_client_side_validations = disabled_client_side_validations
+ self.access_token = access_token
+ """Access token
+ """
self.logger = {}
"""Logging Settings
"""
@@ -138,13 +129,16 @@ def __init__(self, host=None,
self.logger_stream_handler = None
"""Log stream handler
"""
- self.logger_file_handler = None
+ self.logger_file_handler: Optional[FileHandler] = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
- self.debug = False
+ if debug is not None:
+ self.debug = debug
+ else:
+ self.__debug = False
"""Debug switch
"""
@@ -153,7 +147,7 @@ def __init__(self, host=None,
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
- self.ssl_ca_cert = None
+ self.ssl_ca_cert = ssl_ca_cert
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
@@ -165,6 +159,10 @@ def __init__(self, host=None,
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
+ self.tls_server_name = None
+ """SSL/TLS Server Name Indication (SNI)
+ Set this to the SNI value expected by the server.
+ """
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
@@ -174,7 +172,7 @@ def __init__(self, host=None,
cpu_count * 5 is used as default value to increase performance.
"""
- self.proxy = None
+ self.proxy: Optional[str] = None
"""Proxy URL
"""
self.proxy_headers = None
@@ -183,14 +181,23 @@ def __init__(self, host=None,
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
- self.retries = None
+ self.retries = retries
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
- # Options to pass down to the underlying urllib3 socket
self.socket_options = None
+ """Options to pass down to the underlying urllib3 socket
+ """
+
+ self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z"
+ """datetime format
+ """
+
+ self.date_format = "%Y-%m-%d"
+ """date format
+ """
def __deepcopy__(self, memo):
cls = self.__class__
@@ -208,13 +215,6 @@ def __deepcopy__(self, memo):
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
- if name == 'disabled_client_side_validations':
- s = set(filter(None, value.split(',')))
- for v in s:
- if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
- raise ApiValueError(
- "Invalid keyword: '{0}''".format(v))
- self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
@@ -225,21 +225,31 @@ def set_default(cls, default):
:param default: object of Configuration
"""
- cls._default = copy.deepcopy(default)
+ cls._default = default
@classmethod
def get_default_copy(cls):
- """Return new instance of configuration.
+ """Deprecated. Please use `get_default` instead.
+
+ Deprecated. Please use `get_default` instead.
+
+ :return: The configuration object.
+ """
+ return cls.get_default()
+
+ @classmethod
+ def get_default(cls):
+ """Return the default configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
- configuration passed by the set_default method.
+ configuration.
:return: The configuration object.
"""
- if cls._default is not None:
- return copy.deepcopy(cls._default)
- return Configuration()
+ if cls._default is None:
+ cls._default = Configuration()
+ return cls._default
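Sketch of the new default-instance behaviour: set_default no longer deep-copies, and get_default returns the shared instance:

    default = Configuration(host="/api")
    Configuration.set_default(default)
    assert Configuration.get_default() is default        # shared, not a copy
    assert Configuration.get_default_copy() is default   # deprecated alias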
@property
def logger_file(self):
@@ -293,15 +303,15 @@ def debug(self, value):
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
- # turn on http_client debug
- http_client.HTTPConnection.debuglevel = 1
+ # turn on httplib debug
+ httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
- # turn off http_client debug
- http_client.HTTPConnection.debuglevel = 0
+ # turn off httplib debug
+ httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
@@ -385,7 +395,7 @@ def get_host_settings(self):
"""
return [
{
- 'url': "https://workflows.cloudharness.metacell.us/api",
+ 'url': "/api",
'description': "Metacell host",
}
]
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py
index c870fefcd..df9d589f2 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py
@@ -3,13 +3,17 @@
"""
Workflows API
- Workflows API # noqa: E501
+ Workflows API
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+ Do not edit the class manually.
+""" # noqa: E501
+
+from typing import Any, Optional
+from typing_extensions import Self
class OpenApiException(Exception):
"""The base exception class for all OpenAPIExceptions"""
@@ -17,7 +21,7 @@ class OpenApiException(Exception):
class ApiTypeError(OpenApiException, TypeError):
def __init__(self, msg, path_to_item=None, valid_classes=None,
- key_type=None):
+ key_type=None) -> None:
""" Raises an exception for TypeErrors
Args:
@@ -45,7 +49,7 @@ def __init__(self, msg, path_to_item=None, valid_classes=None,
class ApiValueError(OpenApiException, ValueError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -63,7 +67,7 @@ def __init__(self, msg, path_to_item=None):
class ApiAttributeError(OpenApiException, AttributeError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Raised when an attribute reference or assignment fails.
@@ -82,7 +86,7 @@ def __init__(self, msg, path_to_item=None):
class ApiKeyError(OpenApiException, KeyError):
- def __init__(self, msg, path_to_item=None):
+ def __init__(self, msg, path_to_item=None) -> None:
"""
Args:
msg (str): the exception message
@@ -100,17 +104,56 @@ def __init__(self, msg, path_to_item=None):
class ApiException(OpenApiException):
- def __init__(self, status=None, reason=None, api_response: 'cloudharness_cli.workflows.api_client.ApiResponse' = None):
- if api_response:
- self.status = api_response.response.status
- self.reason = api_response.response.reason
- self.body = api_response.response.data
- self.headers = api_response.response.getheaders()
- else:
- self.status = status
- self.reason = reason
- self.body = None
- self.headers = None
+ def __init__(
+ self,
+ status=None,
+ reason=None,
+ http_resp=None,
+ *,
+ body: Optional[str] = None,
+ data: Optional[Any] = None,
+ ) -> None:
+ self.status = status
+ self.reason = reason
+ self.body = body
+ self.data = data
+ self.headers = None
+
+ if http_resp:
+ if self.status is None:
+ self.status = http_resp.status
+ if self.reason is None:
+ self.reason = http_resp.reason
+ if self.body is None:
+ try:
+ self.body = http_resp.data.decode('utf-8')
+ except Exception:
+ pass
+ self.headers = http_resp.getheaders()
+
+ @classmethod
+ def from_response(
+ cls,
+ *,
+ http_resp,
+ body: Optional[str],
+ data: Optional[Any],
+ ) -> Self:
+ if http_resp.status == 400:
+ raise BadRequestException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 401:
+ raise UnauthorizedException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 403:
+ raise ForbiddenException(http_resp=http_resp, body=body, data=data)
+
+ if http_resp.status == 404:
+ raise NotFoundException(http_resp=http_resp, body=body, data=data)
+
+ if 500 <= http_resp.status <= 599:
+ raise ServiceException(http_resp=http_resp, body=body, data=data)
+ raise ApiException(http_resp=http_resp, body=body, data=data)
def __str__(self):
"""Custom error messages for exception"""
@@ -120,12 +163,32 @@ def __str__(self):
error_message += "HTTP response headers: {0}\n".format(
self.headers)
- if self.body:
- error_message += "HTTP response body: {0}\n".format(self.body)
+ if self.data or self.body:
+ error_message += "HTTP response body: {0}\n".format(self.data or self.body)
return error_message
+class BadRequestException(ApiException):
+ pass
+
+
+class NotFoundException(ApiException):
+ pass
+
+
+class UnauthorizedException(ApiException):
+ pass
+
+
+class ForbiddenException(ApiException):
+ pass
+
+
+class ServiceException(ApiException):
+ pass
+
+
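Hedged caller-side sketch of the status-code dispatch above; `api` stands in for any generated API object and the operation name is illustrative:

    from cloudharness_cli.workflows.exceptions import ApiException, NotFoundException

    try:
        api.get_operation("missing")      # hypothetical client call
    except NotFoundException as e:        # HTTP 404 via ApiException.from_response
        print(e.status, e.reason)
    except ApiException as e:             # any other error status
        print(e.status, e.body)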
def render_path(path_to_item):
"""Returns a string representation of a path"""
result = ""
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/__init__.py
deleted file mode 100644
index c138ded74..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# we can not import model classes here because that would create a circular
-# reference which would not work in python2
-# do not import all models into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all models from one package, import them with
-# from cloudharness_cli.workflows.models import ModelA, ModelB
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.py
deleted file mode 100644
index 5013d4a43..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class Operation(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- represents the status of a distributed API call
- """
-
-
- class MetaOapg:
-
- class properties:
- message = schemas.StrSchema
- name = schemas.StrSchema
- createTime = schemas.DateTimeSchema
-
- @staticmethod
- def status() -> typing.Type['OperationStatus']:
- return OperationStatus
- workflow = schemas.StrSchema
- __annotations__ = {
- "message": message,
- "name": name,
- "createTime": createTime,
- "status": status,
- "workflow": workflow,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["message"]) -> MetaOapg.properties.message: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["createTime"]) -> MetaOapg.properties.createTime: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["status"]) -> 'OperationStatus': ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["workflow"]) -> MetaOapg.properties.workflow: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["message", "name", "createTime", "status", "workflow", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["message"]) -> typing.Union[MetaOapg.properties.message, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["createTime"]) -> typing.Union[MetaOapg.properties.createTime, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["status"]) -> typing.Union['OperationStatus', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["workflow"]) -> typing.Union[MetaOapg.properties.workflow, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["message", "name", "createTime", "status", "workflow", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- message: typing.Union[MetaOapg.properties.message, str, schemas.Unset] = schemas.unset,
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- createTime: typing.Union[MetaOapg.properties.createTime, str, datetime, schemas.Unset] = schemas.unset,
- status: typing.Union['OperationStatus', schemas.Unset] = schemas.unset,
- workflow: typing.Union[MetaOapg.properties.workflow, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'Operation':
- return super().__new__(
- cls,
- *args,
- message=message,
- name=name,
- createTime=createTime,
- status=status,
- workflow=workflow,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli.workflows.model.operation_status import OperationStatus
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.pyi
deleted file mode 100644
index 5013d4a43..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation.pyi
+++ /dev/null
@@ -1,127 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class Operation(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- represents the status of a distributed API call
- """
-
-
- class MetaOapg:
-
- class properties:
- message = schemas.StrSchema
- name = schemas.StrSchema
- createTime = schemas.DateTimeSchema
-
- @staticmethod
- def status() -> typing.Type['OperationStatus']:
- return OperationStatus
- workflow = schemas.StrSchema
- __annotations__ = {
- "message": message,
- "name": name,
- "createTime": createTime,
- "status": status,
- "workflow": workflow,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["message"]) -> MetaOapg.properties.message: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["name"]) -> MetaOapg.properties.name: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["createTime"]) -> MetaOapg.properties.createTime: ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["status"]) -> 'OperationStatus': ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["workflow"]) -> MetaOapg.properties.workflow: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["message", "name", "createTime", "status", "workflow", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["message"]) -> typing.Union[MetaOapg.properties.message, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["name"]) -> typing.Union[MetaOapg.properties.name, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["createTime"]) -> typing.Union[MetaOapg.properties.createTime, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["status"]) -> typing.Union['OperationStatus', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["workflow"]) -> typing.Union[MetaOapg.properties.workflow, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["message", "name", "createTime", "status", "workflow", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- message: typing.Union[MetaOapg.properties.message, str, schemas.Unset] = schemas.unset,
- name: typing.Union[MetaOapg.properties.name, str, schemas.Unset] = schemas.unset,
- createTime: typing.Union[MetaOapg.properties.createTime, str, datetime, schemas.Unset] = schemas.unset,
- status: typing.Union['OperationStatus', schemas.Unset] = schemas.unset,
- workflow: typing.Union[MetaOapg.properties.workflow, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'Operation':
- return super().__new__(
- cls,
- *args,
- message=message,
- name=name,
- createTime=createTime,
- status=status,
- workflow=workflow,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli.workflows.model.operation_status import OperationStatus
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.py
deleted file mode 100644
index 102943395..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class OperationSearchResult(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- a list of operations with meta data about the result
- """
-
-
- class MetaOapg:
-
- class properties:
-
- @staticmethod
- def meta() -> typing.Type['SearchResultData']:
- return SearchResultData
-
-
- class items(
- schemas.ListSchema
- ):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['Operation']:
- return Operation
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['Operation'], typing.List['Operation']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'items':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'Operation':
- return super().__getitem__(i)
- __annotations__ = {
- "meta": meta,
- "items": items,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["meta"]) -> 'SearchResultData': ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["items"]) -> MetaOapg.properties.items: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["meta", "items", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["meta"]) -> typing.Union['SearchResultData', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["items"]) -> typing.Union[MetaOapg.properties.items, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["meta", "items", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- meta: typing.Union['SearchResultData', schemas.Unset] = schemas.unset,
- items: typing.Union[MetaOapg.properties.items, list, tuple, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'OperationSearchResult':
- return super().__new__(
- cls,
- *args,
- meta=meta,
- items=items,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli.workflows.model.operation import Operation
-from cloudharness_cli.workflows.model.search_result_data import SearchResultData
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.pyi
deleted file mode 100644
index 102943395..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_search_result.pyi
+++ /dev/null
@@ -1,123 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class OperationSearchResult(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- a list of operations with meta data about the result
- """
-
-
- class MetaOapg:
-
- class properties:
-
- @staticmethod
- def meta() -> typing.Type['SearchResultData']:
- return SearchResultData
-
-
- class items(
- schemas.ListSchema
- ):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['Operation']:
- return Operation
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['Operation'], typing.List['Operation']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'items':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'Operation':
- return super().__getitem__(i)
- __annotations__ = {
- "meta": meta,
- "items": items,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["meta"]) -> 'SearchResultData': ...
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["items"]) -> MetaOapg.properties.items: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["meta", "items", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["meta"]) -> typing.Union['SearchResultData', schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["items"]) -> typing.Union[MetaOapg.properties.items, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["meta", "items", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- meta: typing.Union['SearchResultData', schemas.Unset] = schemas.unset,
- items: typing.Union[MetaOapg.properties.items, list, tuple, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'OperationSearchResult':
- return super().__new__(
- cls,
- *args,
- meta=meta,
- items=items,
- _configuration=_configuration,
- **kwargs,
- )
-
-from cloudharness_cli.workflows.model.operation import Operation
-from cloudharness_cli.workflows.model.search_result_data import SearchResultData
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.py
deleted file mode 100644
index f13417b3e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class OperationStatus(
- schemas.EnumBase,
- schemas.StrSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
-
- class MetaOapg:
- enum_value_to_name = {
- "Pending": "PENDING",
- "Running": "RUNNING",
- "Error": "ERROR",
- "Succeeded": "SUCCEEDED",
- "Skipped": "SKIPPED",
- "Failed": "FAILED",
- }
-
- @schemas.classproperty
- def PENDING(cls):
- return cls("Pending")
-
- @schemas.classproperty
- def RUNNING(cls):
- return cls("Running")
-
- @schemas.classproperty
- def ERROR(cls):
- return cls("Error")
-
- @schemas.classproperty
- def SUCCEEDED(cls):
- return cls("Succeeded")
-
- @schemas.classproperty
- def SKIPPED(cls):
- return cls("Skipped")
-
- @schemas.classproperty
- def FAILED(cls):
- return cls("Failed")
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.pyi
deleted file mode 100644
index 168ddaf89..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/operation_status.pyi
+++ /dev/null
@@ -1,59 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class OperationStatus(
- schemas.EnumBase,
- schemas.StrSchema
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
- """
-
- @schemas.classproperty
- def PENDING(cls):
- return cls("Pending")
-
- @schemas.classproperty
- def RUNNING(cls):
- return cls("Running")
-
- @schemas.classproperty
- def ERROR(cls):
- return cls("Error")
-
- @schemas.classproperty
- def SUCCEEDED(cls):
- return cls("Succeeded")
-
- @schemas.classproperty
- def SKIPPED(cls):
- return cls("Skipped")
-
- @schemas.classproperty
- def FAILED(cls):
- return cls("Failed")
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.py
deleted file mode 100644
index c446f3de9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class SearchResultData(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- describes a search
- """
-
-
- class MetaOapg:
-
- class properties:
- continueToken = schemas.StrSchema
- __annotations__ = {
- "continueToken": continueToken,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["continueToken"]) -> MetaOapg.properties.continueToken: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["continueToken", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["continueToken"]) -> typing.Union[MetaOapg.properties.continueToken, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["continueToken", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- continueToken: typing.Union[MetaOapg.properties.continueToken, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SearchResultData':
- return super().__new__(
- cls,
- *args,
- continueToken=continueToken,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.pyi
deleted file mode 100644
index c446f3de9..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/model/search_result_data.pyi
+++ /dev/null
@@ -1,82 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-
-class SearchResultData(
- schemas.AnyTypeSchema,
-):
- """NOTE: This class is auto generated by OpenAPI Generator.
- Ref: https://openapi-generator.tech
-
- Do not edit the class manually.
-
- describes a search
- """
-
-
- class MetaOapg:
-
- class properties:
- continueToken = schemas.StrSchema
- __annotations__ = {
- "continueToken": continueToken,
- }
-
-
- @typing.overload
- def __getitem__(self, name: typing_extensions.Literal["continueToken"]) -> MetaOapg.properties.continueToken: ...
-
- @typing.overload
- def __getitem__(self, name: str) -> schemas.UnsetAnyTypeSchema: ...
-
- def __getitem__(self, name: typing.Union[typing_extensions.Literal["continueToken", ], str]):
- # dict_instance[name] accessor
- return super().__getitem__(name)
-
-
- @typing.overload
- def get_item_oapg(self, name: typing_extensions.Literal["continueToken"]) -> typing.Union[MetaOapg.properties.continueToken, schemas.Unset]: ...
-
- @typing.overload
- def get_item_oapg(self, name: str) -> typing.Union[schemas.UnsetAnyTypeSchema, schemas.Unset]: ...
-
- def get_item_oapg(self, name: typing.Union[typing_extensions.Literal["continueToken", ], str]):
- return super().get_item_oapg(name)
-
-
- def __new__(
- cls,
- *args: typing.Union[dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, ],
- continueToken: typing.Union[MetaOapg.properties.continueToken, str, schemas.Unset] = schemas.unset,
- _configuration: typing.Optional[schemas.Configuration] = None,
- **kwargs: typing.Union[schemas.AnyTypeSchema, dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, None, list, tuple, bytes],
- ) -> 'SearchResultData':
- return super().__new__(
- cls,
- *args,
- continueToken=continueToken,
- _configuration=_configuration,
- **kwargs,
- )
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py
index 966c20a09..c8e68a3cf 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py
@@ -1,17 +1,21 @@
# coding: utf-8
# flake8: noqa
+"""
+ Workflows API
-# import all models into this package
-# if you have many models here with many references from one model to another this may
-# raise a RecursionError
-# to avoid this, import only the models that you directly need like:
-# from from cloudharness_cli.workflows.model.pet import Pet
-# or import this package, but before doing it, use:
-# import sys
-# sys.setrecursionlimit(n)
+ Workflows API
-from cloudharness_cli.workflows.model.operation import Operation
-from cloudharness_cli.workflows.model.operation_search_result import OperationSearchResult
-from cloudharness_cli.workflows.model.operation_status import OperationStatus
-from cloudharness_cli.workflows.model.search_result_data import SearchResultData
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+# import models into model package
+from cloudharness_cli.workflows.models.operation import Operation
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+from cloudharness_cli.workflows.models.search_result_data import SearchResultData
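
The hunk above also renames the model package from the singular `model` to the plural `models`, so downstream code has to follow the rename. A minimal before/after sketch, with module names taken only from the imports shown above:

    # before this patch (openapi-generator "experimental" layout)
    from cloudharness_cli.workflows.model.operation import Operation

    # after this patch (pydantic-based layout)
    from cloudharness_cli.workflows.models.operation import Operation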
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py
new file mode 100644
index 000000000..d5055072e
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py
@@ -0,0 +1,100 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from datetime import datetime
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+from typing import Optional, Set
+from typing_extensions import Self
+
+class Operation(BaseModel):
+ """
+ represents the status of a distributed API call
+ """ # noqa: E501
+ message: Optional[StrictStr] = Field(default=None, description="usually set when an error occurred")
+ name: Optional[StrictStr] = Field(default=None, description="operation name")
+ create_time: Optional[datetime] = Field(default=None, alias="createTime")
+ status: Optional[OperationStatus] = OperationStatus.PENDING
+ workflow: Optional[StrictStr] = Field(default=None, description="low level representation as an Argo json")
+ __properties: ClassVar[List[str]] = ["message", "name", "createTime", "status", "workflow"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of Operation from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ * OpenAPI `readOnly` fields are excluded.
+ """
+ excluded_fields: Set[str] = set([
+ "create_time",
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of Operation from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "message": obj.get("message"),
+ "name": obj.get("name"),
+ "createTime": obj.get("createTime"),
+ "status": obj.get("status") if obj.get("status") is not None else OperationStatus.PENDING,
+ "workflow": obj.get("workflow")
+ })
+ return _obj
+
+
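
The new Operation class above is a plain pydantic v2 BaseModel, so it can be exercised directly. A minimal sketch of the round-trip behavior implied by the code, using a made-up payload (all field values here are hypothetical):

    from cloudharness_cli.workflows.models.operation import Operation
    from cloudharness_cli.workflows.models.operation_status import OperationStatus

    # the wire format uses the camelCase alias declared on create_time
    op = Operation.from_dict({
        "name": "sync-42",
        "createTime": "2024-02-06T13:23:28Z",
        "status": "Running",
    })
    assert op.status is OperationStatus.RUNNING  # string coerced to the enum
    assert op.create_time is not None            # populated through the alias
    # create_time sits in excluded_fields (readOnly), so to_dict() drops it
    assert "createTime" not in op.to_dict()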
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py
new file mode 100644
index 000000000..6ef727931
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py
@@ -0,0 +1,102 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict
+from typing import Any, ClassVar, Dict, List, Optional
+from cloudharness_cli.workflows.models.operation import Operation
+from cloudharness_cli.workflows.models.search_result_data import SearchResultData
+from typing import Optional, Set
+from typing_extensions import Self
+
+class OperationSearchResult(BaseModel):
+ """
+ a list of operations with meta data about the result
+ """ # noqa: E501
+ meta: Optional[SearchResultData] = None
+ items: Optional[List[Operation]] = None
+ __properties: ClassVar[List[str]] = ["meta", "items"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of OperationSearchResult from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of meta
+ if self.meta:
+ _dict['meta'] = self.meta.to_dict()
+ # override the default output from pydantic by calling `to_dict()` of each item in items (list)
+ _items = []
+ if self.items:
+ for _item in self.items:
+ if _item:
+ _items.append(_item.to_dict())
+ _dict['items'] = _items
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of OperationSearchResult from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "meta": SearchResultData.from_dict(obj["meta"]) if obj.get("meta") is not None else None,
+ "items": [Operation.from_dict(_item) for _item in obj["items"]] if obj.get("items") is not None else None
+ })
+ return _obj
+
+
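
OperationSearchResult overrides to_dict() to recurse into its nested models instead of relying on pydantic's default dump, as the comments in the code spell out. A short sketch under the same assumption of a hypothetical wire payload:

    from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult

    # hypothetical search response as it would arrive over the wire
    result = OperationSearchResult.from_dict({
        "meta": {"continueToken": "abc123"},
        "items": [{"name": "op-1", "status": "Succeeded"}],
    })
    payload = result.to_dict()
    # meta and each item were serialized via their own to_dict() overrides
    assert payload["meta"] == {"continueToken": "abc123"}
    assert payload["items"][0]["name"] == "op-1"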
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py
new file mode 100644
index 000000000..229b22f3f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import json
+from enum import Enum
+from typing_extensions import Self
+
+
+class OperationStatus(str, Enum):
+ """
+ OperationStatus
+ """
+
+ """
+ allowed enum values
+ """
+ PENDING = 'Pending'
+ RUNNING = 'Running'
+ ERROR = 'Error'
+ SUCCEEDED = 'Succeeded'
+ SKIPPED = 'Skipped'
+ FAILED = 'Failed'
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Create an instance of OperationStatus from a JSON string"""
+ return cls(json.loads(json_str))
+
+
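
Because OperationStatus subclasses both str and Enum, deserialized values compare equal to plain strings while still being proper enum members. A quick sketch:

    from cloudharness_cli.workflows.models.operation_status import OperationStatus

    status = OperationStatus.from_json('"Pending"')
    assert status is OperationStatus.PENDING
    # str subclassing keeps plain-string comparisons working
    assert status == "Pending"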
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py
new file mode 100644
index 000000000..b9f9766e2
--- /dev/null
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py
@@ -0,0 +1,88 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+from __future__ import annotations
+import pprint
+import re # noqa: F401
+import json
+
+from pydantic import BaseModel, ConfigDict, Field, StrictStr
+from typing import Any, ClassVar, Dict, List, Optional
+from typing import Optional, Set
+from typing_extensions import Self
+
+class SearchResultData(BaseModel):
+ """
+ describes a search
+ """ # noqa: E501
+ continue_token: Optional[StrictStr] = Field(default=None, description="token to use for pagination", alias="continueToken")
+ __properties: ClassVar[List[str]] = ["continueToken"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of SearchResultData from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of SearchResultData from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "continueToken": obj.get("continueToken")
+ })
+ return _obj
+
+
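
With populate_by_name=True in the model_config above, SearchResultData accepts the camelCase alias on input and emits it again on output, while Python code reads the snake_case attribute. A brief sketch:

    from cloudharness_cli.workflows.models.search_result_data import SearchResultData

    # either the alias or the python field name is accepted on input
    by_alias = SearchResultData.from_dict({"continueToken": "tok-1"})
    by_name = SearchResultData(continue_token="tok-1")
    assert by_alias.continue_token == by_name.continue_token
    # output is always keyed by the alias
    assert by_alias.to_dict() == {"continueToken": "tok-1"}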
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/__init__.py
deleted file mode 100644
index a74c84d9b..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.apis.path_to_api import path_to_api
-
-import enum
-
-
-class PathValues(str, enum.Enum):
- OPERATIONS = "/operations"
- OPERATIONS_NAME = "/operations/{name}"
- OPERATIONS_NAME_LOGS = "/operations/{name}/logs"
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/__init__.py
deleted file mode 100644
index 6c469ed6d..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.paths.operations import Api
-
-from cloudharness_cli.workflows.paths import PathValues
-
-path = PathValues.OPERATIONS
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.py
deleted file mode 100644
index c4a865c14..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.py
+++ /dev/null
@@ -1,330 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from cloudharness_cli/workflows.model.operation_search_result import OperationSearchResult
-from cloudharness_cli/workflows.model.operation_status import OperationStatus
-
-from . import path
-
-# Query params
-StatusSchema = OperationStatus
-PreviousSearchTokenSchema = schemas.StrSchema
-
-
-class LimitSchema(
- schemas.IntSchema
-):
-
-
- class MetaOapg:
- inclusive_maximum = 50
- inclusive_minimum = 1
-RequestRequiredQueryParams = typing_extensions.TypedDict(
- 'RequestRequiredQueryParams',
- {
- }
-)
-RequestOptionalQueryParams = typing_extensions.TypedDict(
- 'RequestOptionalQueryParams',
- {
- 'status': typing.Union[StatusSchema, ],
- 'previous_search_token': typing.Union[PreviousSearchTokenSchema, str, ],
- 'limit': typing.Union[LimitSchema, decimal.Decimal, int, ],
- },
- total=False
-)
-
-
-class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
- pass
-
-
-request_query_status = api_client.QueryParameter(
- name="status",
- style=api_client.ParameterStyle.FORM,
- schema=StatusSchema,
- explode=True,
-)
-request_query_previous_search_token = api_client.QueryParameter(
- name="previous_search_token",
- style=api_client.ParameterStyle.FORM,
- schema=PreviousSearchTokenSchema,
- explode=True,
-)
-request_query_limit = api_client.QueryParameter(
- name="limit",
- style=api_client.ParameterStyle.FORM,
- schema=LimitSchema,
- explode=True,
-)
-SchemaFor200ResponseBodyApplicationJson = OperationSearchResult
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '400': _response_for_400,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _list_operations_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- lists operations
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
- used_path = path.value
-
- prefix_separator_iterator = None
- for parameter in (
- request_query_status,
- request_query_previous_search_token,
- request_query_limit,
- ):
- parameter_data = query_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- if prefix_separator_iterator is None:
- prefix_separator_iterator = parameter.get_prefix_separator_iterator()
- serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
- for serialized_value in serialized_data.values():
- used_path += serialized_value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ListOperations(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def list_operations(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._list_operations_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._list_operations_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.pyi
deleted file mode 100644
index a7515a365..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations/get.pyi
+++ /dev/null
@@ -1,320 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from cloudharness_cli/workflows.model.operation_search_result import OperationSearchResult
-from cloudharness_cli/workflows.model.operation_status import OperationStatus
-
-# Query params
-StatusSchema = OperationStatus
-PreviousSearchTokenSchema = schemas.StrSchema
-
-
-class LimitSchema(
- schemas.IntSchema
-):
- pass
-RequestRequiredQueryParams = typing_extensions.TypedDict(
- 'RequestRequiredQueryParams',
- {
- }
-)
-RequestOptionalQueryParams = typing_extensions.TypedDict(
- 'RequestOptionalQueryParams',
- {
- 'status': typing.Union[StatusSchema, ],
- 'previous_search_token': typing.Union[PreviousSearchTokenSchema, str, ],
- 'limit': typing.Union[LimitSchema, decimal.Decimal, int, ],
- },
- total=False
-)
-
-
-class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
- pass
-
-
-request_query_status = api_client.QueryParameter(
- name="status",
- style=api_client.ParameterStyle.FORM,
- schema=StatusSchema,
- explode=True,
-)
-request_query_previous_search_token = api_client.QueryParameter(
- name="previous_search_token",
- style=api_client.ParameterStyle.FORM,
- schema=PreviousSearchTokenSchema,
- explode=True,
-)
-request_query_limit = api_client.QueryParameter(
- name="limit",
- style=api_client.ParameterStyle.FORM,
- schema=LimitSchema,
- explode=True,
-)
-SchemaFor200ResponseBodyApplicationJson = OperationSearchResult
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor400(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_400 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor400,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _list_operations_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _list_operations_oapg(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- lists operations
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
- used_path = path.value
-
- prefix_separator_iterator = None
- for parameter in (
- request_query_status,
- request_query_previous_search_token,
- request_query_limit,
- ):
- parameter_data = query_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- if prefix_separator_iterator is None:
- prefix_separator_iterator = parameter.get_prefix_separator_iterator()
- serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
- for serialized_value in serialized_data.values():
- used_path += serialized_value
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class ListOperations(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def list_operations(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def list_operations(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._list_operations_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- query_params: RequestQueryParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._list_operations_oapg(
- query_params=query_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/__init__.py
deleted file mode 100644
index 5eeb5819a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.paths.operations_name import Api
-
-from cloudharness_cli.workflows.paths import PathValues
-
-path = PathValues.OPERATIONS_NAME
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.py
deleted file mode 100644
index 9f6542f10..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from . import path
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '404': _response_for_404,
-}
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _delete_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- deletes operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
- # TODO add cookie handling
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='delete'.upper(),
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class DeleteOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def delete_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_operation_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiFordelete(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def delete(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_operation_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.pyi
deleted file mode 100644
index caa9c4ac2..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/delete.pyi
+++ /dev/null
@@ -1,263 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _delete_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _delete_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- deletes operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
- # TODO add cookie handling
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='delete'.upper(),
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class DeleteOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def delete_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_operation_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiFordelete(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def delete(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def delete(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._delete_operation_oapg(
- path_params=path_params,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.py
deleted file mode 100644
index cfdd1c54e..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.py
+++ /dev/null
@@ -1,327 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from cloudharness_cli/workflows.model.operation import Operation
-
-from . import path
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.ListSchema
-):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['Operation']:
- return Operation
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['Operation'], typing.List['Operation']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'Operation':
- return super().__getitem__(i)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '404': _response_for_404,
-}
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- get operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
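
For reference, a minimal usage sketch of the endpoint module deleted above. Import paths mirror the removed file; the host URL and the client setup are assumptions, not part of this patch:

    from cloudharness_cli.workflows import api_client
    from cloudharness_cli.workflows.configuration import Configuration
    from cloudharness_cli.workflows.paths.operations_name.get import ApiForget

    # Assumed host; any reachable Workflows API deployment would do.
    client = api_client.ApiClient(Configuration(host="http://localhost:5000/api"))
    api = ApiForget(client)

    # 'name' is the single required path parameter declared by RequestPathParams.
    api_response = api.get(path_params={'name': 'my-operation'})
    print(api_response.response.status)  # raw urllib3.HTTPResponse underneath
    print(api_response.body)             # deserialized list of Operation models
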
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.pyi
deleted file mode 100644
index 6a9f5b25f..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name/get.pyi
+++ /dev/null
@@ -1,321 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from cloudharness_cli.workflows.model.operation import Operation
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-
-
-class SchemaFor200ResponseBodyApplicationJson(
- schemas.ListSchema
-):
-
-
- class MetaOapg:
-
- @staticmethod
- def items() -> typing.Type['Operation']:
- return Operation
-
- def __new__(
- cls,
- arg: typing.Union[typing.Tuple['Operation'], typing.List['Operation']],
- _configuration: typing.Optional[schemas.Configuration] = None,
- ) -> 'SchemaFor200ResponseBodyApplicationJson':
- return super().__new__(
- cls,
- arg,
- _configuration=_configuration,
- )
-
- def __getitem__(self, i: int) -> 'Operation':
- return super().__getitem__(i)
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyApplicationJson,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'application/json': api_client.MediaType(
- schema=SchemaFor200ResponseBodyApplicationJson),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_all_accept_content_types = (
- 'application/json',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _get_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _get_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- get operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class GetOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._get_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
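
The deleted stubs rely on typing.overload so that the literal type of skip_deserialization picks the declared return type (ApiResponseFor200 vs. ApiResponseWithoutDeserialization). A self-contained sketch of that pattern, with illustrative names that are not part of the client:

    import typing
    import typing_extensions

    class Parsed: ...   # stands in for ApiResponseFor200
    class Raw: ...      # stands in for ApiResponseWithoutDeserialization

    @typing.overload
    def call(skip_deserialization: typing_extensions.Literal[False] = ...) -> Parsed: ...
    @typing.overload
    def call(skip_deserialization: typing_extensions.Literal[True]) -> Raw: ...
    @typing.overload
    def call(skip_deserialization: bool = ...) -> typing.Union[Parsed, Raw]: ...
    def call(skip_deserialization: bool = False):
        # Only this implementation executes; the overloads exist for type checkers.
        return Raw() if skip_deserialization else Parsed()

    assert isinstance(call(), Parsed)    # a type checker infers Parsed here
    assert isinstance(call(True), Raw)   # ...and Raw here
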
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/__init__.py
deleted file mode 100644
index 30638ba28..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# do not import all endpoints into this module because that uses a lot of memory and stack frames
-# if you need the ability to import all endpoints from this module, import them with
-# from cloudharness_cli.workflows.paths.operations_name_logs import Api
-
-from cloudharness_cli.workflows.paths import PathValues
-
-path = PathValues.OPERATIONS_NAME_LOGS
\ No newline at end of file
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.py
deleted file mode 100644
index 997c4e240..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.py
+++ /dev/null
@@ -1,300 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-from . import path
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-SchemaFor200ResponseBodyTextPlain = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyTextPlain,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'text/plain': api_client.MediaType(
- schema=SchemaFor200ResponseBodyTextPlain),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_status_code_to_response = {
- '200': _response_for_200,
- '404': _response_for_404,
-}
-_all_accept_content_types = (
- 'text/plain',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _log_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- get operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class LogOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def log_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._log_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._log_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
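
The request body above builds the URL by substituting each serialized path parameter into the path template. A minimal sketch of that substitution step; the template string is an assumption about the value behind PathValues.OPERATIONS_NAME_LOGS:

    # Assumed template; the real value lives in cloudharness_cli.workflows.paths.PathValues.
    path_template = '/operations/{name}/logs'

    def render_path(template: str, path_params: dict) -> str:
        used_path = template
        for key, value in path_params.items():
            used_path = used_path.replace('{%s}' % key, str(value))
        return used_path

    assert render_path(path_template, {'name': 'my-op'}) == '/operations/my-op/logs'
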
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.pyi b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.pyi
deleted file mode 100644
index 1dd0e8a57..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/paths/operations_name_logs/get.pyi
+++ /dev/null
@@ -1,294 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-from dataclasses import dataclass
-import typing_extensions
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-from cloudharness_cli.workflows import api_client, exceptions
-from datetime import date, datetime # noqa: F401
-import decimal # noqa: F401
-import functools # noqa: F401
-import io # noqa: F401
-import re # noqa: F401
-import typing # noqa: F401
-import typing_extensions # noqa: F401
-import uuid # noqa: F401
-
-import frozendict # noqa: F401
-
-from cloudharness_cli.workflows import schemas # noqa: F401
-
-# Path params
-NameSchema = schemas.StrSchema
-RequestRequiredPathParams = typing_extensions.TypedDict(
- 'RequestRequiredPathParams',
- {
- 'name': typing.Union[NameSchema, str, ],
- }
-)
-RequestOptionalPathParams = typing_extensions.TypedDict(
- 'RequestOptionalPathParams',
- {
- },
- total=False
-)
-
-
-class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
- pass
-
-
-request_path_name = api_client.PathParameter(
- name="name",
- style=api_client.ParameterStyle.SIMPLE,
- schema=NameSchema,
- required=True,
-)
-SchemaFor200ResponseBodyTextPlain = schemas.StrSchema
-
-
-@dataclass
-class ApiResponseFor200(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: typing.Union[
- SchemaFor200ResponseBodyTextPlain,
- ]
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_200 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor200,
- content={
- 'text/plain': api_client.MediaType(
- schema=SchemaFor200ResponseBodyTextPlain),
- },
-)
-
-
-@dataclass
-class ApiResponseFor404(api_client.ApiResponse):
- response: urllib3.HTTPResponse
- body: schemas.Unset = schemas.unset
- headers: schemas.Unset = schemas.unset
-
-
-_response_for_404 = api_client.OpenApiResponse(
- response_cls=ApiResponseFor404,
-)
-_all_accept_content_types = (
- 'text/plain',
-)
-
-
-class BaseApi(api_client.Api):
- @typing.overload
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def _log_operation_oapg(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def _log_operation_oapg(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- """
- get operation by name
- :param skip_deserialization: If true then api_response.response will be set but
- api_response.body and api_response.headers will not be deserialized into schema
- class instances
- """
- self._verify_typed_dict_inputs_oapg(RequestPathParams, path_params)
- used_path = path.value
-
- _path_params = {}
- for parameter in (
- request_path_name,
- ):
- parameter_data = path_params.get(parameter.name, schemas.unset)
- if parameter_data is schemas.unset:
- continue
- serialized_data = parameter.serialize(parameter_data)
- _path_params.update(serialized_data)
-
- for k, v in _path_params.items():
- used_path = used_path.replace('{%s}' % k, v)
-
- _headers = HTTPHeaderDict()
- # TODO add cookie handling
- if accept_content_types:
- for accept_content_type in accept_content_types:
- _headers.add('Accept', accept_content_type)
-
- response = self.api_client.call_api(
- resource_path=used_path,
- method='get'.upper(),
- headers=_headers,
- stream=stream,
- timeout=timeout,
- )
-
- if skip_deserialization:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
- else:
- response_for_status = _status_code_to_response.get(str(response.status))
- if response_for_status:
- api_response = response_for_status.deserialize(response, self.api_client.configuration)
- else:
- api_response = api_client.ApiResponseWithoutDeserialization(response=response)
-
- if not 200 <= response.status <= 299:
- raise exceptions.ApiException(api_response=api_response)
-
- return api_response
-
-
-class LogOperation(BaseApi):
- # this class is used by api classes that refer to endpoints with operationId fn names
-
- @typing.overload
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def log_operation(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def log_operation(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._log_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
-class ApiForget(BaseApi):
- # this class is used by api classes that refer to endpoints by path and http method names
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: typing_extensions.Literal[False] = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- ]: ...
-
- @typing.overload
- def get(
- self,
- skip_deserialization: typing_extensions.Literal[True],
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> api_client.ApiResponseWithoutDeserialization: ...
-
- @typing.overload
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = ...,
- ) -> typing.Union[
- ApiResponseFor200,
- api_client.ApiResponseWithoutDeserialization,
- ]: ...
-
- def get(
- self,
- path_params: RequestPathParams = frozendict.frozendict(),
- accept_content_types: typing.Tuple[str] = _all_accept_content_types,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- skip_deserialization: bool = False,
- ):
- return self._log_operation_oapg(
- path_params=path_params,
- accept_content_types=accept_content_types,
- stream=stream,
- timeout=timeout,
- skip_deserialization=skip_deserialization
- )
-
-
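
All four deleted endpoint modules share the same response dispatch: look the status code up in a per-endpoint map, deserialize on a hit, fall back to the raw response otherwise, and raise for any non-2xx status. A simplified, self-contained sketch of that logic (plain dicts stand in for the api_client response classes):

    class ApiException(Exception):
        pass

    def dispatch(response, status_map, skip_deserialization=False):
        """Simplified mirror of the _*_oapg response handling above."""
        if skip_deserialization:
            return response                   # raw response, nothing deserialized
        handler = status_map.get(str(response['status']))
        api_response = handler(response) if handler else response
        if not 200 <= response['status'] <= 299:
            raise ApiException(api_response)  # non-2xx always raises
        return api_response

    # A '200' handler deserializes the body; unmapped codes pass through untouched.
    status_map = {'200': lambda r: r['data'].decode()}
    print(dispatch({'status': 200, 'data': b'log line'}, status_map))  # -> log line
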
diff --git a/libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/__init__.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/py.typed
similarity index 100%
rename from libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/__init__.py
rename to libraries/client/cloudharness_cli/cloudharness_cli/workflows/py.typed
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/rest.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/rest.py
index c2b38f9b7..977cb4d05 100644
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/rest.py
+++ b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/rest.py
@@ -3,35 +3,67 @@
"""
Workflows API
- Workflows API # noqa: E501
+ Workflows API
The version of the OpenAPI document: 0.1.0
Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
-import logging
+
+import io
+import json
+import re
import ssl
-from urllib.parse import urlencode
-import typing
-import certifi
import urllib3
-from urllib3._collections import HTTPHeaderDict
from cloudharness_cli.workflows.exceptions import ApiException, ApiValueError
+SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"}
+RESTResponseType = urllib3.HTTPResponse
+
+
+def is_socks_proxy_url(url):
+ if url is None:
+ return False
+ split_section = url.split("://")
+ if len(split_section) < 2:
+ return False
+ else:
+ return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES
+
+
+class RESTResponse(io.IOBase):
+
+ def __init__(self, resp) -> None:
+ self.response = resp
+ self.status = resp.status
+ self.reason = resp.reason
+ self.data = None
+
+ def read(self):
+ if self.data is None:
+ self.data = self.response.data
+ return self.data
+
+ def getheaders(self):
+ """Returns a dictionary of the response headers."""
+ return self.response.headers
-logger = logging.getLogger(__name__)
+ def getheader(self, name, default=None):
+ """Returns a given response header."""
+ return self.response.headers.get(name, default)
-class RESTClientObject(object):
+class RESTClientObject:
- def __init__(self, configuration, pools_size=4, maxsize=None):
+ def __init__(self, configuration) -> None:
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
- # maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
@@ -40,140 +72,167 @@ def __init__(self, configuration, pools_size=4, maxsize=None):
else:
cert_reqs = ssl.CERT_NONE
- # ca_certs
- if configuration.ssl_ca_cert:
- ca_certs = configuration.ssl_ca_cert
- else:
- # if not set certificate file, use Mozilla's root certificates.
- ca_certs = certifi.where()
-
- addition_pool_args = {}
+ pool_args = {
+ "cert_reqs": cert_reqs,
+ "ca_certs": configuration.ssl_ca_cert,
+ "cert_file": configuration.cert_file,
+ "key_file": configuration.key_file,
+ }
if configuration.assert_hostname is not None:
- addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
+ pool_args['assert_hostname'] = (
+ configuration.assert_hostname
+ )
if configuration.retries is not None:
- addition_pool_args['retries'] = configuration.retries
+ pool_args['retries'] = configuration.retries
+
+ if configuration.tls_server_name:
+ pool_args['server_hostname'] = configuration.tls_server_name
+
if configuration.socket_options is not None:
- addition_pool_args['socket_options'] = configuration.socket_options
+ pool_args['socket_options'] = configuration.socket_options
- if maxsize is None:
- if configuration.connection_pool_maxsize is not None:
- maxsize = configuration.connection_pool_maxsize
- else:
- maxsize = 4
+ if configuration.connection_pool_maxsize is not None:
+ pool_args['maxsize'] = configuration.connection_pool_maxsize
# https pool manager
+ self.pool_manager: urllib3.PoolManager
+
if configuration.proxy:
- self.pool_manager = urllib3.ProxyManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- proxy_url=configuration.proxy,
- proxy_headers=configuration.proxy_headers,
- **addition_pool_args
- )
+ if is_socks_proxy_url(configuration.proxy):
+ from urllib3.contrib.socks import SOCKSProxyManager
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["headers"] = configuration.proxy_headers
+ self.pool_manager = SOCKSProxyManager(**pool_args)
+ else:
+ pool_args["proxy_url"] = configuration.proxy
+ pool_args["proxy_headers"] = configuration.proxy_headers
+ self.pool_manager = urllib3.ProxyManager(**pool_args)
else:
- self.pool_manager = urllib3.PoolManager(
- num_pools=pools_size,
- maxsize=maxsize,
- cert_reqs=cert_reqs,
- ca_certs=ca_certs,
- cert_file=configuration.cert_file,
- key_file=configuration.key_file,
- **addition_pool_args
- )
+ self.pool_manager = urllib3.PoolManager(**pool_args)
def request(
self,
- method: str,
- url: str,
- headers: typing.Optional[HTTPHeaderDict] = None,
- fields: typing.Optional[typing.Tuple[typing.Tuple[str, typing.Any], ...]] = None,
- body: typing.Optional[typing.Union[str, bytes]] = None,
- stream: bool = False,
- timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
- ) -> urllib3.HTTPResponse:
+ method,
+ url,
+ headers=None,
+ body=None,
+ post_params=None,
+ _request_timeout=None
+ ):
"""Perform requests.
:param method: http request method
:param url: http request url
:param headers: http request headers
- :param body: request body, for other types
- :param fields: request parameters for
- `application/x-www-form-urlencoded`
- or `multipart/form-data`
- :param stream: if True, the urllib3.HTTPResponse object will
- be returned without reading/decoding response
- data. Default is False.
- :param timeout: timeout setting for this request. If one
- number provided, it will be total request
- timeout. It can also be a pair (tuple) of
- (connection, read) timeouts.
+ :param body: request json body, for `application/json`
+ :param post_params: request post parameters,
+ `application/x-www-form-urlencoded`
+ and `multipart/form-data`
+ :param _request_timeout: timeout setting for this request. If one
+ number provided, it will be total request
+ timeout. It can also be a pair (tuple) of
+ (connection, read) timeouts.
"""
method = method.upper()
- assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
- 'PATCH', 'OPTIONS']
-
- if fields and body:
+ assert method in [
+ 'GET',
+ 'HEAD',
+ 'DELETE',
+ 'POST',
+ 'PUT',
+ 'PATCH',
+ 'OPTIONS'
+ ]
+
+ if post_params and body:
raise ApiValueError(
- "body parameter cannot be used with fields parameter."
+ "body parameter cannot be used with post_params parameter."
)
- fields = fields or {}
+ post_params = post_params or {}
headers = headers or {}
- if timeout:
- if isinstance(timeout, (int, float)): # noqa: E501,F821
- timeout = urllib3.Timeout(total=timeout)
- elif (isinstance(timeout, tuple) and
- len(timeout) == 2):
- timeout = urllib3.Timeout(connect=timeout[0], read=timeout[1])
+ timeout = None
+ if _request_timeout:
+ if isinstance(_request_timeout, (int, float)):
+ timeout = urllib3.Timeout(total=_request_timeout)
+ elif (
+ isinstance(_request_timeout, tuple)
+ and len(_request_timeout) == 2
+ ):
+ timeout = urllib3.Timeout(
+ connect=_request_timeout[0],
+ read=_request_timeout[1]
+ )
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
- if 'Content-Type' not in headers and body is None:
+
+ # no content type provided or payload is json
+ content_type = headers.get('Content-Type')
+ if (
+ not content_type
+ or re.search('json', content_type, re.IGNORECASE)
+ ):
+ request_body = None
+ if body is not None:
+ request_body = json.dumps(body)
r = self.pool_manager.request(
method,
url,
- preload_content=not stream,
+ body=request_body,
timeout=timeout,
- headers=headers
+ headers=headers,
+ preload_content=False
)
- elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
+ elif content_type == 'application/x-www-form-urlencoded':
r = self.pool_manager.request(
- method, url,
- body=body,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=False,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
- elif headers['Content-Type'] == 'multipart/form-data':
+ headers=headers,
+ preload_content=False
+ )
+ elif content_type == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
+ # Ensures that dict objects are serialized
+            post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a, b) for a, b in post_params]
r = self.pool_manager.request(
- method, url,
- fields=fields,
+ method,
+ url,
+ fields=post_params,
encode_multipart=True,
- preload_content=not stream,
timeout=timeout,
- headers=headers)
+ headers=headers,
+ preload_content=False
+ )
# Pass a `string` parameter directly in the body to support
- # other content types than Json when `body` argument is
- # provided in serialized form
+ # other content types than JSON when `body` argument is
+ # provided in serialized form.
elif isinstance(body, str) or isinstance(body, bytes):
- request_body = body
r = self.pool_manager.request(
- method, url,
+ method,
+ url,
+ body=body,
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
+ elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool):
+ request_body = "true" if body else "false"
+ r = self.pool_manager.request(
+ method,
+ url,
body=request_body,
- preload_content=not stream,
+ preload_content=False,
timeout=timeout,
headers=headers)
else:
@@ -184,72 +243,16 @@ def request(
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
- r = self.pool_manager.request(method, url,
- preload_content=not stream,
- timeout=timeout,
- headers=headers)
+ r = self.pool_manager.request(
+ method,
+ url,
+ fields={},
+ timeout=timeout,
+ headers=headers,
+ preload_content=False
+ )
except urllib3.exceptions.SSLError as e:
- msg = "{0}\n{1}".format(type(e).__name__, str(e))
+ msg = "\n".join([type(e).__name__, str(e)])
raise ApiException(status=0, reason=msg)
- if not stream:
- # log response body
- logger.debug("response body: %s", r.data)
-
- return r
-
- def GET(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("GET", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def HEAD(self, url, headers=None, stream=False,
- timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("HEAD", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- fields=fields)
-
- def OPTIONS(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("OPTIONS", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def DELETE(self, url, headers=None, body=None,
- stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("DELETE", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def POST(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("POST", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PUT(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PUT", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
-
- def PATCH(self, url, headers=None,
- body=None, stream=False, timeout=None, fields=None) -> urllib3.HTTPResponse:
- return self.request("PATCH", url,
- headers=headers,
- stream=stream,
- timeout=timeout,
- body=body, fields=fields)
+ return RESTResponse(r)
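
A hypothetical smoke test for the rewritten rest client; the constructor and the request signature come directly from the new code above, while the host URL is an assumption:

    from cloudharness_cli.workflows.configuration import Configuration
    from cloudharness_cli.workflows.rest import RESTClientObject

    rest = RESTClientObject(Configuration(host="http://localhost:5000/api"))
    resp = rest.request(
        "GET",
        "http://localhost:5000/api/operations",
        headers={"Accept": "application/json"},
        _request_timeout=(3, 10),  # (connect, read) pair, per the new timeout handling
    )
    print(resp.status, resp.read()[:80])  # RESTResponse caches the data on first read()
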
diff --git a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/schemas.py b/libraries/client/cloudharness_cli/cloudharness_cli/workflows/schemas.py
deleted file mode 100644
index 6f66f649a..000000000
--- a/libraries/client/cloudharness_cli/cloudharness_cli/workflows/schemas.py
+++ /dev/null
@@ -1,2463 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-from collections import defaultdict
-from datetime import date, datetime, timedelta # noqa: F401
-import functools
-import decimal
-import io
-import re
-import types
-import typing
-import uuid
-
-from dateutil.parser.isoparser import isoparser, _takes_ascii
-import frozendict
-
-from cloudharness_cli.workflows.exceptions import (
- ApiTypeError,
- ApiValueError,
-)
-from cloudharness_cli.workflows.configuration import (
- Configuration,
-)
-
-
-class Unset(object):
- """
- An instance of this class is set as the default value for object type(dict) properties that are optional
- When a property has an unset value, that property will not be assigned in the dict
- """
- pass
-
-unset = Unset()
-
-none_type = type(None)
-file_type = io.IOBase
-
-
-class FileIO(io.FileIO):
- """
- A class for storing files
- Note: this class is not immutable
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]):
- if isinstance(arg, (io.FileIO, io.BufferedReader)):
- if arg.closed:
- raise ApiValueError('Invalid file state; file is closed and must be open')
- arg.close()
- inst = super(FileIO, cls).__new__(cls, arg.name)
- super(FileIO, inst).__init__(arg.name)
- return inst
- raise ApiValueError('FileIO must be passed arg which contains the open file')
-
- def __init__(self, arg: typing.Union[io.FileIO, io.BufferedReader]):
- pass
-
-
-def update(d: dict, u: dict):
- """
- Adds u to d
- Where each dict is defaultdict(set)
- """
- if not u:
- return d
- for k, v in u.items():
- if k not in d:
- d[k] = v
- else:
- d[k] = d[k] | v
-
-
-class ValidationMetadata(frozendict.frozendict):
- """
- A class storing metadata that is needed to validate OpenApi Schema payloads
- """
- def __new__(
- cls,
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
- from_server: bool = False,
- configuration: typing.Optional[Configuration] = None,
- seen_classes: typing.FrozenSet[typing.Type] = frozenset(),
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]] = frozendict.frozendict()
- ):
- """
- Args:
- path_to_item: the path to the current data being instantiated.
-                For {'a': [1]} if the code is handling 1, then the path is ('args[0]', 'a', 0)
-                This changes from location to location
-            from_server: whether or not this data came from the server
-                True when receiving server data
-                False when instantiating model with client side data not from the server
- This does not change from location to location
- configuration: the Configuration instance to use
- This is needed because in Configuration:
- - one can disable validation checking
- This does not change from location to location
- seen_classes: when deserializing data that matches multiple schemas, this is used to store
- the schemas that have been traversed. This is used to stop processing when a cycle is seen.
- This changes from location to location
- validated_path_to_schemas: stores the already validated schema classes for a given path location
- This does not change from location to location
- """
- return super().__new__(
- cls,
- path_to_item=path_to_item,
- from_server=from_server,
- configuration=configuration,
- seen_classes=seen_classes,
- validated_path_to_schemas=validated_path_to_schemas
- )
-
- def validation_ran_earlier(self, cls: type) -> bool:
- validated_schemas = self.validated_path_to_schemas.get(self.path_to_item, set())
- validation_ran_earlier = validated_schemas and cls in validated_schemas
- if validation_ran_earlier:
- return True
- if cls in self.seen_classes:
- return True
- return False
-
- @property
- def path_to_item(self) -> typing.Tuple[typing.Union[str, int], ...]:
- return self.get('path_to_item')
-
- @property
- def from_server(self) -> bool:
- return self.get('from_server')
-
- @property
- def configuration(self) -> typing.Optional[Configuration]:
- return self.get('configuration')
-
- @property
- def seen_classes(self) -> typing.FrozenSet[typing.Type]:
- return self.get('seen_classes')
-
- @property
- def validated_path_to_schemas(self) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Type]]:
- return self.get('validated_path_to_schemas')
-
-
-class Singleton:
- """
- Enums and singletons are the same
- The same instance is returned for a given key of (cls, arg)
- """
- _instances = {}
-
- def __new__(cls, arg: typing.Any, **kwargs):
- """
- cls base classes: BoolClass, NoneClass, str, decimal.Decimal
- The 3rd key is used in the tuple below for a corner case where an enum contains integer 1
- However 1.0 can also be ingested into that enum schema because 1.0 == 1 and
- Decimal('1.0') == Decimal('1')
- But if we omitted the 3rd value in the key, then Decimal('1.0') would be stored as Decimal('1')
- and json serializing that instance would be '1' rather than the expected '1.0'
- Adding the 3rd value, the str of arg ensures that 1.0 -> Decimal('1.0') which is serialized as 1.0
- """
- key = (cls, arg, str(arg))
- if key not in cls._instances:
- if isinstance(arg, (none_type, bool, BoolClass, NoneClass)):
- inst = super().__new__(cls)
- cls._instances[key] = inst
- else:
- cls._instances[key] = super().__new__(cls, arg)
- return cls._instances[key]
-
- def __repr__(self):
- if isinstance(self, NoneClass):
- return f'<{self.__class__.__name__}: None>'
- elif isinstance(self, BoolClass):
- if bool(self):
- return f'<{self.__class__.__name__}: True>'
- return f'<{self.__class__.__name__}: False>'
- return f'<{self.__class__.__name__}: {super().__repr__()}>'
-
-
-class classproperty:
-
- def __init__(self, fget):
- self.fget = fget
-
- def __get__(self, owner_self, owner_cls):
- return self.fget(owner_cls)
-
-
-class NoneClass(Singleton):
- @classproperty
- def NONE(cls):
- return cls(None)
-
- def __bool__(self) -> bool:
- return False
-
-
-class BoolClass(Singleton):
- @classproperty
- def TRUE(cls):
- return cls(True)
-
- @classproperty
- def FALSE(cls):
- return cls(False)
-
- @functools.lru_cache()
- def __bool__(self) -> bool:
- for key, instance in self._instances.items():
- if self is instance:
- return bool(key[1])
- raise ValueError('Unable to find the boolean value of this instance')
-
-
-class MetaOapgTyped:
- exclusive_maximum: typing.Union[int, float]
- inclusive_maximum: typing.Union[int, float]
- exclusive_minimum: typing.Union[int, float]
- inclusive_minimum: typing.Union[int, float]
- max_items: int
- min_items: int
- discriminator: typing.Dict[str, typing.Dict[str, typing.Type['Schema']]]
-
-
- class properties:
- # to hold object properties
- pass
-
- additional_properties: typing.Optional[typing.Type['Schema']]
- max_properties: int
- min_properties: int
- all_of: typing.List[typing.Type['Schema']]
- one_of: typing.List[typing.Type['Schema']]
- any_of: typing.List[typing.Type['Schema']]
- not_schema: typing.Type['Schema']
- max_length: int
- min_length: int
- items: typing.Type['Schema']
-
-
-class Schema:
- """
- the base class of all swagger/openapi schemas/models
- """
- __inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict.frozendict, FileIO, bytes, BoolClass, NoneClass}
- _types: typing.Set[typing.Type]
- MetaOapg = MetaOapgTyped
-
- @staticmethod
- def __get_valid_classes_phrase(input_classes):
- """Returns a string phrase describing what types are allowed"""
- all_classes = list(input_classes)
- all_classes = sorted(all_classes, key=lambda cls: cls.__name__)
- all_class_names = [cls.__name__ for cls in all_classes]
- if len(all_class_names) == 1:
- return "is {0}".format(all_class_names[0])
- return "is one of [{0}]".format(", ".join(all_class_names))
-
- @staticmethod
- def _get_class_oapg(item_cls: typing.Union[types.FunctionType, staticmethod, typing.Type['Schema']]) -> typing.Type['Schema']:
- if isinstance(item_cls, types.FunctionType):
- # referenced schema
- return item_cls()
- elif isinstance(item_cls, staticmethod):
- # referenced schema
- return item_cls.__func__()
- return item_cls
-
- @classmethod
- def __type_error_message(
- cls, var_value=None, var_name=None, valid_classes=None, key_type=None
- ):
- """
- Keyword Args:
- var_value (any): the variable which has the type_error
- var_name (str): the name of the variable which has the typ error
- valid_classes (tuple): the accepted classes for current_item's
- value
- key_type (bool): False if our value is a value in a dict
- True if it is a key in a dict
- False if our item is an item in a tuple
- """
- key_or_value = "value"
- if key_type:
- key_or_value = "key"
- valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes)
- msg = "Invalid type. Required {1} type {2} and " "passed type was {3}".format(
- var_name,
- key_or_value,
- valid_classes_phrase,
- type(var_value).__name__,
- )
- return msg
-
- @classmethod
- def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False):
- error_msg = cls.__type_error_message(
- var_name=path_to_item[-1],
- var_value=var_value,
- valid_classes=valid_classes,
- key_type=key_type,
- )
- return ApiTypeError(
- error_msg,
- path_to_item=path_to_item,
- valid_classes=valid_classes,
- key_type=key_type,
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- Schema _validate_oapg
- All keyword validation except for type checking was done in calling stack frames
- If those validations passed, the validated classes are collected in path_to_schemas
-
- Returns:
- path_to_schemas: a map of path to schemas
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- base_class = type(arg)
- if base_class not in cls._types:
- raise cls.__get_type_error(
- arg,
- validation_metadata.path_to_item,
- cls._types,
- key_type=False,
- )
-
- path_to_schemas = {validation_metadata.path_to_item: set()}
- path_to_schemas[validation_metadata.path_to_item].add(cls)
- path_to_schemas[validation_metadata.path_to_item].add(base_class)
- return path_to_schemas
-
- @staticmethod
- def _process_schema_classes_oapg(
- schema_classes: typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]
- ):
- """
- Processes and mutates schema_classes
- If a SomeSchema is a subclass of DictSchema then remove DictSchema because it is already included
- """
- if len(schema_classes) < 2:
- return
- if len(schema_classes) > 2 and UnsetAnyTypeSchema in schema_classes:
- schema_classes.remove(UnsetAnyTypeSchema)
- x_schema = schema_type_classes & schema_classes
- if not x_schema:
- return
- x_schema = x_schema.pop()
- if any(c is not x_schema and issubclass(c, x_schema) for c in schema_classes):
- # needed to not have a mro error in get_new_class
- schema_classes.remove(x_schema)
-
- @classmethod
- def __get_new_cls(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]:
- """
- Make a new dynamic class and return an instance of that class
- We are making an instance of cls, but instead of making cls
- make a new class, new_cls
- which includes dynamic bases including cls
- return an instance of that new class
-
- Dict property + List Item Assignment Use cases:
- 1. value is NOT an instance of the required schema class
- the value is validated by _validate_oapg
- _validate_oapg returns a key value pair
- where the key is the path to the item, and the value will be the required manufactured class
- made out of the matching schemas
-            2. value is an instance of the correct schema type
- the value is NOT validated by _validate_oapg, _validate_oapg only checks that the instance is of the correct schema type
- for this value, _validate_oapg does NOT return an entry for it in _path_to_schemas
- and in list/dict _get_items_oapg,_get_properties_oapg the value will be directly assigned
- because value is of the correct type, and validation was run earlier when the instance was created
- """
- _path_to_schemas = {}
- if validation_metadata.validated_path_to_schemas:
- update(_path_to_schemas, validation_metadata.validated_path_to_schemas)
- if not validation_metadata.validation_ran_earlier(cls):
- other_path_to_schemas = cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- # loop through it make a new class for each entry
- # do not modify the returned result because it is cached and we would be modifying the cached value
- path_to_schemas = {}
- for path, schema_classes in _path_to_schemas.items():
- """
- Use cases
- 1. N number of schema classes + enum + type != bool/None, classes in path_to_schemas: tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
- needs Singleton added
- 2. N number of schema classes + enum + type == bool/None, classes in path_to_schemas: BoolClass/NoneClass
- Singleton already added
- 3. N number of schema classes, classes in path_to_schemas: BoolClass/NoneClass/tuple/frozendict.frozendict/str/Decimal/bytes/FileIo
- """
- cls._process_schema_classes_oapg(schema_classes)
- enum_schema = any(
- issubclass(this_cls, EnumBase) for this_cls in schema_classes)
- inheritable_primitive_type = schema_classes.intersection(cls.__inheritable_primitive_types_set)
- chosen_schema_classes = schema_classes - inheritable_primitive_type
- suffix = tuple(inheritable_primitive_type)
- if enum_schema and suffix[0] not in {NoneClass, BoolClass}:
- suffix = (Singleton,) + suffix
-
- used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix
- mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes)
- path_to_schemas[path] = mfg_cls
-
- return path_to_schemas
-
- @classmethod
- def _get_new_instance_without_conversion_oapg(
- cls,
- arg: typing.Any,
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- # We have a Dynamic class and we are making an instance of it
- if issubclass(cls, frozendict.frozendict) and issubclass(cls, DictBase):
- properties = cls._get_properties_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, properties)
- elif issubclass(cls, tuple) and issubclass(cls, ListBase):
- items = cls._get_items_oapg(arg, path_to_item, path_to_schemas)
- return super(Schema, cls).__new__(cls, items)
- """
- str = openapi str, date, and datetime
- decimal.Decimal = openapi int and float
- FileIO = openapi binary type and the user inputs a file
- bytes = openapi binary type and the user inputs bytes
- """
- return super(Schema, cls).__new__(cls, arg)
-
- @classmethod
- def from_openapi_data_oapg(
- cls,
- arg: typing.Union[
- str,
- date,
- datetime,
- int,
- float,
- decimal.Decimal,
- bool,
- None,
- 'Schema',
- dict,
- frozendict.frozendict,
- tuple,
- list,
- io.FileIO,
- io.BufferedReader,
- bytes
- ],
- _configuration: typing.Optional[Configuration]
- ):
- """
- Schema from_openapi_data_oapg
- """
- from_server = True
- validated_path_to_schemas = {}
- arg = cast_to_allowed_types(arg, from_server, validated_path_to_schemas)
- validation_metadata = ValidationMetadata(
- from_server=from_server, configuration=_configuration, validated_path_to_schemas=validated_path_to_schemas)
- path_to_schemas = cls.__get_new_cls(arg, validation_metadata)
- new_cls = path_to_schemas[validation_metadata.path_to_item]
- new_inst = new_cls._get_new_instance_without_conversion_oapg(
- arg,
- validation_metadata.path_to_item,
- path_to_schemas
- )
- return new_inst
-
- @staticmethod
- def __get_input_dict(*args, **kwargs) -> frozendict.frozendict:
- input_dict = {}
- if args and isinstance(args[0], (dict, frozendict.frozendict)):
- input_dict.update(args[0])
- if kwargs:
- input_dict.update(kwargs)
- return frozendict.frozendict(input_dict)
-
- @staticmethod
- def __remove_unsets(kwargs):
- return {key: val for key, val in kwargs.items() if val is not unset}
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
- """
- Schema __new__
-
- Args:
- args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): the value
- kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict.frozendict/bool/None): dict values
- _configuration: contains the Configuration that enables json schema validation keywords
- like minItems, minLength etc
-
- Note: double underscores are used here because pycharm thinks that these variables
- are instance properties if they are named normally :(
- """
- __kwargs = cls.__remove_unsets(kwargs)
- if not args and not __kwargs:
- raise TypeError(
- 'No input given. args or kwargs must be given.'
- )
- if not __kwargs and args and not isinstance(args[0], dict):
- __arg = args[0]
- else:
- __arg = cls.__get_input_dict(*args, **__kwargs)
- __from_server = False
- __validated_path_to_schemas = {}
- __arg = cast_to_allowed_types(
- __arg, __from_server, __validated_path_to_schemas)
- __validation_metadata = ValidationMetadata(
- configuration=_configuration, from_server=__from_server, validated_path_to_schemas=__validated_path_to_schemas)
- __path_to_schemas = cls.__get_new_cls(__arg, __validation_metadata)
- __new_cls = __path_to_schemas[__validation_metadata.path_to_item]
- return __new_cls._get_new_instance_without_conversion_oapg(
- __arg,
- __validation_metadata.path_to_item,
- __path_to_schemas
- )
-
- def __init__(
- self,
- *args: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
- _configuration: typing.Optional[Configuration] = None,
- **kwargs: typing.Union[
- dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
- ]
- ):
- """
- this is needed to fix 'Unexpected argument' warning in pycharm
- this code does nothing because all Schema instances are immutable
- this means that all input data is passed into and used in __new__; after the new instance is made
- no new attributes are assigned and __init__ is not used
- """
- pass
-
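-# A minimal, self-contained sketch of the dynamic-class technique used by
-# __get_new_cls/__new__ above: a class is manufactured whose bases combine the
-# matched schema classes with one inheritable primitive type, and the payload
-# is instantiated from it. '_ExampleSchema' is a hypothetical stand-in for a
-# generated Schema subclass; the real code goes through the get_new_class helper.
-import decimal as _decimal_example
-
-class _ExampleSchema:
-    pass
-
-# mirrors get_new_class(class_name='DynamicSchema', bases=used_classes)
-_DynamicExample = type('DynamicSchema', (_ExampleSchema, _decimal_example.Decimal), {})
-_example_instance = _DynamicExample('1.5')
-assert isinstance(_example_instance, _ExampleSchema)
-assert isinstance(_example_instance, _decimal_example.Decimal)
-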
-"""
-import itertools
-data_types = ('None', 'FrozenDict', 'Tuple', 'Str', 'Decimal', 'Bool')
-type_to_cls = {
- 'None': 'NoneClass',
- 'FrozenDict': 'frozendict.frozendict',
- 'Tuple': 'tuple',
- 'Str': 'str',
- 'Decimal': 'decimal.Decimal',
- 'Bool': 'BoolClass'
-}
-cls_tuples = [v for v in itertools.combinations(data_types, 5)]
-typed_classes = [f"class {''.join(cls_tuple)}Mixin({', '.join(type_to_cls[typ] for typ in cls_tuple)}):\n pass" for cls_tuple in cls_tuples]
-for cls in typed_classes:
- print(cls)
-object_classes = [f"{''.join(cls_tuple)}Mixin = object" for cls_tuple in cls_tuples]
-for cls in object_classes:
- print(cls)
-"""
-if typing.TYPE_CHECKING:
- # qty 1
- NoneMixin = NoneClass
- FrozenDictMixin = frozendict.frozendict
- TupleMixin = tuple
- StrMixin = str
- DecimalMixin = decimal.Decimal
- BoolMixin = BoolClass
- BytesMixin = bytes
- FileMixin = FileIO
- # qty 2
- class BinaryMixin(bytes, FileIO):
- pass
- class NoneFrozenDictMixin(NoneClass, frozendict.frozendict):
- pass
- class NoneTupleMixin(NoneClass, tuple):
- pass
- class NoneStrMixin(NoneClass, str):
- pass
- class NoneDecimalMixin(NoneClass, decimal.Decimal):
- pass
- class NoneBoolMixin(NoneClass, BoolClass):
- pass
- class FrozenDictTupleMixin(frozendict.frozendict, tuple):
- pass
- class FrozenDictStrMixin(frozendict.frozendict, str):
- pass
- class FrozenDictDecimalMixin(frozendict.frozendict, decimal.Decimal):
- pass
- class FrozenDictBoolMixin(frozendict.frozendict, BoolClass):
- pass
- class TupleStrMixin(tuple, str):
- pass
- class TupleDecimalMixin(tuple, decimal.Decimal):
- pass
- class TupleBoolMixin(tuple, BoolClass):
- pass
- class StrDecimalMixin(str, decimal.Decimal):
- pass
- class StrBoolMixin(str, BoolClass):
- pass
- class DecimalBoolMixin(decimal.Decimal, BoolClass):
- pass
- # qty 3
- class NoneFrozenDictTupleMixin(NoneClass, frozendict.frozendict, tuple):
- pass
- class NoneFrozenDictStrMixin(NoneClass, frozendict.frozendict, str):
- pass
- class NoneFrozenDictDecimalMixin(NoneClass, frozendict.frozendict, decimal.Decimal):
- pass
- class NoneFrozenDictBoolMixin(NoneClass, frozendict.frozendict, BoolClass):
- pass
- class NoneTupleStrMixin(NoneClass, tuple, str):
- pass
- class NoneTupleDecimalMixin(NoneClass, tuple, decimal.Decimal):
- pass
- class NoneTupleBoolMixin(NoneClass, tuple, BoolClass):
- pass
- class NoneStrDecimalMixin(NoneClass, str, decimal.Decimal):
- pass
- class NoneStrBoolMixin(NoneClass, str, BoolClass):
- pass
- class NoneDecimalBoolMixin(NoneClass, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrMixin(frozendict.frozendict, tuple, str):
- pass
- class FrozenDictTupleDecimalMixin(frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class FrozenDictTupleBoolMixin(frozendict.frozendict, tuple, BoolClass):
- pass
- class FrozenDictStrDecimalMixin(frozendict.frozendict, str, decimal.Decimal):
- pass
- class FrozenDictStrBoolMixin(frozendict.frozendict, str, BoolClass):
- pass
- class FrozenDictDecimalBoolMixin(frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalMixin(tuple, str, decimal.Decimal):
- pass
- class TupleStrBoolMixin(tuple, str, BoolClass):
- pass
- class TupleDecimalBoolMixin(tuple, decimal.Decimal, BoolClass):
- pass
- class StrDecimalBoolMixin(str, decimal.Decimal, BoolClass):
- pass
- # qty 4
- class NoneFrozenDictTupleStrMixin(NoneClass, frozendict.frozendict, tuple, str):
- pass
- class NoneFrozenDictTupleDecimalMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal):
- pass
- class NoneFrozenDictTupleBoolMixin(NoneClass, frozendict.frozendict, tuple, BoolClass):
- pass
- class NoneFrozenDictStrDecimalMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal):
- pass
- class NoneFrozenDictStrBoolMixin(NoneClass, frozendict.frozendict, str, BoolClass):
- pass
- class NoneFrozenDictDecimalBoolMixin(NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalMixin(NoneClass, tuple, str, decimal.Decimal):
- pass
- class NoneTupleStrBoolMixin(NoneClass, tuple, str, BoolClass):
- pass
- class NoneTupleDecimalBoolMixin(NoneClass, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneStrDecimalBoolMixin(NoneClass, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalMixin(frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class FrozenDictTupleStrBoolMixin(frozendict.frozendict, tuple, str, BoolClass):
- pass
- class FrozenDictTupleDecimalBoolMixin(frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class FrozenDictStrDecimalBoolMixin(frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class TupleStrDecimalBoolMixin(tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal):
- pass
- class NoneFrozenDictTupleStrBoolMixin(NoneClass, frozendict.frozendict, tuple, str, BoolClass):
- pass
- class NoneFrozenDictTupleDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass):
- pass
- class NoneFrozenDictStrDecimalBoolMixin(NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass):
- pass
- class NoneTupleStrDecimalBoolMixin(NoneClass, tuple, str, decimal.Decimal, BoolClass):
- pass
- class FrozenDictTupleStrDecimalBoolMixin(frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass):
- pass
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin(NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes):
- pass
-else:
- # qty 1
- class NoneMixin:
- _types = {NoneClass}
- class FrozenDictMixin:
- _types = {frozendict.frozendict}
- class TupleMixin:
- _types = {tuple}
- class StrMixin:
- _types = {str}
- class DecimalMixin:
- _types = {decimal.Decimal}
- class BoolMixin:
- _types = {BoolClass}
- class BytesMixin:
- _types = {bytes}
- class FileMixin:
- _types = {FileIO}
- # qty 2
- class BinaryMixin:
- _types = {bytes, FileIO}
- class NoneFrozenDictMixin:
- _types = {NoneClass, frozendict.frozendict}
- class NoneTupleMixin:
- _types = {NoneClass, tuple}
- class NoneStrMixin:
- _types = {NoneClass, str}
- class NoneDecimalMixin:
- _types = {NoneClass, decimal.Decimal}
- class NoneBoolMixin:
- _types = {NoneClass, BoolClass}
- class FrozenDictTupleMixin:
- _types = {frozendict.frozendict, tuple}
- class FrozenDictStrMixin:
- _types = {frozendict.frozendict, str}
- class FrozenDictDecimalMixin:
- _types = {frozendict.frozendict, decimal.Decimal}
- class FrozenDictBoolMixin:
- _types = {frozendict.frozendict, BoolClass}
- class TupleStrMixin:
- _types = {tuple, str}
- class TupleDecimalMixin:
- _types = {tuple, decimal.Decimal}
- class TupleBoolMixin:
- _types = {tuple, BoolClass}
- class StrDecimalMixin:
- _types = {str, decimal.Decimal}
- class StrBoolMixin:
- _types = {str, BoolClass}
- class DecimalBoolMixin:
- _types = {decimal.Decimal, BoolClass}
- # qty 3
- class NoneFrozenDictTupleMixin:
- _types = {NoneClass, frozendict.frozendict, tuple}
- class NoneFrozenDictStrMixin:
- _types = {NoneClass, frozendict.frozendict, str}
- class NoneFrozenDictDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal}
- class NoneFrozenDictBoolMixin:
- _types = {NoneClass, frozendict.frozendict, BoolClass}
- class NoneTupleStrMixin:
- _types = {NoneClass, tuple, str}
- class NoneTupleDecimalMixin:
- _types = {NoneClass, tuple, decimal.Decimal}
- class NoneTupleBoolMixin:
- _types = {NoneClass, tuple, BoolClass}
- class NoneStrDecimalMixin:
- _types = {NoneClass, str, decimal.Decimal}
- class NoneStrBoolMixin:
- _types = {NoneClass, str, BoolClass}
- class NoneDecimalBoolMixin:
- _types = {NoneClass, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrMixin:
- _types = {frozendict.frozendict, tuple, str}
- class FrozenDictTupleDecimalMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal}
- class FrozenDictTupleBoolMixin:
- _types = {frozendict.frozendict, tuple, BoolClass}
- class FrozenDictStrDecimalMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal}
- class FrozenDictStrBoolMixin:
- _types = {frozendict.frozendict, str, BoolClass}
- class FrozenDictDecimalBoolMixin:
- _types = {frozendict.frozendict, decimal.Decimal, BoolClass}
- class TupleStrDecimalMixin:
- _types = {tuple, str, decimal.Decimal}
- class TupleStrBoolMixin:
- _types = {tuple, str, BoolClass}
- class TupleDecimalBoolMixin:
- _types = {tuple, decimal.Decimal, BoolClass}
- class StrDecimalBoolMixin:
- _types = {str, decimal.Decimal, BoolClass}
- # qty 4
- class NoneFrozenDictTupleStrMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str}
- class NoneFrozenDictTupleDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal}
- class NoneFrozenDictTupleBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, BoolClass}
- class NoneFrozenDictStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal}
- class NoneFrozenDictStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, BoolClass}
- class NoneFrozenDictDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal}
- class NoneTupleStrBoolMixin:
- _types = {NoneClass, tuple, str, BoolClass}
- class NoneTupleDecimalBoolMixin:
- _types = {NoneClass, tuple, decimal.Decimal, BoolClass}
- class NoneStrDecimalBoolMixin:
- _types = {NoneClass, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal}
- class FrozenDictTupleStrBoolMixin:
- _types = {frozendict.frozendict, tuple, str, BoolClass}
- class FrozenDictTupleDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class FrozenDictStrDecimalBoolMixin:
- _types = {frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class TupleStrDecimalBoolMixin:
- _types = {tuple, str, decimal.Decimal, BoolClass}
- # qty 5
- class NoneFrozenDictTupleStrDecimalMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal}
- class NoneFrozenDictTupleStrBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, BoolClass}
- class NoneFrozenDictTupleDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, decimal.Decimal, BoolClass}
- class NoneFrozenDictStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, str, decimal.Decimal, BoolClass}
- class NoneTupleStrDecimalBoolMixin:
- _types = {NoneClass, tuple, str, decimal.Decimal, BoolClass}
- class FrozenDictTupleStrDecimalBoolMixin:
- _types = {frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 6
- class NoneFrozenDictTupleStrDecimalBoolMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass}
- # qty 8
- class NoneFrozenDictTupleStrDecimalBoolFileBytesMixin:
- _types = {NoneClass, frozendict.frozendict, tuple, str, decimal.Decimal, BoolClass, FileIO, bytes}
-
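-# A short sketch of why the mixins above are defined twice, assuming only the
-# standard library: under TYPE_CHECKING the mixins really inherit the primitive
-# types so type checkers see str/tuple/etc. methods, while at runtime they only
-# carry the _types set that _validate_oapg checks against.
-import typing as _typing_example
-
-if _typing_example.TYPE_CHECKING:
-    class _StrMixinSketch(str):
-        pass
-else:
-    class _StrMixinSketch:
-        _types = {str}
-
-assert _StrMixinSketch._types == {str}  # at runtime the else branch is the one defined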
-
-class ValidatorBase:
- @staticmethod
- def _is_json_validation_enabled_oapg(schema_keyword, configuration=None):
- """Returns true if JSON schema validation is enabled for the specified
- validation keyword. This can be used to skip JSON schema structural validation
- as requested in the configuration.
- Note: the suffix _oapg stands for openapi python (experimental) generator and
- it has been added to prevent collisions with other methods and properties
-
- Args:
- schema_keyword (string): the name of a JSON schema validation keyword.
- configuration (Configuration): the configuration class.
- """
-
- return (configuration is None or
- not hasattr(configuration, '_disabled_client_side_validations') or
- schema_keyword not in configuration._disabled_client_side_validations)
-
- @staticmethod
- def _raise_validation_errror_message_oapg(value, constraint_msg, constraint_value, path_to_item, additional_txt=""):
- raise ApiValueError(
- "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format(
- value=value,
- constraint_msg=constraint_msg,
- constraint_value=constraint_value,
- additional_txt=additional_txt,
- path_to_item=path_to_item,
- )
- )
-
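-# A standalone sketch of the keyword gate above: a keyword's validation runs
-# unless the configuration lists it in _disabled_client_side_validations.
-# '_FakeConfiguration' is a hypothetical stand-in for Configuration.
-class _FakeConfiguration:
-    _disabled_client_side_validations = {'maxLength'}
-
-def _is_enabled_sketch(schema_keyword, configuration=None):
-    return (configuration is None or
-            not hasattr(configuration, '_disabled_client_side_validations') or
-            schema_keyword not in configuration._disabled_client_side_validations)
-
-assert _is_enabled_sketch('maxLength') is True                         # no configuration
-assert _is_enabled_sketch('maxLength', _FakeConfiguration()) is False  # disabled
-assert _is_enabled_sketch('pattern', _FakeConfiguration()) is True     # still enabled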
-
-class EnumBase:
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- EnumBase _validate_oapg
- Validates that arg is in the enum's allowed values
- """
- try:
- cls.MetaOapg.enum_value_to_name[arg]
- except KeyError:
- raise ApiValueError("Invalid value {} passed in to {}, allowed_values={}".format(arg, cls, cls.MetaOapg.enum_value_to_name.keys()))
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
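-# Sketch of the enum check above: allowed values live in a dict, so membership
-# is a plain lookup and a KeyError signals a disallowed value. The mapping
-# below is hypothetical; generated schemas provide MetaOapg.enum_value_to_name.
-_enum_value_to_name_example = {'red': 'RED', 'blue': 'BLUE'}
-
-def _validate_enum_sketch(arg):
-    try:
-        _enum_value_to_name_example[arg]
-    except KeyError:
-        raise ValueError('Invalid value {} passed in, allowed_values={}'.format(
-            arg, list(_enum_value_to_name_example)))
-    return arg
-
-assert _validate_enum_sketch('red') == 'red'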
-
-class BoolBase:
- def is_true_oapg(self) -> bool:
- """
- A replacement for x is True
- True if the instance is a BoolClass True Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self)
-
- def is_false_oapg(self) -> bool:
- """
- A replacement for x is False
- True if the instance is a BoolClass False Singleton
- """
- if not issubclass(self.__class__, BoolClass):
- return False
- return bool(self) is False
-
-
-class NoneBase:
- def is_none_oapg(self) -> bool:
- """
- A replacement for x is None
- True if the instance is a NoneClass None Singleton
- """
- if issubclass(self.__class__, NoneClass):
- return True
- return False
-
-
-class StrBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_str_oapg(self) -> str:
- return self
-
- @property
- def as_date_oapg(self) -> date:
- raise NotImplementedError('not implemented')
-
- @property
- def as_datetime_oapg(self) -> datetime:
- raise NotImplementedError('not implemented')
-
- @property
- def as_decimal_oapg(self) -> decimal.Decimal:
- raise NotImplementedError('not implemented')
-
- @property
- def as_uuid_oapg(self) -> uuid.UUID:
- raise NotImplementedError('not implemented')
-
- @classmethod
- def __check_str_validations(
- cls,
- arg: str,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_length') and
- len(arg) > cls.MetaOapg.max_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be less than or equal to",
- constraint_value=cls.MetaOapg.max_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minLength', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_length') and
- len(arg) < cls.MetaOapg.min_length):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="length must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_length,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('pattern', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'regex')):
- for regex_dict in cls.MetaOapg.regex:
- flags = regex_dict.get('flags', 0)
- if not re.search(regex_dict['pattern'], arg, flags=flags):
- if flags != 0:
- # regex flags were specified in the OAS document,
- # so include them in the error message
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item,
- additional_txt=" with flags=`{}`".format(flags)
- )
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must match regular expression",
- constraint_value=regex_dict['pattern'],
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- StrBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, str):
- cls.__check_str_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
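-# Standalone sketch of the string keyword checks above, with hypothetical
-# limits standing in for MetaOapg.max_length / min_length / regex:
-import re as _re_example
-
-def _check_str_sketch(arg, max_length=5, min_length=2, pattern=r'^[a-z]+$'):
-    if len(arg) > max_length:
-        raise ValueError('length must be less than or equal to {}'.format(max_length))
-    if len(arg) < min_length:
-        raise ValueError('length must be greater than or equal to {}'.format(min_length))
-    if not _re_example.search(pattern, arg):
-        raise ValueError('must match regular expression {}'.format(pattern))
-    return arg
-
-assert _check_str_sketch('abc') == 'abc'  # passes all three keyword checks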
-
-class UUIDBase:
- @property
- @functools.lru_cache()
- def as_uuid_oapg(self) -> uuid.UUID:
- return uuid.UUID(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- uuid.UUID(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Invalid value '{}' for type UUID at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- UUIDBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class CustomIsoparser(isoparser):
-
- @_takes_ascii
- def parse_isodatetime(self, dt_str):
- components, pos = self._parse_isodate(dt_str)
- if len(dt_str) > pos:
- if self._sep is None or dt_str[pos:pos + 1] == self._sep:
- components += self._parse_isotime(dt_str[pos + 1:])
- else:
- raise ValueError('String contains unknown ISO components')
-
- if len(components) > 3 and components[3] == 24:
- components[3] = 0
- return datetime(*components) + timedelta(days=1)
-
- if len(components) <= 3:
- raise ValueError('Value is not a datetime')
-
- return datetime(*components)
-
- @_takes_ascii
- def parse_isodate(self, datestr):
- components, pos = self._parse_isodate(datestr)
-
- if len(datestr) > pos:
- raise ValueError('String contains invalid time components')
-
- if len(components) > 3:
- raise ValueError('String contains invalid time components')
-
- return date(*components)
-
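-# Sketch of the end-of-day handling in parse_isodatetime above: an hour
-# component of 24 is normalized to 00:00 of the following day.
-from datetime import datetime as _dt_example, timedelta as _td_example
-
-_components_example = [2024, 2, 6, 24, 0, 0]  # as parsed from '2024-02-06T24:00:00'
-if len(_components_example) > 3 and _components_example[3] == 24:
-    _components_example[3] = 0
-    _rolled_example = _dt_example(*_components_example) + _td_example(days=1)
-assert _rolled_example == _dt_example(2024, 2, 7, 0, 0, 0)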
-
-DEFAULT_ISOPARSER = CustomIsoparser()
-
-
-class DateBase:
- @property
- @functools.lru_cache()
- def as_date_oapg(self) -> date:
- return DEFAULT_ISOPARSER.parse_isodate(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodate(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 date format. "
- "Invalid value '{}' for type date at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: typing.Optional[ValidationMetadata] = None,
- ):
- """
- DateBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DateTimeBase:
- @property
- @functools.lru_cache()
- def as_datetime_oapg(self) -> datetime:
- return DEFAULT_ISOPARSER.parse_isodatetime(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- DEFAULT_ISOPARSER.parse_isodatetime(arg)
- return True
- except ValueError:
- raise ApiValueError(
- "Value does not conform to the required ISO-8601 datetime format. "
- "Invalid value '{}' for type datetime at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DateTimeBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class DecimalBase:
- """
- A class for storing decimals that are sent over the wire as strings
- These schemas must remain based on StrBase rather than NumberBase
- because picking base classes must be deterministic
- """
-
- @property
- @functools.lru_cache()
- def as_decimal_oapg(self) -> decimal.Decimal:
- return decimal.Decimal(self)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[str], validation_metadata: ValidationMetadata):
- if isinstance(arg, str):
- try:
- decimal.Decimal(arg)
- return True
- except decimal.InvalidOperation:
- raise ApiValueError(
- "Value cannot be converted to a decimal. "
- "Invalid value '{}' for type decimal at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DecimalBase _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class NumberBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- """
- Note: for some numbers like 9.0 they could be represented as an
- integer but our code chooses to store them as
- >>> Decimal('9.0').as_tuple()
- DecimalTuple(sign=0, digits=(9, 0), exponent=-1)
- so we can tell that the value came from a float and convert it back to a float
- during later serialization
- """
- if self.as_tuple().exponent < 0:
- # this could be represented as an integer but should be represented as a float
- # because that's what it was serialized from
- raise ApiValueError(f'{self} is not an integer')
- self._as_int = int(self)
- return self._as_int
-
- @property
- def as_float_oapg(self) -> float:
- try:
- return self._as_float
- except AttributeError:
- if self.as_tuple().exponent >= 0:
- raise ApiValueError(f'{self} is not a float')
- self._as_float = float(self)
- return self._as_float
-
- @classmethod
- def __check_numeric_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if cls._is_json_validation_enabled_oapg('multipleOf',
- validation_metadata.configuration) and hasattr(cls.MetaOapg, 'multiple_of'):
- multiple_of_value = cls.MetaOapg.multiple_of
- if not (float(arg) / multiple_of_value).is_integer():
- # Note: the 'multipleOf' check is only as accurate as the floating point arithmetic.
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="value must be a multiple of",
- constraint_value=multiple_of_value,
- path_to_item=validation_metadata.path_to_item
- )
-
- checking_max_or_min_values = any(
- hasattr(cls.MetaOapg, validation_key) for validation_key in {
- 'exclusive_maximum',
- 'inclusive_maximum',
- 'exclusive_minimum',
- 'inclusive_minimum',
- }
- )
- if not checking_max_or_min_values:
- return
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMaximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_maximum') and
- arg >= cls.MetaOapg.exclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than",
- constraint_value=cls.MetaOapg.exclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('maximum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_maximum') and
- arg > cls.MetaOapg.inclusive_maximum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value less than or equal to",
- constraint_value=cls.MetaOapg.inclusive_maximum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('exclusiveMinimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'exclusive_minimum') and
- arg <= cls.MetaOapg.exclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than",
- constraint_value=cls.MetaOapg.exclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minimum', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'inclusive_minimum') and
- arg < cls.MetaOapg.inclusive_minimum):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="must be a value greater than or equal to",
- constraint_value=cls.MetaOapg.inclusive_minimum,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- NumberBase _validate_oapg
- Validates that validations pass
- """
- if isinstance(arg, decimal.Decimal):
- cls.__check_numeric_validations(arg, validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
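-# The exponent convention above, standalone: 9.0 round-trips as Decimal('9.0')
-# (exponent -1), so the value remembers it came from a float; as_int_oapg can
-# refuse it while as_float_oapg accepts it.
-import decimal as _dec_example
-
-assert _dec_example.Decimal('9').as_tuple().exponent == 0     # int-like value
-assert _dec_example.Decimal('9.0').as_tuple().exponent == -1  # float-like value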
-
-class ListBase(ValidatorBase):
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def __validate_items(cls, list_items, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for items are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- list_items: the input list of items
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
-
- # if we have definitions for an items schema, use it
- # otherwise accept anything
- item_cls = getattr(cls.MetaOapg, 'items', UnsetAnyTypeSchema)
- item_cls = cls._get_class_oapg(item_cls)
- path_to_schemas = {}
- for i, value in enumerate(list_items):
- item_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=validation_metadata.path_to_item+(i,),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if item_validation_metadata.validation_ran_earlier(item_cls):
- continue
- other_path_to_schemas = item_cls._validate_oapg(
- value, validation_metadata=item_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_tuple_validations(
- cls, arg,
- validation_metadata: ValidationMetadata):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_items') and
- len(arg) > cls.MetaOapg.max_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be less than or equal to",
- constraint_value=cls.MetaOapg.max_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_items') and
- len(arg) < cls.MetaOapg.min_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of items must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_items,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('uniqueItems', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'unique_items') and cls.MetaOapg.unique_items and arg):
- unique_items = set(arg)
- if len(arg) > len(unique_items):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="duplicate items were found, and the tuple must not contain duplicates because",
- constraint_value='unique_items==True',
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- ListBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False, which are enums)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, tuple):
- cls.__check_tuple_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, tuple):
- return _path_to_schemas
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- other_path_to_schemas = cls.__validate_items(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_items_oapg(
- cls: 'Schema',
- arg: typing.List[typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- '''
- ListBase _get_items_oapg
- '''
- cast_items = []
-
- for i, value in enumerate(arg):
- item_path_to_item = path_to_item + (i,)
- item_cls = path_to_schemas[item_path_to_item]
- new_value = item_cls._get_new_instance_without_conversion_oapg(
- value,
- item_path_to_item,
- path_to_schemas
- )
- cast_items.append(new_value)
-
- return cast_items
-
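-# Sketch of the list handling above: per-item paths are built by appending the
-# index to path_to_item, and uniqueItems reduces to a set-size comparison.
-_path_to_item_example = ('args[0]',)
-_items_example = ('a', 'b', 'a')
-_item_paths_example = [_path_to_item_example + (i,) for i, _ in enumerate(_items_example)]
-assert _item_paths_example[2] == ('args[0]', 2)
-assert len(_items_example) > len(set(_items_example))  # duplicates -> uniqueItems fails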
-
-class Discriminable:
- MetaOapg: MetaOapgTyped
-
- @classmethod
- def _ensure_discriminator_value_present_oapg(cls, disc_property_name: str, validation_metadata: ValidationMetadata, *args):
- if not args or disc_property_name not in args[0]:
- # The input data does not contain the discriminator property
- raise ApiValueError(
- "Cannot deserialize input data due to missing discriminator. "
- "The discriminator property '{}' is missing at path: {}".format(disc_property_name, validation_metadata.path_to_item)
- )
-
- @classmethod
- def get_discriminated_class_oapg(cls, disc_property_name: str, disc_payload_value: str):
- """
- Used in schemas with discriminators
- """
- if not hasattr(cls.MetaOapg, 'discriminator'):
- return None
- disc = cls.MetaOapg.discriminator()
- if disc_property_name not in disc:
- return None
- discriminated_cls = disc[disc_property_name].get(disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if not hasattr(cls, 'MetaOapg'):
- return None
- elif not (
- hasattr(cls.MetaOapg, 'all_of') or
- hasattr(cls.MetaOapg, 'one_of') or
- hasattr(cls.MetaOapg, 'any_of')
- ):
- return None
- # TODO stop traversing if a cycle is hit
- if hasattr(cls.MetaOapg, 'all_of'):
- for allof_cls in cls.MetaOapg.all_of():
- discriminated_cls = allof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'one_of'):
- for oneof_cls in cls.MetaOapg.one_of():
- discriminated_cls = oneof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- if hasattr(cls.MetaOapg, 'any_of'):
- for anyof_cls in cls.MetaOapg.any_of():
- discriminated_cls = anyof_cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=disc_payload_value)
- if discriminated_cls is not None:
- return discriminated_cls
- return None
-
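-# Sketch of discriminator resolution above, with a hypothetical mapping in
-# place of MetaOapg.discriminator(): the discriminator property name maps to
-# {payload value: schema class}; unknown values yield None, and composed
-# schemas additionally recurse into all_of/one_of/any_of.
-class _CatSketch: pass
-class _DogSketch: pass
-
-_discriminator_example = {'petType': {'cat': _CatSketch, 'dog': _DogSketch}}
-
-def _get_discriminated_class_sketch(disc_property_name, disc_payload_value):
-    if disc_property_name not in _discriminator_example:
-        return None
-    return _discriminator_example[disc_property_name].get(disc_payload_value)
-
-assert _get_discriminated_class_sketch('petType', 'cat') is _CatSketch
-assert _get_discriminated_class_sketch('petType', 'lizard') is None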
-
-class DictBase(Discriminable, ValidatorBase):
-
- @classmethod
- def __validate_arg_presence(cls, arg):
- """
- Ensures that:
- - all required arguments are passed in
- - the input variable names are valid
- - present in properties or
- - accepted because additionalProperties exists
- Exceptions will be raised if:
- - invalid arguments were passed in
- - a var_name is invalid if additional_properties == NotAnyTypeSchema
- and var_name not in properties.__annotations__
- - required properties were not passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- seen_required_properties = set()
- invalid_arguments = []
- required_property_names = getattr(cls.MetaOapg, 'required', set())
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name in arg:
- if property_name in required_property_names:
- seen_required_properties.add(property_name)
- elif property_name in property_annotations:
- continue
- elif additional_properties is not NotAnyTypeSchema:
- continue
- else:
- invalid_arguments.append(property_name)
- missing_required_arguments = list(required_property_names - seen_required_properties)
- if missing_required_arguments:
- missing_required_arguments.sort()
- raise ApiTypeError(
- "{} is missing {} required argument{}: {}".format(
- cls.__name__,
- len(missing_required_arguments),
- "s" if len(missing_required_arguments) > 1 else "",
- missing_required_arguments
- )
- )
- if invalid_arguments:
- invalid_arguments.sort()
- raise ApiTypeError(
- "{} was passed {} invalid argument{}: {}".format(
- cls.__name__,
- len(invalid_arguments),
- "s" if len(invalid_arguments) > 1 else "",
- invalid_arguments
- )
- )
-
- @classmethod
- def __validate_args(cls, arg, validation_metadata: ValidationMetadata):
- """
- Ensures that:
- - values passed in for properties are valid
- Exceptions will be raised if:
- - invalid arguments were passed in
-
- Args:
- arg: the input dict
-
- Raises:
- ApiTypeError - for missing required arguments, or for invalid properties
- """
- path_to_schemas = {}
- additional_properties = getattr(cls.MetaOapg, 'additional_properties', UnsetAnyTypeSchema)
- properties = getattr(cls.MetaOapg, 'properties', {})
- property_annotations = getattr(properties, '__annotations__', {})
- for property_name, value in arg.items():
- path_to_item = validation_metadata.path_to_item+(property_name,)
- if property_name in property_annotations:
- schema = property_annotations[property_name]
- elif additional_properties is not NotAnyTypeSchema:
- if additional_properties is UnsetAnyTypeSchema:
- """
- If additionalProperties is unset and this path_to_item does not yet have
- any validations on it, validate it.
- If it already has validations on it, skip this validation.
- """
- if path_to_item in path_to_schemas:
- continue
- schema = additional_properties
- else:
- raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format(
- value, cls, validation_metadata.path_to_item+(property_name,)
- ))
- schema = cls._get_class_oapg(schema)
- arg_validation_metadata = ValidationMetadata(
- from_server=validation_metadata.from_server,
- configuration=validation_metadata.configuration,
- path_to_item=path_to_item,
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if arg_validation_metadata.validation_ran_earlier(schema):
- continue
- other_path_to_schemas = schema._validate_oapg(value, validation_metadata=arg_validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __check_dict_validations(
- cls,
- arg,
- validation_metadata: ValidationMetadata
- ):
- if not hasattr(cls, 'MetaOapg'):
- return
- if (cls._is_json_validation_enabled_oapg('maxProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'max_properties') and
- len(arg) > cls.MetaOapg.max_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be less than or equal to",
- constraint_value=cls.MetaOapg.max_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- if (cls._is_json_validation_enabled_oapg('minProperties', validation_metadata.configuration) and
- hasattr(cls.MetaOapg, 'min_properties') and
- len(arg) < cls.MetaOapg.min_properties):
- cls._raise_validation_errror_message_oapg(
- value=arg,
- constraint_msg="number of properties must be greater than or equal to",
- constraint_value=cls.MetaOapg.min_properties,
- path_to_item=validation_metadata.path_to_item
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- DictBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False, which are enums)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- if isinstance(arg, frozendict.frozendict):
- cls.__check_dict_validations(arg, validation_metadata)
- _path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
- if not isinstance(arg, frozendict.frozendict):
- return _path_to_schemas
- cls.__validate_arg_presence(arg)
- other_path_to_schemas = cls.__validate_args(arg, validation_metadata=validation_metadata)
- update(_path_to_schemas, other_path_to_schemas)
- try:
- discriminator = cls.MetaOapg.discriminator()
- except AttributeError:
- return _path_to_schemas
- # discriminator exists
- disc_prop_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_prop_name, validation_metadata, arg)
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
- cls.__name__,
- disc_prop_name,
- list(discriminator[disc_prop_name].keys()),
- validation_metadata.path_to_item + (disc_prop_name,)
- )
- )
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
- if updated_vm.validation_ran_earlier(discriminated_cls):
- return _path_to_schemas
- other_path_to_schemas = discriminated_cls._validate_oapg(arg, validation_metadata=updated_vm)
- update(_path_to_schemas, other_path_to_schemas)
- return _path_to_schemas
-
- @classmethod
- def _get_properties_oapg(
- cls,
- arg: typing.Dict[str, typing.Any],
- path_to_item: typing.Tuple[typing.Union[str, int], ...],
- path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Type['Schema']]
- ):
- """
- DictBase _get_properties_oapg, this is how properties are set
- These values already passed validation
- """
- dict_items = {}
-
- for property_name_js, value in arg.items():
- property_path_to_item = path_to_item + (property_name_js,)
- property_cls = path_to_schemas[property_path_to_item]
- new_value = property_cls._get_new_instance_without_conversion_oapg(
- value,
- property_path_to_item,
- path_to_schemas
- )
- dict_items[property_name_js] = new_value
-
- return dict_items
-
- def __setattr__(self, name: str, value: typing.Any):
- if not isinstance(self, FileIO):
- raise AttributeError('property setting not supported on immutable instances')
-
- def __getattr__(self, name: str):
- """
- for instance.name access
- Properties are only type hinted for required properties
- so that hasattr(instance, 'optionalProp') is False when that key is not present
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- if name not in self.__class__.__annotations__:
- raise AttributeError(f"{self} has no attribute '{name}'")
- try:
- value = self[name]
- return value
- except KeyError as ex:
- raise AttributeError(str(ex))
-
- def __getitem__(self, name: str):
- """
- dict_instance[name] accessor
- raises KeyError for missing keys
- """
- if not isinstance(self, frozendict.frozendict):
- return super().__getattr__(name)
- return super().__getitem__(name)
-
- def get_item_oapg(self, name: str) -> typing.Union['AnyTypeSchema', Unset]:
- # dict_instance[name] accessor
- if not isinstance(self, frozendict.frozendict):
- raise NotImplementedError()
- try:
- return super().__getitem__(name)
- except KeyError:
- return unset
-
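-# Sketch of __validate_arg_presence above, with hypothetical property sets: a
-# name must be required, declared in properties.__annotations__, or absorbed by
-# additionalProperties; anything else is invalid, and absent required names are
-# reported together.
-_required_example = {'id'}
-_declared_example = {'id', 'name'}
-_additional_allowed_example = False  # i.e. additional_properties is NotAnyTypeSchema
-
-def _check_presence_sketch(arg):
-    missing = sorted(_required_example - arg.keys())
-    invalid = sorted(k for k in arg
-                     if k not in _declared_example and not _additional_allowed_example)
-    if missing:
-        raise TypeError('missing required argument(s): {}'.format(missing))
-    if invalid:
-        raise TypeError('invalid argument(s): {}'.format(invalid))
-
-_check_presence_sketch({'id': 1, 'name': 'x'})  # ok: required present, all names declared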
-
-def cast_to_allowed_types(
- arg: typing.Union[str, date, datetime, uuid.UUID, decimal.Decimal, int, float, None, dict, frozendict.frozendict, list, tuple, bytes, Schema, io.FileIO, io.BufferedReader],
- from_server: bool,
- validated_path_to_schemas: typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]],
- path_to_item: typing.Tuple[typing.Union[str, int], ...] = tuple(['args[0]']),
-) -> typing.Union[frozendict.frozendict, tuple, decimal.Decimal, str, bytes, BoolClass, NoneClass, FileIO]:
- """
- Casts the input payload arg into the allowed types
- The input validated_path_to_schemas is mutated by running this function
-
- When from_server is False then
- - date/datetime is cast to str
- - int/float is cast to Decimal
-
- If a Schema instance is passed in, it is converted back to a primitive instance because
- one may need to validate that data against the original Schema class AND additional different classes;
- those additional classes will need to be added to the new manufactured class for that payload
- If the code didn't do this and kept the payload as a Schema instance, it would fail to validate against other
- Schema classes and the code wouldn't be able to manufacture a new class that includes all valid schemas
- TODO: store the validated schema classes in validation_metadata
-
- Args:
- arg: the payload
- from_server: whether this payload came from the server or not
- validated_path_to_schemas: a dict that stores the validated classes at any path location in the payload
- """
- if isinstance(arg, Schema):
- # store the already run validations
- schema_classes = set()
- source_schema_was_unset = len(arg.__class__.__bases__) == 2 and UnsetAnyTypeSchema in arg.__class__.__bases__
- if not source_schema_was_unset:
- """
- Do not include UnsetAnyTypeSchema and its base class because
- it did not exist in the original spec schema definition
- It was added to ensure that all instances are of type Schema and the allowed base types
- """
- for cls in arg.__class__.__bases__:
- if cls is Singleton:
- # Skip Singleton
- continue
- schema_classes.add(cls)
- validated_path_to_schemas[path_to_item] = schema_classes
-
- type_error = ApiTypeError(f"Invalid type. Required value type is str and passed type was {type(arg)} at {path_to_item}")
- if isinstance(arg, str):
- return str(arg)
- elif isinstance(arg, (dict, frozendict.frozendict)):
- return frozendict.frozendict({key: cast_to_allowed_types(val, from_server, validated_path_to_schemas, path_to_item + (key,)) for key, val in arg.items()})
- elif isinstance(arg, (bool, BoolClass)):
- """
- this check must come before isinstance(arg, (int, float))
- because isinstance(True, int) is True
- """
- if arg:
- return BoolClass.TRUE
- return BoolClass.FALSE
- elif isinstance(arg, int):
- return decimal.Decimal(arg)
- elif isinstance(arg, float):
- decimal_from_float = decimal.Decimal(arg)
- if decimal_from_float.as_integer_ratio()[1] == 1:
- # 9.0 -> Decimal('9.0')
- # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
- return decimal.Decimal(str(decimal_from_float)+'.0')
- return decimal_from_float
- elif isinstance(arg, (tuple, list)):
- return tuple([cast_to_allowed_types(item, from_server, validated_path_to_schemas, path_to_item + (i,)) for i, item in enumerate(arg)])
- elif isinstance(arg, (none_type, NoneClass)):
- return NoneClass.NONE
- elif isinstance(arg, (date, datetime)):
- if not from_server:
- return arg.isoformat()
- raise type_error
- elif isinstance(arg, uuid.UUID):
- if not from_server:
- return str(arg)
- raise type_error
- elif isinstance(arg, decimal.Decimal):
- return decimal.Decimal(arg)
- elif isinstance(arg, bytes):
- return bytes(arg)
- elif isinstance(arg, (io.FileIO, io.BufferedReader)):
- return FileIO(arg)
- raise ValueError('Invalid type passed in; got input={} type={}'.format(arg, type(arg)))
-
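-# Behavior sketch for the client-side (from_server=False) casts above: dates
-# become ISO strings, ints become Decimal, and a whole-valued float keeps a
-# trailing '.0' so later serialization can restore it as a float.
-import decimal as _cast_dec_example
-from datetime import date as _cast_date_example
-
-assert _cast_date_example(2024, 2, 6).isoformat() == '2024-02-06'  # date -> str
-assert _cast_dec_example.Decimal(7) == 7                           # int -> Decimal
-_whole_example = _cast_dec_example.Decimal(9.0)                    # Decimal('9')
-assert _whole_example.as_integer_ratio()[1] == 1                   # came from a whole float
-assert _cast_dec_example.Decimal(str(_whole_example) + '.0') == _cast_dec_example.Decimal('9.0')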
-
-class ComposedBase(Discriminable):
-
- @classmethod
- def __get_allof_classes(cls, arg, validation_metadata: ValidationMetadata):
- path_to_schemas = defaultdict(set)
- for allof_cls in cls.MetaOapg.all_of():
- if validation_metadata.validation_ran_earlier(allof_cls):
- continue
- other_path_to_schemas = allof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- update(path_to_schemas, other_path_to_schemas)
- return path_to_schemas
-
- @classmethod
- def __get_oneof_class(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata,
- ):
- oneof_classes = []
- path_to_schemas = defaultdict(set)
- for oneof_cls in cls.MetaOapg.one_of():
- if oneof_cls in path_to_schemas[validation_metadata.path_to_item]:
- oneof_classes.append(oneof_cls)
- continue
- if validation_metadata.validation_ran_earlier(oneof_cls):
- oneof_classes.append(oneof_cls)
- continue
- try:
- path_to_schemas = oneof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and oneof_cls is discriminated_cls:
- raise ex
- continue
- oneof_classes.append(oneof_cls)
- if not oneof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the oneOf schemas matched the input data.".format(cls)
- )
- elif len(oneof_classes) > 1:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. Multiple "
- "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
- )
- # exactly one class matches
- return path_to_schemas
-
- @classmethod
- def __get_anyof_classes(
- cls,
- arg,
- discriminated_cls,
- validation_metadata: ValidationMetadata
- ):
- anyof_classes = []
- path_to_schemas = defaultdict(set)
- for anyof_cls in cls.MetaOapg.any_of():
- if validation_metadata.validation_ran_earlier(anyof_cls):
- anyof_classes.append(anyof_cls)
- continue
-
- try:
- other_path_to_schemas = anyof_cls._validate_oapg(arg, validation_metadata=validation_metadata)
- except (ApiValueError, ApiTypeError) as ex:
- if discriminated_cls is not None and anyof_cls is discriminated_cls:
- raise ex
- continue
- anyof_classes.append(anyof_cls)
- update(path_to_schemas, other_path_to_schemas)
- if not anyof_classes:
- raise ApiValueError(
- "Invalid inputs given to generate an instance of {}. None "
- "of the anyOf schemas matched the input data.".format(cls)
- )
- return path_to_schemas
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ) -> typing.Dict[typing.Tuple[typing.Union[str, int], ...], typing.Set[typing.Union['Schema', str, decimal.Decimal, BoolClass, NoneClass, frozendict.frozendict, tuple]]]:
- """
- ComposedBase _validate_oapg
- We return dynamic classes of different bases depending upon the inputs
- This makes it so:
- - the returned instance is always a subclass of our defining schema
- - this allows us to check type based on whether an instance is a subclass of a schema
- - the returned instance is a serializable type (except for None, True, and False, which are enums)
-
- Returns:
- new_cls (type): the new class
-
- Raises:
- ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
- ApiTypeError: when the input type is not in the list of allowed spec types
- """
- # validation checking on types, validations, and enums
- path_to_schemas = super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
- updated_vm = ValidationMetadata(
- configuration=validation_metadata.configuration,
- from_server=validation_metadata.from_server,
- path_to_item=validation_metadata.path_to_item,
- seen_classes=validation_metadata.seen_classes | frozenset({cls}),
- validated_path_to_schemas=validation_metadata.validated_path_to_schemas
- )
-
- # process composed schema
- discriminator = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'discriminator'):
- discriminator = cls.MetaOapg.discriminator()
- discriminated_cls = None
- if discriminator and arg and isinstance(arg, frozendict.frozendict):
- disc_property_name = list(discriminator.keys())[0]
- cls._ensure_discriminator_value_present_oapg(disc_property_name, updated_vm, arg)
- # get discriminated_cls by looking at the dict in the current class
- discriminated_cls = cls.get_discriminated_class_oapg(
- disc_property_name=disc_property_name, disc_payload_value=arg[disc_property_name])
- if discriminated_cls is None:
- raise ApiValueError(
- "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
- arg[disc_property_name],
- cls.__name__,
- disc_property_name,
- list(discriminator[disc_property_name].keys()),
- updated_vm.path_to_item + (disc_property_name,)
- )
- )
-
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'all_of'):
- other_path_to_schemas = cls.__get_allof_classes(arg, validation_metadata=updated_vm)
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'one_of'):
- other_path_to_schemas = cls.__get_oneof_class(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'any_of'):
- other_path_to_schemas = cls.__get_anyof_classes(
- arg,
- discriminated_cls=discriminated_cls,
- validation_metadata=updated_vm
- )
- update(path_to_schemas, other_path_to_schemas)
- not_cls = None
- if hasattr(cls, 'MetaOapg') and hasattr(cls.MetaOapg, 'not_schema'):
- not_cls = cls.MetaOapg.not_schema
- not_cls = cls._get_class_oapg(not_cls)
- if not_cls:
- other_path_to_schemas = None
- not_exception = ApiValueError(
- "Invalid value '{}' was passed in to {}. Value is invalid because it is disallowed by {}".format(
- arg,
- cls.__name__,
- not_cls.__name__,
- )
- )
- if updated_vm.validation_ran_earlier(not_cls):
- raise not_exception
-
- try:
- other_path_to_schemas = not_cls._validate_oapg(arg, validation_metadata=updated_vm)
- except (ApiValueError, ApiTypeError):
- pass
- if other_path_to_schemas:
- raise not_exception
-
- if discriminated_cls is not None and not updated_vm.validation_ran_earlier(discriminated_cls):
- # TODO use an exception from this package here
- assert discriminated_cls in path_to_schemas[updated_vm.path_to_item]
- return path_to_schemas
-
-
-# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
-class ComposedSchema(
- ComposedBase,
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, *args: typing.Any, _configuration: typing.Optional[Configuration] = None, **kwargs):
- if not args:
- if not kwargs:
- raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
- args = (kwargs, )
- return super().from_openapi_data_oapg(args[0], _configuration=_configuration)
-
-
-class ListSchema(
- ListBase,
- Schema,
- TupleMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.List[typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[typing.List[typing.Any], typing.Tuple[typing.Any]], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NoneSchema(
- NoneBase,
- Schema,
- NoneMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: None, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: None, **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class NumberSchema(
- NumberBase,
- Schema,
- DecimalMixin
-):
- """
- This is used for type: number with no format
- Both integers AND floats are accepted
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Union[int, float], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class IntBase:
- @property
- def as_int_oapg(self) -> int:
- try:
- return self._as_int
- except AttributeError:
- self._as_int = int(self)
- return self._as_int
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
-
- denominator = arg.as_integer_ratio()[-1]
- if denominator != 1:
- raise ApiValueError(
- "Invalid value '{}' for type integer at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- IntBase _validate_oapg
- TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class IntSchema(IntBase, NumberSchema):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: int, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class Int32Base:
- __inclusive_minimum = decimal.Decimal(-2147483648)
- __inclusive_maximum = decimal.Decimal(2147483647)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int32 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int32Schema(
- Int32Base,
- IntSchema
-):
- pass
-
-
-class Int64Base:
- __inclusive_minimum = decimal.Decimal(-9223372036854775808)
- __inclusive_maximum = decimal.Decimal(9223372036854775807)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal) and arg.as_tuple().exponent == 0:
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type int64 at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Int64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Int64Schema(
- Int64Base,
- IntSchema
-):
- pass
-
-
-class Float32Base:
- __inclusive_minimum = decimal.Decimal(-3.4028234663852886e+38)
- __inclusive_maximum = decimal.Decimal(3.4028234663852886e+38)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type float at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float32Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-
-class Float32Schema(
- Float32Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class Float64Base:
- __inclusive_minimum = decimal.Decimal(-1.7976931348623157E+308)
- __inclusive_maximum = decimal.Decimal(1.7976931348623157E+308)
-
- @classmethod
- def __validate_format(cls, arg: typing.Optional[decimal.Decimal], validation_metadata: ValidationMetadata):
- if isinstance(arg, decimal.Decimal):
- if not cls.__inclusive_minimum <= arg <= cls.__inclusive_maximum:
- raise ApiValueError(
- "Invalid value '{}' for type double at {}".format(arg, validation_metadata.path_to_item)
- )
-
- @classmethod
- def _validate_oapg(
- cls,
- arg,
- validation_metadata: ValidationMetadata,
- ):
- """
- Float64Base _validate_oapg
- """
- cls.__validate_format(arg, validation_metadata=validation_metadata)
- return super()._validate_oapg(arg, validation_metadata=validation_metadata)
-
-class Float64Schema(
- Float64Base,
- NumberSchema
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: float, _configuration: typing.Optional[Configuration] = None):
- # todo check format
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
-
-class StrSchema(
- StrBase,
- Schema,
- StrMixin
-):
- """
- date + datetime string types must inherit from this class
- That is because one can validate a str payload as both:
- - type: string (format unset)
- - type: string, format: date
- """
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: str, _configuration: typing.Optional[Configuration] = None) -> 'StrSchema':
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: typing.Union[str, date, datetime, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class UUIDSchema(UUIDBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, uuid.UUID], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateSchema(DateBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, date], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DateTimeSchema(DateTimeBase, StrSchema):
-
- def __new__(cls, arg: typing.Union[str, datetime], **kwargs: Configuration):
- return super().__new__(cls, arg, **kwargs)
-
-
-class DecimalSchema(DecimalBase, StrSchema):
-
- def __new__(cls, arg: str, **kwargs: Configuration):
- """
- Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads
- which can be simple (str) or complex (dicts or lists with nested values)
- Because casting is only done once and recursively casts all values prior to validation then for a potential
- client side Decimal input if Decimal was accepted as an input in DecimalSchema then one would not know
- if one was using it for a StrSchema (where it should be cast to str) or one is using it for NumberSchema
- where it should stay as Decimal.
- """
- return super().__new__(cls, arg, **kwargs)
-
-
-class BytesSchema(
- Schema,
- BytesMixin
-):
- """
- this class will subclass bytes and is immutable
- """
- def __new__(cls, arg: bytes, **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class FileSchema(
- Schema,
- FileMixin
-):
- """
- This class is NOT immutable
- Dynamic classes are built using it for example when AnyType allows in binary data
- All other schema classes ARE immutable
- If one wanted to make this immutable one could make this a DictSchema with required properties:
- - data = BytesSchema (which would be an immutable bytes based schema)
- - file_name = StrSchema
- and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
- The downside would be that data would be stored in memory which one may not want to do for very large files
-
- The developer is responsible for closing this file and deleting it
-
- This class was kept as mutable:
- - to allow file reading and writing to disk
- - to be able to preserve file name info
- """
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: Configuration):
- return super(Schema, cls).__new__(cls, arg)
-
-
-class BinaryBase:
- pass
-
-
-class BinarySchema(
- ComposedBase,
- BinaryBase,
- Schema,
- BinaryMixin
-):
- class MetaOapg:
- @staticmethod
- def one_of():
- return [
- BytesSchema,
- FileSchema,
- ]
-
- def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: Configuration):
- return super().__new__(cls, arg)
-
-
-class BoolSchema(
- BoolBase,
- Schema,
- BoolMixin
-):
-
- @classmethod
- def from_openapi_data_oapg(cls, arg: bool, _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, arg: bool, **kwargs: ValidationMetadata):
- return super().__new__(cls, arg, **kwargs)
-
-
-class AnyTypeSchema(
- DictBase,
- ListBase,
- NumberBase,
- StrBase,
- BoolBase,
- NoneBase,
- Schema,
- NoneFrozenDictTupleStrDecimalBoolFileBytesMixin
-):
- # Python representation of a schema defined as true or {}
- pass
-
-
-class UnsetAnyTypeSchema(AnyTypeSchema):
- # Used when additionalProperties/items was not explicitly defined and a defining schema is needed
- pass
-
-
-class NotAnyTypeSchema(
- ComposedSchema,
-):
- """
- Python representation of a schema defined as false or {'not': {}}
- Does not allow inputs of AnyType
- Note: validations on this class are never run because the code knows that no inputs will ever validate
- """
-
- class MetaOapg:
- not_schema = AnyTypeSchema
-
- def __new__(
- cls,
- *args,
- _configuration: typing.Optional[Configuration] = None,
- ) -> 'NotAnyTypeSchema':
- return super().__new__(
- cls,
- *args,
- _configuration=_configuration,
- )
-
-
-class DictSchema(
- DictBase,
- Schema,
- FrozenDictMixin
-):
- @classmethod
- def from_openapi_data_oapg(cls, arg: typing.Dict[str, typing.Any], _configuration: typing.Optional[Configuration] = None):
- return super().from_openapi_data_oapg(arg, _configuration=_configuration)
-
- def __new__(cls, *args: typing.Union[dict, frozendict.frozendict], **kwargs: typing.Union[dict, frozendict.frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, ValidationMetadata]):
- return super().__new__(cls, *args, **kwargs)
-
-
-schema_type_classes = {NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema, AnyTypeSchema}
-
-
-@functools.lru_cache()
-def get_new_class(
- class_name: str,
- bases: typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]
-) -> typing.Type[Schema]:
- """
- Returns a new class that is made with the subclass bases
- """
- new_cls: typing.Type[Schema] = type(class_name, bases, {})
- return new_cls
-
-
-LOG_CACHE_USAGE = False
-
-
-def log_cache_usage(cache_fn):
- if LOG_CACHE_USAGE:
- print(cache_fn.__name__, cache_fn.cache_info())
diff --git a/libraries/client/cloudharness_cli/docs/common/AccountsApi.md b/libraries/client/cloudharness_cli/docs/common/AccountsApi.md
new file mode 100644
index 000000000..d9841b429
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/common/AccountsApi.md
@@ -0,0 +1,73 @@
+# cloudharness_cli.common.AccountsApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**get_config**](AccountsApi.md#get_config) | **GET** /accounts/config | Gets the config for logging into accounts
+
+
+# **get_config**
+> GetConfig200Response get_config()
+
+Gets the config for logging into accounts
+
+Gets the config for logging into accounts
+
+### Example
+
+
+```python
+import cloudharness_cli.common
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
+from cloudharness_cli.common.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.common.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.common.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.common.AccountsApi(api_client)
+
+ try:
+ # Gets the config for logging into accounts
+ api_response = api_instance.get_config()
+ print("The response of AccountsApi->get_config:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling AccountsApi->get_config: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+[**GetConfig200Response**](GetConfig200Response.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Config for accounts log in | - |
+
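+### Usage note
+
+The returned object carries the `url`, `realm` and `client_id` fields described in [GetConfig200Response](GetConfig200Response.md). As a minimal sketch of what a caller might do with it (assuming a Keycloak-style auth server, which this API does not guarantee), the fields can be combined into an OpenID Connect discovery URL:
+
+```python
+config = api_instance.get_config()
+
+# Hypothetical illustration: Keycloak serves its OIDC metadata under
+# <auth url>/realms/<realm>/.well-known/openid-configuration
+discovery_url = f"{config.url}/realms/{config.realm}/.well-known/openid-configuration"
+print(discovery_url, config.client_id)
+```
+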
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/common/AppVersion.md b/libraries/client/cloudharness_cli/docs/common/AppVersion.md
new file mode 100644
index 000000000..8fa56cb6b
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/common/AppVersion.md
@@ -0,0 +1,31 @@
+# AppVersion
+
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**build** | **str** | | [optional]
+**tag** | **str** | | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.common.models.app_version import AppVersion
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AppVersion from a JSON string
+app_version_instance = AppVersion.from_json(json)
+# print the JSON string representation of the object
+print(app_version_instance.to_json())
+
+# convert the object into a dict
+app_version_dict = app_version_instance.to_dict()
+# create an instance of AppVersion from a dict
+app_version_from_dict = AppVersion.from_dict(app_version_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/common/ConfigApi.md b/libraries/client/cloudharness_cli/docs/common/ConfigApi.md
new file mode 100644
index 000000000..e49919d52
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/common/ConfigApi.md
@@ -0,0 +1,70 @@
+# cloudharness_cli.common.ConfigApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**get_version**](ConfigApi.md#get_version) | **GET** /version |
+
+
+# **get_version**
+> AppVersion get_version()
+
+
+
+### Example
+
+
+```python
+import cloudharness_cli.common
+from cloudharness_cli.common.models.app_version import AppVersion
+from cloudharness_cli.common.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.common.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.common.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.common.ConfigApi(api_client)
+
+ try:
+ api_response = api_instance.get_version()
+ print("The response of ConfigApi->get_version:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling ConfigApi->get_version: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+[**AppVersion**](AppVersion.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Deployment version GET | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/common/GetConfig200Response.md b/libraries/client/cloudharness_cli/docs/common/GetConfig200Response.md
new file mode 100644
index 000000000..dbf58049f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/common/GetConfig200Response.md
@@ -0,0 +1,31 @@
+# GetConfig200Response
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**url** | **str** | The auth URL. | [optional]
+**realm** | **str** | The realm. | [optional]
+**client_id** | **str** | The clientID. | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of GetConfig200Response from a JSON string
+get_config200_response_instance = GetConfig200Response.from_json(json)
+# print the JSON string representation of the object
+print(get_config200_response_instance.to_json())
+
+# convert the object into a dict
+get_config200_response_dict = get_config200_response_instance.to_dict()
+# create an instance of GetConfig200Response from a dict
+get_config200_response_from_dict = GetConfig200Response.from_dict(get_config200_response_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/common/SentryApi.md b/libraries/client/cloudharness_cli/docs/common/SentryApi.md
new file mode 100644
index 000000000..83e14bb50
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/common/SentryApi.md
@@ -0,0 +1,78 @@
+# cloudharness_cli.common.SentryApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**getdsn**](SentryApi.md#getdsn) | **GET** /sentry/getdsn/{appname} | Gets the Sentry DSN for a given application
+
+
+# **getdsn**
+> object getdsn(appname)
+
+Gets the Sentry DSN for a given application
+
+Gets the Sentry DSN for a given application
+
+### Example
+
+
+```python
+import cloudharness_cli.common
+from cloudharness_cli.common.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.common.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.common.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.common.SentryApi(api_client)
+ appname = 'appname_example' # str |
+
+ try:
+ # Gets the Sentry DSN for a given application
+ api_response = api_instance.getdsn(appname)
+ print("The response of SentryApi->getdsn:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling SentryApi->getdsn: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **appname** | **str**| |
+
+### Return type
+
+**object**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json, text/html, application/problem+json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Sentry DSN for the given application | - |
+**400** | Sentry not configured for the given application | - |
+**404** | Sentry not configured for the given application | - |
+
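+### Usage note
+
+A typical use of this endpoint is to initialise a Sentry client with the DSN provided by the deployment. A minimal sketch, assuming the response body is the DSN value and that the separate `sentry-sdk` package is installed (it is not part of this client):
+
+```python
+import sentry_sdk
+
+# 'myapp' is a hypothetical application name
+dsn = api_instance.getdsn('myapp')
+sentry_sdk.init(dsn=str(dsn))
+```
+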
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/common/apis/tags/AccountsApi.md b/libraries/client/cloudharness_cli/docs/common/apis/tags/AccountsApi.md
deleted file mode 100644
index 8f749f2e9..000000000
--- a/libraries/client/cloudharness_cli/docs/common/apis/tags/AccountsApi.md
+++ /dev/null
@@ -1,80 +0,0 @@
-
-# cloudharness_cli.common.apis.tags.accounts_api.AccountsApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**get_config**](#get_config) | **get** /accounts/config | Gets the config for logging in into accounts
-
-# **get_config**
-
-> {str: (bool, date, datetime, dict, float, int, list, str, none_type)} get_config()
-
-Gets the config for logging in into accounts
-
-Gets the config for logging in into accounts
-
-### Example
-
-```python
-import cloudharness_cli.common
-from cloudharness_cli.common.apis.tags import accounts_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.common.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.common.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = accounts_api.AccountsApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # Gets the config for logging in into accounts
- api_response = api_instance.get_config()
- pprint(api_response)
- except cloudharness_cli.common.ApiException as e:
- print("Exception when calling AccountsApi->get_config: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#get_config.ApiResponseFor200) | Config for accounts log in
-
-#### get_config.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**url** | str, | str, | The auth URL. | [optional]
-**realm** | str, | str, | The realm. | [optional]
-**clientId** | str, | str, | The clientID. | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/common/apis/tags/SentryApi.md b/libraries/client/cloudharness_cli/docs/common/apis/tags/SentryApi.md
deleted file mode 100644
index 8a32225fc..000000000
--- a/libraries/client/cloudharness_cli/docs/common/apis/tags/SentryApi.md
+++ /dev/null
@@ -1,98 +0,0 @@
-
-# cloudharness_cli.common.apis.tags.sentry_api.SentryApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**getdsn**](#getdsn) | **get** /sentry/getdsn/{appname} | Gets the Sentry DSN for a given application
-
-# **getdsn**
-
-> str getdsn(appname)
-
-Gets the Sentry DSN for a given application
-
-Gets the Sentry DSN for a given application
-
-### Example
-
-```python
-import cloudharness_cli.common
-from cloudharness_cli.common.apis.tags import sentry_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.common.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.common.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = sentry_api.SentryApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'appname': "appname_example",
- }
- try:
- # Gets the Sentry DSN for a given application
- api_response = api_instance.getdsn(
- path_params=path_params,
- )
- pprint(api_response)
- except cloudharness_cli.common.ApiException as e:
- print("Exception when calling SentryApi->getdsn: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-appname | AppnameSchema | |
-
-# AppnameSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#getdsn.ApiResponseFor200) | Sentry DSN for the given application
-
-#### getdsn.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/AuthApi.md b/libraries/client/cloudharness_cli/docs/samples/AuthApi.md
new file mode 100644
index 000000000..04a7dbbd6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/AuthApi.md
@@ -0,0 +1,159 @@
+# cloudharness_cli.samples.AuthApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**valid_cookie**](AuthApi.md#valid_cookie) | **GET** /valid-cookie | Check if the token is valid. Get a token by logging into the base url
+[**valid_token**](AuthApi.md#valid_token) | **GET** /valid | Check if the token is valid. Get a token by logging into the base url
+
+
+# **valid_cookie**
+> str valid_cookie()
+
+Check if the token is valid. Get a token by logging into the base url
+
+Check if the token is valid
+
+### Example
+
+* Api Key Authentication (cookieAuth):
+
+```python
+import os
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure API key authorization: cookieAuth
+configuration.api_key['cookieAuth'] = os.environ["API_KEY"]
+
+# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
+# configuration.api_key_prefix['cookieAuth'] = 'Bearer'
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.AuthApi(api_client)
+
+ try:
+ # Check if the token is valid. Get a token by logging into the base url
+ api_response = api_instance.valid_cookie()
+ print("The response of AuthApi->valid_cookie:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling AuthApi->valid_cookie: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+**str**
+
+### Authorization
+
+[cookieAuth](../README.md#cookieAuth)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Check if token is valid | - |
+**401** | invalid token, unauthorized | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **valid_token**
+> str valid_token()
+
+Check if the token is valid. Get a token by logging into the base url
+
+Check if the token is valid
+
+### Example
+
+* Bearer (JWT) Authentication (bearerAuth):
+
+```python
+import os
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure Bearer authorization (JWT): bearerAuth
+configuration = cloudharness_cli.samples.Configuration(
+ access_token = os.environ["BEARER_TOKEN"]
+)
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.AuthApi(api_client)
+
+ try:
+ # Check if the token is valid. Get a token by logging into the base url
+ api_response = api_instance.valid_token()
+ print("The response of AuthApi->valid_token:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling AuthApi->valid_token: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+**str**
+
+### Authorization
+
+[bearerAuth](../README.md#bearerAuth)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Check if token is valid | - |
+**401** | invalid token, unauthorized | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/InlineResponse202.md b/libraries/client/cloudharness_cli/docs/samples/InlineResponse202.md
new file mode 100644
index 000000000..bb52e77e4
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/InlineResponse202.md
@@ -0,0 +1,29 @@
+# InlineResponse202
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**task** | [**InlineResponse202Task**](InlineResponse202Task.md) | | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of InlineResponse202 from a JSON string
+inline_response202_instance = InlineResponse202.from_json(json)
+# print the JSON string representation of the object
+print(inline_response202_instance.to_json())
+
+# convert the object into a dict
+inline_response202_dict = inline_response202_instance.to_dict()
+# create an instance of InlineResponse202 from a dict
+inline_response202_from_dict = InlineResponse202.from_dict(inline_response202_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/InlineResponse202Task.md b/libraries/client/cloudharness_cli/docs/samples/InlineResponse202Task.md
new file mode 100644
index 000000000..f5ef12ac1
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/InlineResponse202Task.md
@@ -0,0 +1,30 @@
+# InlineResponse202Task
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**href** | **str** | the url where to check the operation status | [optional]
+**name** | **str** | | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of InlineResponse202Task from a JSON string
+inline_response202_task_instance = InlineResponse202Task.from_json(json)
+# print the JSON string representation of the object
+print(inline_response202_task_instance.to_json())
+
+# convert the object into a dict
+inline_response202_task_dict = inline_response202_task_instance.to_dict()
+# create an instance of InlineResponse202Task from a dict
+inline_response202_task_from_dict = InlineResponse202Task.from_dict(inline_response202_task_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/ResourceApi.md b/libraries/client/cloudharness_cli/docs/samples/ResourceApi.md
new file mode 100644
index 000000000..b2d8848f4
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/ResourceApi.md
@@ -0,0 +1,351 @@
+# cloudharness_cli.samples.ResourceApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**create_sample_resource**](ResourceApi.md#create_sample_resource) | **POST** /sampleresources | Create a SampleResource
+[**delete_sample_resource**](ResourceApi.md#delete_sample_resource) | **DELETE** /sampleresources/{sampleresourceId} | Delete a SampleResource
+[**get_sample_resource**](ResourceApi.md#get_sample_resource) | **GET** /sampleresources/{sampleresourceId} | Get a SampleResource
+[**get_sample_resources**](ResourceApi.md#get_sample_resources) | **GET** /sampleresources | List All SampleResources
+[**update_sample_resource**](ResourceApi.md#update_sample_resource) | **PUT** /sampleresources/{sampleresourceId} | Update a SampleResource
+
+
+# **create_sample_resource**
+> create_sample_resource(sample_resource)
+
+Create a SampleResource
+
+Creates a new instance of a `SampleResource`.
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+ sample_resource = cloudharness_cli.samples.SampleResource() # SampleResource | A new `SampleResource` to be created.
+
+ try:
+ # Create a SampleResource
+ api_instance.create_sample_resource(sample_resource)
+ except Exception as e:
+ print("Exception when calling ResourceApi->create_sample_resource: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **sample_resource** | [**SampleResource**](SampleResource.md)| A new `SampleResource` to be created. |
+
+### Return type
+
+void (empty response body)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: Not defined
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**201** | Successful response. | - |
+**400** | Payload must be of type SampleResource | - |
+
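+### Usage note
+
+A minimal create-then-list sketch (field names taken from [SampleResource](SampleResource.md); `a` is the only required property):
+
+```python
+import cloudharness_cli.samples
+
+configuration = cloudharness_cli.samples.Configuration(host="/api")
+
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+    api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+    # create a resource, then read it back through the list endpoint
+    api_instance.create_sample_resource(cloudharness_cli.samples.SampleResource(a=1.0, b=2.0))
+    for resource in api_instance.get_sample_resources():
+        print(resource.a, resource.b, resource.id)
+```
+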
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **delete_sample_resource**
+> delete_sample_resource(sampleresource_id)
+
+Delete a SampleResource
+
+Deletes an existing `SampleResource`.
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+ sampleresource_id = 'sampleresource_id_example' # str | A unique identifier for a `SampleResource`.
+
+ try:
+ # Delete a SampleResource
+ api_instance.delete_sample_resource(sampleresource_id)
+ except Exception as e:
+ print("Exception when calling ResourceApi->delete_sample_resource: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **sampleresource_id** | **str**| A unique identifier for a `SampleResource`. |
+
+### Return type
+
+void (empty response body)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: Not defined
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**204** | Successful response. | - |
+**400** | Parameter must be integer | - |
+**404** | Resource not found | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_sample_resource**
+> SampleResource get_sample_resource(sampleresource_id)
+
+Get a SampleResource
+
+Gets the details of a single instance of a `SampleResource`.
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+ sampleresource_id = 'sampleresource_id_example' # str | A unique identifier for a `SampleResource`.
+
+ try:
+ # Get a SampleResource
+ api_response = api_instance.get_sample_resource(sampleresource_id)
+ print("The response of ResourceApi->get_sample_resource:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling ResourceApi->get_sample_resource: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **sampleresource_id** | **str**| A unique identifier for a `SampleResource`. |
+
+### Return type
+
+[**SampleResource**](SampleResource.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful response - returns a single `SampleResource`. | - |
+**400** | Parameter must be integer | - |
+**404** | Resource not found | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_sample_resources**
+> List[SampleResource] get_sample_resources()
+
+List All SampleResources
+
+Gets a list of all `SampleResource` entities.
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+
+ try:
+ # List All SampleResources
+ api_response = api_instance.get_sample_resources()
+ print("The response of ResourceApi->get_sample_resources:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling ResourceApi->get_sample_resources: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+[**List[SampleResource]**](SampleResource.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful response - returns an array of `SampleResource` entities. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **update_sample_resource**
+> update_sample_resource(sampleresource_id, sample_resource)
+
+Update a SampleResource
+
+Updates an existing `SampleResource`.
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.ResourceApi(api_client)
+ sampleresource_id = 'sampleresource_id_example' # str | A unique identifier for a `SampleResource`.
+ sample_resource = cloudharness_cli.samples.SampleResource() # SampleResource | Updated `SampleResource` information.
+
+ try:
+ # Update a SampleResource
+ api_instance.update_sample_resource(sampleresource_id, sample_resource)
+ except Exception as e:
+ print("Exception when calling ResourceApi->update_sample_resource: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **sampleresource_id** | **str**| A unique identifier for a `SampleResource`. |
+ **sample_resource** | [**SampleResource**](SampleResource.md)| Updated `SampleResource` information. |
+
+### Return type
+
+void (empty response body)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: Not defined
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**202** | Successful response. | - |
+**400** | Parameter must be integer, payload must be of type SampleResource | - |
+**404** | Resource not found | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/SampleResource.md b/libraries/client/cloudharness_cli/docs/samples/SampleResource.md
new file mode 100644
index 000000000..19d6adba7
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/SampleResource.md
@@ -0,0 +1,32 @@
+# SampleResource
+
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**a** | **float** | |
+**b** | **float** | | [optional]
+**id** | **float** | | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of SampleResource from a JSON string
+sample_resource_instance = SampleResource.from_json(json)
+# print the JSON string representation of the object
+print(sample_resource_instance.to_json())
+
+# convert the object into a dict
+sample_resource_dict = sample_resource_instance.to_dict()
+# create an instance of SampleResource from a dict
+sample_resource_from_dict = SampleResource.from_dict(sample_resource_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/TestApi.md b/libraries/client/cloudharness_cli/docs/samples/TestApi.md
new file mode 100644
index 000000000..b1eed271b
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/TestApi.md
@@ -0,0 +1,134 @@
+# cloudharness_cli.samples.TestApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**error**](TestApi.md#error) | **GET** /error | test sentry is working
+[**ping**](TestApi.md#ping) | **GET** /ping | test the application is up
+
+
+# **error**
+> str error()
+
+test sentry is working
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.TestApi(api_client)
+
+ try:
+ # test sentry is working
+ api_response = api_instance.error()
+ print("The response of TestApi->error:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling TestApi->error: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+**str**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | This won't happen | - |
+**500** | Sentry entry should come! | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **ping**
+> float ping()
+
+test the application is up
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.TestApi(api_client)
+
+ try:
+ # test the application is up
+ api_response = api_instance.ping()
+ print("The response of TestApi->ping:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling TestApi->ping: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+**float**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | What we want | - |
+**500** | This shouldn't happen | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/WorkflowsApi.md b/libraries/client/cloudharness_cli/docs/samples/WorkflowsApi.md
new file mode 100644
index 000000000..3df62043c
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/samples/WorkflowsApi.md
@@ -0,0 +1,201 @@
+# cloudharness_cli.samples.WorkflowsApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**submit_async**](WorkflowsApi.md#submit_async) | **GET** /operation_async | Send an asynchronous operation
+[**submit_sync**](WorkflowsApi.md#submit_sync) | **GET** /operation_sync | Send a synchronous operation
+[**submit_sync_with_results**](WorkflowsApi.md#submit_sync_with_results) | **GET** /operation_sync_results | Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+
+# **submit_async**
+> InlineResponse202 submit_async()
+
+Send an asynchronous operation
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.WorkflowsApi(api_client)
+
+ try:
+ # Send an asynchronous operation
+ api_response = api_instance.submit_async()
+ print("The response of WorkflowsApi->submit_async:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling WorkflowsApi->submit_async: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+[**InlineResponse202**](InlineResponse202.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**202** | Submitted operation. See also https://restfulapi.net/http-status-202-accepted/ | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **submit_sync**
+> object submit_sync()
+
+Send a synchronous operation
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.WorkflowsApi(api_client)
+
+ try:
+ # Send a synchronous operation
+ api_response = api_instance.submit_sync()
+ print("The response of WorkflowsApi->submit_sync:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling WorkflowsApi->submit_sync: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+**object**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Operation result | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **submit_sync_with_results**
+> str submit_sync_with_results(a, b)
+
+Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+### Example
+
+
+```python
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.samples.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.samples.WorkflowsApi(api_client)
+ a = 10 # float | first number to sum
+ b = 10 # float | second number to sum
+
+ try:
+ # Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+ api_response = api_instance.submit_sync_with_results(a, b)
+ print("The response of WorkflowsApi->submit_sync_with_results:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling WorkflowsApi->submit_sync_with_results: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **a** | **float**| first number to sum |
+ **b** | **float**| second number to sum |
+
+### Return type
+
+**str**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Operation result | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/samples/apis/tags/AuthApi.md b/libraries/client/cloudharness_cli/docs/samples/apis/tags/AuthApi.md
deleted file mode 100644
index f6a58dfa6..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/apis/tags/AuthApi.md
+++ /dev/null
@@ -1,173 +0,0 @@
-
-# cloudharness_cli.samples.apis.tags.auth_api.AuthApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**valid_cookie**](#valid_cookie) | **get** /valid-cookie | Check if the token is valid. Get a token by logging into the base url
-[**valid_token**](#valid_token) | **get** /valid | Check if the token is valid. Get a token by logging into the base url
-
-# **valid_cookie**
-
-> str valid_cookie()
-
-Check if the token is valid. Get a token by logging into the base url
-
-Check if the token is valid
-
-### Example
-
-* Api Key Authentication (cookieAuth):
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import auth_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure API key authorization: cookieAuth
-configuration.api_key['cookieAuth'] = 'YOUR_API_KEY'
-
-# Uncomment below to setup prefix (e.g. Bearer) for API key, if needed
-# configuration.api_key_prefix['cookieAuth'] = 'Bearer'
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = auth_api.AuthApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # Check if the token is valid. Get a token by logging into the base url
- api_response = api_instance.valid_cookie()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling AuthApi->valid_cookie: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#valid_cookie.ApiResponseFor200) | Check if token is valid
-401 | [ApiResponseFor401](#valid_cookie.ApiResponseFor401) | invalid token, unauthorized
-
-#### valid_cookie.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-#### valid_cookie.ApiResponseFor401
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-[cookieAuth](../../../README.md#cookieAuth)
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **valid_token**
-
-> str valid_token()
-
-Check if the token is valid. Get a token by logging into the base url
-
-Check if the token is valid
-
-### Example
-
-* Bearer (JWT) Authentication (bearerAuth):
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import auth_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure Bearer authorization (JWT): bearerAuth
-configuration = cloudharness_cli.samples.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = auth_api.AuthApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # Check if the token is valid. Get a token by logging into the base url
- api_response = api_instance.valid_token()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling AuthApi->valid_token: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#valid_token.ApiResponseFor200) | Check if token is valid
-401 | [ApiResponseFor401](#valid_token.ApiResponseFor401) | invalid token, unauthorized
-
-#### valid_token.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-#### valid_token.ApiResponseFor401
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-[bearerAuth](../../../README.md#bearerAuth)
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/apis/tags/ResourceApi.md b/libraries/client/cloudharness_cli/docs/samples/apis/tags/ResourceApi.md
deleted file mode 100644
index 5c0bca7c7..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/apis/tags/ResourceApi.md
+++ /dev/null
@@ -1,482 +0,0 @@
-
-# cloudharness_cli.samples.apis.tags.resource_api.ResourceApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**create_sample_resource**](#create_sample_resource) | **post** /sampleresources | Create a SampleResource
-[**delete_sample_resource**](#delete_sample_resource) | **delete** /sampleresources/{sampleresourceId} | Delete a SampleResource
-[**get_sample_resource**](#get_sample_resource) | **get** /sampleresources/{sampleresourceId} | Get a SampleResource
-[**get_sample_resources**](#get_sample_resources) | **get** /sampleresources | List All SampleResources
-[**update_sample_resource**](#update_sample_resource) | **put** /sampleresources/{sampleresourceId} | Update a SampleResource
-
-# **create_sample_resource**
-
-> create_sample_resource(sample_resource)
-
-Create a SampleResource
-
-Creates a new instance of a `SampleResource`.
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import resource_api
-from cloudharness_cli/samples.model.sample_resource import SampleResource
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = resource_api.ResourceApi(api_client)
-
- # example passing only required values which don't have defaults set
- body = SampleResource(
- a=3.14,
- b=3.14,
- id=3.14,
- )
- try:
- # Create a SampleResource
- api_response = api_instance.create_sample_resource(
- body=body,
- )
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling ResourceApi->create_sample_resource: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-body | typing.Union[SchemaForRequestBodyApplicationJson] | required |
-content_type | str | optional, default is 'application/json' | Selects the schema and serialization of the request body
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### body
-
-# SchemaForRequestBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**SampleResource**](../../models/SampleResource.md) | |
-
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-201 | [ApiResponseFor201](#create_sample_resource.ApiResponseFor201) | Successful response.
-400 | [ApiResponseFor400](#create_sample_resource.ApiResponseFor400) | Payload must be of type SampleResource
-
-#### create_sample_resource.ApiResponseFor201
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### create_sample_resource.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **delete_sample_resource**
-
-> delete_sample_resource(sampleresource_id)
-
-Delete a SampleResource
-
-Deletes an existing `SampleResource`.
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import resource_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = resource_api.ResourceApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'sampleresourceId': "sampleresourceId_example",
- }
- try:
- # Delete a SampleResource
- api_response = api_instance.delete_sample_resource(
- path_params=path_params,
- )
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling ResourceApi->delete_sample_resource: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-sampleresourceId | SampleresourceIdSchema | |
-
-# SampleresourceIdSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-204 | [ApiResponseFor204](#delete_sample_resource.ApiResponseFor204) | Successful response.
-400 | [ApiResponseFor400](#delete_sample_resource.ApiResponseFor400) | Parameter must be integer
-404 | [ApiResponseFor404](#delete_sample_resource.ApiResponseFor404) | Resource not found
-
-#### delete_sample_resource.ApiResponseFor204
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### delete_sample_resource.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### delete_sample_resource.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **get_sample_resource**
-
-> SampleResource get_sample_resource(sampleresource_id)
-
-Get a SampleResource
-
-Gets the details of a single instance of a `SampleResource`.
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import resource_api
-from cloudharness_cli/samples.model.sample_resource import SampleResource
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = resource_api.ResourceApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'sampleresourceId': "sampleresourceId_example",
- }
- try:
- # Get a SampleResource
- api_response = api_instance.get_sample_resource(
- path_params=path_params,
- )
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling ResourceApi->get_sample_resource: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-sampleresourceId | SampleresourceIdSchema | |
-
-# SampleresourceIdSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#get_sample_resource.ApiResponseFor200) | Successful response - returns a single `SampleResource`.
-400 | [ApiResponseFor400](#get_sample_resource.ApiResponseFor400) | Parameter must be integer
-404 | [ApiResponseFor404](#get_sample_resource.ApiResponseFor404) | Resource not found
-
-#### get_sample_resource.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**SampleResource**](../../models/SampleResource.md) | |
-
-
-#### get_sample_resource.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### get_sample_resource.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **get_sample_resources**
-
-> [SampleResource] get_sample_resources()
-
-List All SampleResources
-
-Gets a list of all `SampleResource` entities.
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import resource_api
-from cloudharness_cli/samples.model.sample_resource import SampleResource
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = resource_api.ResourceApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # List All SampleResources
- api_response = api_instance.get_sample_resources()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling ResourceApi->get_sample_resources: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#get_sample_resources.ApiResponseFor200) | Successful response - returns an array of `SampleResource` entities.
-
-#### get_sample_resources.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-list, tuple, | tuple, | |
-
-### Tuple Items
-Class Name | Input Type | Accessed Type | Description | Notes
-------------- | ------------- | ------------- | ------------- | -------------
-[**SampleResource**]({{complexTypePrefix}}SampleResource.md) | [**SampleResource**]({{complexTypePrefix}}SampleResource.md) | [**SampleResource**]({{complexTypePrefix}}SampleResource.md) | |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **update_sample_resource**
-
-> update_sample_resource(sampleresource_idsample_resource)
-
-Update a SampleResource
-
-Updates an existing `SampleResource`.
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import resource_api
-from cloudharness_cli/samples.model.sample_resource import SampleResource
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = resource_api.ResourceApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'sampleresourceId': "sampleresourceId_example",
- }
- body = SampleResource(
- a=3.14,
- b=3.14,
- id=3.14,
- )
- try:
- # Update a SampleResource
- api_response = api_instance.update_sample_resource(
- path_params=path_params,
- body=body,
- )
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling ResourceApi->update_sample_resource: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-body | typing.Union[SchemaForRequestBodyApplicationJson] | required |
-path_params | RequestPathParams | |
-content_type | str | optional, default is 'application/json' | Selects the schema and serialization of the request body
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### body
-
-# SchemaForRequestBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**SampleResource**](../../models/SampleResource.md) | |
-
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-sampleresourceId | SampleresourceIdSchema | |
-
-# SampleresourceIdSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-202 | [ApiResponseFor202](#update_sample_resource.ApiResponseFor202) | Successful response.
-400 | [ApiResponseFor400](#update_sample_resource.ApiResponseFor400) | Parameter must be integer, payload must be of type SampleResource
-404 | [ApiResponseFor404](#update_sample_resource.ApiResponseFor404) | Resource not found
-
-#### update_sample_resource.ApiResponseFor202
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### update_sample_resource.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### update_sample_resource.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/apis/tags/TestApi.md b/libraries/client/cloudharness_cli/docs/samples/apis/tags/TestApi.md
deleted file mode 100644
index 9b85504c6..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/apis/tags/TestApi.md
+++ /dev/null
@@ -1,148 +0,0 @@
-
-# cloudharness_cli.samples.apis.tags.test_api.TestApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**error**](#error) | **get** /error | test sentry is working
-[**ping**](#ping) | **get** /ping | test the application is up
-
-# **error**
-
-> str error()
-
-test sentry is working
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import test_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = test_api.TestApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # test sentry is working
- api_response = api_instance.error()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling TestApi->error: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#error.ApiResponseFor200) | This won't happen
-500 | [ApiResponseFor500](#error.ApiResponseFor500) | Sentry entry should come!
-
-#### error.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-#### error.ApiResponseFor500
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **ping**
-
-> int, float ping()
-
-test the application is up
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import test_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = test_api.TestApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # test the application is up
- api_response = api_instance.ping()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling TestApi->ping: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#ping.ApiResponseFor200) | What we want
-500 | [ApiResponseFor500](#ping.ApiResponseFor500) | This shouldn't happen
-
-#### ping.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-decimal.Decimal, int, float, | decimal.Decimal, | |
-
-#### ping.ApiResponseFor500
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/apis/tags/WorkflowsApi.md b/libraries/client/cloudharness_cli/docs/samples/apis/tags/WorkflowsApi.md
deleted file mode 100644
index b5754d762..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/apis/tags/WorkflowsApi.md
+++ /dev/null
@@ -1,230 +0,0 @@
-
-# cloudharness_cli.samples.apis.tags.workflows_api.WorkflowsApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**submit_async**](#submit_async) | **get** /operation_async | Send an asynchronous operation
-[**submit_sync**](#submit_sync) | **get** /operation_sync | Send a synchronous operation
-[**submit_sync_with_results**](#submit_sync_with_results) | **get** /operation_sync_results | Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
-
-# **submit_async**
-
-> InlineResponse202 submit_async()
-
-Send an asynchronous operation
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import workflows_api
-from cloudharness_cli/samples.model.inline_response202 import InlineResponse202
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = workflows_api.WorkflowsApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # Send an asynchronous operation
- api_response = api_instance.submit_async()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling WorkflowsApi->submit_async: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-202 | [ApiResponseFor202](#submit_async.ApiResponseFor202) | Submitted operation. See also https://restfulapi.net/http-status-202-accepted/
-
-#### submit_async.ApiResponseFor202
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor202ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor202ResponseBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**InlineResponse202**](../../models/InlineResponse202.md) | |
-
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **submit_sync**
-
-> {str: (bool, date, datetime, dict, float, int, list, str, none_type)} submit_sync()
-
-Send a synchronous operation
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import workflows_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = workflows_api.WorkflowsApi(api_client)
-
- # example, this endpoint has no required or optional parameters
- try:
- # Send a synchronous operation
- api_response = api_instance.submit_sync()
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling WorkflowsApi->submit_sync: %s\n" % e)
-```
-### Parameters
-This endpoint does not need any parameter.
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#submit_sync.ApiResponseFor200) | Operation result
-
-#### submit_sync.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **submit_sync_with_results**
-
-> str submit_sync_with_results(ab)
-
-Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
-
-### Example
-
-```python
-import cloudharness_cli.samples
-from cloudharness_cli.samples.apis.tags import workflows_api
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.samples.Configuration(
- host = "/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.samples.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = workflows_api.WorkflowsApi(api_client)
-
- # example passing only required values which don't have defaults set
- query_params = {
- 'a': 10,
- 'b': 10,
- }
- try:
- # Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
- api_response = api_instance.submit_sync_with_results(
- query_params=query_params,
- )
- pprint(api_response)
- except cloudharness_cli.samples.ApiException as e:
- print("Exception when calling WorkflowsApi->submit_sync_with_results: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-query_params | RequestQueryParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### query_params
-#### RequestQueryParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-a | ASchema | |
-b | BSchema | |
-
-
-# ASchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-decimal.Decimal, int, float, | decimal.Decimal, | |
-
-# BSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-decimal.Decimal, int, float, | decimal.Decimal, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#submit_sync_with_results.ApiResponseFor200) | Operation result
-
-#### submit_sync_with_results.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202.md b/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202.md
deleted file mode 100644
index d94ffedd7..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# cloudharness_cli.samples.model.inline_response202.InlineResponse202
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**task** | [**InlineResponse202Task**](InlineResponse202Task.md) | [**InlineResponse202Task**](InlineResponse202Task.md) | | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202Task.md b/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202Task.md
deleted file mode 100644
index 8f4c6566e..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/models/InlineResponse202Task.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# cloudharness_cli.samples.model.inline_response202_task.InlineResponse202Task
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**href** | str, | str, | the url where to check the operation status | [optional]
-**name** | str, | str, | | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/samples/models/SampleResource.md b/libraries/client/cloudharness_cli/docs/samples/models/SampleResource.md
deleted file mode 100644
index 509566322..000000000
--- a/libraries/client/cloudharness_cli/docs/samples/models/SampleResource.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# cloudharness_cli.samples.model.sample_resource.SampleResource
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**a** | decimal.Decimal, int, float, | decimal.Decimal, | |
-**b** | decimal.Decimal, int, float, | decimal.Decimal, | | [optional]
-**id** | decimal.Decimal, int, float, | decimal.Decimal, | | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaim.md b/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaim.md
new file mode 100644
index 000000000..41dffef2f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaim.md
@@ -0,0 +1,32 @@
+# PersistentVolumeClaim
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**name** | **str** | Unique name for the Persistent Volume Claim | [optional]
+**namespace** | **str** | The namespace in which the Persistent Volume Claim resides | [optional]
+**accessmode** | **str** | The access mode of the Persistent Volume Claim | [optional]
+**size** | **str** | The size of the Persistent Volume Claim. | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of PersistentVolumeClaim from a JSON string
+persistent_volume_claim_instance = PersistentVolumeClaim.from_json(json)
+# print the JSON string representation of the object
+print(persistent_volume_claim_instance.to_json())
+
+# convert the object into a dict
+persistent_volume_claim_dict = persistent_volume_claim_instance.to_dict()
+# create an instance of PersistentVolumeClaim from a dict
+persistent_volume_claim_from_dict = PersistentVolumeClaim.from_dict(persistent_volume_claim_dict)
+```
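+
+As an illustration, a populated JSON string round-trips through the documented
+properties; the values below are assumptions for the example, not defaults:
+
+```python
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+
+# All four properties are optional, so any subset may appear in the payload
+populated = PersistentVolumeClaim.from_json(
+    '{"name": "my-claim", "namespace": "default", "accessmode": "ReadWriteMany", "size": "2Gi"}'
+)
+print(populated.to_dict())
+```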
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaimCreate.md b/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaimCreate.md
new file mode 100644
index 000000000..c0936ab39
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/volumemanager/PersistentVolumeClaimCreate.md
@@ -0,0 +1,30 @@
+# PersistentVolumeClaimCreate
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**name** | **str** | Unique name for the Persistent Volume Claim to create. | [optional]
+**size** | **str** | The size of the Persistent Volume Claim to create. | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of PersistentVolumeClaimCreate from a JSON string
+persistent_volume_claim_create_instance = PersistentVolumeClaimCreate.from_json(json)
+# print the JSON string representation of the object
+print(persistent_volume_claim_create_instance.to_json())
+
+# convert the object into a dict
+persistent_volume_claim_create_dict = persistent_volume_claim_create_instance.to_dict()
+# create an instance of PersistentVolumeClaimCreate from a dict
+persistent_volume_claim_create_from_dict = PersistentVolumeClaimCreate.from_dict(persistent_volume_claim_create_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/RestApi.md b/libraries/client/cloudharness_cli/docs/volumemanager/RestApi.md
new file mode 100644
index 000000000..15334dc8a
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/volumemanager/RestApi.md
@@ -0,0 +1,165 @@
+# cloudharness_cli.volumemanager.RestApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**pvc_name_get**](RestApi.md#pvc_name_get) | **GET** /pvc/{name} | Retrieve a Persistent Volume Claim from the Kubernetes repository.
+[**pvc_post**](RestApi.md#pvc_post) | **POST** /pvc | Create a Persistent Volume Claim in Kubernetes
+
+
+# **pvc_name_get**
+> PersistentVolumeClaim pvc_name_get(name)
+
+Retrieve a Persistent Volume Claim from the Kubernetes repository.
+
+### Example
+
+* Bearer (JWT) Authentication (bearerAuth):
+
+```python
+import os
+import cloudharness_cli.volumemanager
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+from cloudharness_cli.volumemanager.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.volumemanager.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure Bearer authorization (JWT): bearerAuth
+configuration = cloudharness_cli.volumemanager.Configuration(
+ access_token = os.environ["BEARER_TOKEN"]
+)
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.volumemanager.RestApi(api_client)
+ name = 'name_example' # str | The name of the Persistent Volume Claim to be retrieved
+
+ try:
+ # Retrieve a Persistent Volume Claim from the Kubernetes repository.
+ api_response = api_instance.pvc_name_get(name)
+ print("The response of RestApi->pvc_name_get:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling RestApi->pvc_name_get: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **name** | **str**| The name of the Persistent Volume Claim to be retrieved |
+
+### Return type
+
+[**PersistentVolumeClaim**](PersistentVolumeClaim.md)
+
+### Authorization
+
+[bearerAuth](../README.md#bearerAuth)
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | The Persistent Volume Claim. | - |
+**404** | The Persistent Volume Claim was not found. | - |
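+
+Because a missing claim surfaces as a **404**, callers can separate "not found"
+from other failures by inspecting the exception; a sketch, assuming the generated
+`ApiException` exposes the HTTP status code as `status`:
+
+```python
+from cloudharness_cli.volumemanager.rest import ApiException
+
+try:
+    pvc = api_instance.pvc_name_get("my-claim")  # illustrative name
+except ApiException as e:
+    if e.status == 404:
+        print("No such Persistent Volume Claim")
+    else:
+        raise
+```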
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **pvc_post**
+> PersistentVolumeClaim pvc_post(persistent_volume_claim_create)
+
+Create a Persistent Volume Claim in Kubernetes
+
+### Example
+
+* Bearer (JWT) Authentication (bearerAuth):
+
+```python
+import os
+import cloudharness_cli.volumemanager
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
+from cloudharness_cli.volumemanager.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.volumemanager.Configuration(
+ host = "/api"
+)
+
+# The client must configure the authentication and authorization parameters
+# in accordance with the API server security policy.
+# Examples for each auth method are provided below, use the example that
+# satisfies your auth use case.
+
+# Configure Bearer authorization (JWT): bearerAuth
+configuration = cloudharness_cli.volumemanager.Configuration(
+ access_token = os.environ["BEARER_TOKEN"]
+)
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.volumemanager.RestApi(api_client)
+ persistent_volume_claim_create = cloudharness_cli.volumemanager.PersistentVolumeClaimCreate() # PersistentVolumeClaimCreate | The Persistent Volume Claim to create.
+
+ try:
+ # Create a Persistent Volume Claim in Kubernetes
+ api_response = api_instance.pvc_post(persistent_volume_claim_create)
+ print("The response of RestApi->pvc_post:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling RestApi->pvc_post: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **persistent_volume_claim_create** | [**PersistentVolumeClaimCreate**](PersistentVolumeClaimCreate.md)| The Persistent Volume Claim to create. |
+
+### Return type
+
+[**PersistentVolumeClaim**](PersistentVolumeClaim.md)
+
+### Authorization
+
+[bearerAuth](../README.md#bearerAuth)
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**201** | Save successful. | - |
+**400** | The Persistent Volume Claim already exists. | - |
+
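+A hedged sketch, building on the documented 400 ("already exists") behaviour; the helper name `get_or_create_pvc` and the example values are illustrative, not part of the generated API:
+
+```python
+import cloudharness_cli.volumemanager
+from cloudharness_cli.volumemanager.rest import ApiException
+
+def get_or_create_pvc(api_instance, name, size):
+    """Create the PVC, falling back to a lookup when it already exists (400)."""
+    create = cloudharness_cli.volumemanager.PersistentVolumeClaimCreate(name=name, size=size)
+    try:
+        return api_instance.pvc_post(create)
+    except ApiException as e:
+        if e.status == 400:
+            # 400 is documented as "already exists": fetch the existing claim instead
+            return api_instance.pvc_name_get(name)
+        raise
+
+# Usage (with an api_instance as in the example above):
+# pvc = get_or_create_pvc(api_instance, "pvc-1", "2Gi")
+```
+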
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/apis/tags/RestApi.md b/libraries/client/cloudharness_cli/docs/volumemanager/apis/tags/RestApi.md
deleted file mode 100644
index 17531ea29..000000000
--- a/libraries/client/cloudharness_cli/docs/volumemanager/apis/tags/RestApi.md
+++ /dev/null
@@ -1,217 +0,0 @@
-
-# cloudharness_cli.volumemanager.apis.tags.rest_api.RestApi
-
-All URIs are relative to */api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**pvc_name_get**](#pvc_name_get) | **get** /pvc/{name} | Retrieve a Persistent Volume Claim from the Kubernetes repository.
-[**pvc_post**](#pvc_post) | **post** /pvc | Create a Persistent Volume Claim in Kubernetes
-
-# **pvc_name_get**
-
-> PersistentVolumeClaim pvc_name_get(name)
-
-Retrieve a Persistent Volume Claim from the Kubernetes repository.
-
-### Example
-
-* Bearer (JWT) Authentication (bearerAuth):
-```python
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.apis.tags import rest_api
-from cloudharness_cli/volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.volumemanager.Configuration(
- host = "/api"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure Bearer authorization (JWT): bearerAuth
-configuration = cloudharness_cli.volumemanager.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-# Enter a context with an instance of the API client
-with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = rest_api.RestApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'name': "name_example",
- }
- try:
- # Retrieve a Persistent Volume Claim from the Kubernetes repository.
- api_response = api_instance.pvc_name_get(
- path_params=path_params,
- )
- pprint(api_response)
- except cloudharness_cli.volumemanager.ApiException as e:
- print("Exception when calling RestApi->pvc_name_get: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-name | NameSchema | |
-
-# NameSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#pvc_name_get.ApiResponseFor200) | The Persistent Volume Claim.
-404 | [ApiResponseFor404](#pvc_name_get.ApiResponseFor404) | The Persistent Volume Claim was not found.
-
-#### pvc_name_get.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**PersistentVolumeClaim**](../../models/PersistentVolumeClaim.md) | |
-
-
-#### pvc_name_get.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-[bearerAuth](../../../README.md#bearerAuth)
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **pvc_post**
-
-> PersistentVolumeClaim pvc_post(persistent_volume_claim_create)
-
-Create a Persistent Volume Claim in Kubernetes
-
-### Example
-
-* Bearer (JWT) Authentication (bearerAuth):
-```python
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.apis.tags import rest_api
-from cloudharness_cli/volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from cloudharness_cli/volumemanager.model.persistent_volume_claim_create import PersistentVolumeClaimCreate
-from pprint import pprint
-# Defining the host is optional and defaults to /api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.volumemanager.Configuration(
- host = "/api"
-)
-
-# The client must configure the authentication and authorization parameters
-# in accordance with the API server security policy.
-# Examples for each auth method are provided below, use the example that
-# satisfies your auth use case.
-
-# Configure Bearer authorization (JWT): bearerAuth
-configuration = cloudharness_cli.volumemanager.Configuration(
- access_token = 'YOUR_BEARER_TOKEN'
-)
-# Enter a context with an instance of the API client
-with cloudharness_cli.volumemanager.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = rest_api.RestApi(api_client)
-
- # example passing only required values which don't have defaults set
- body = PersistentVolumeClaimCreate(
- name="pvc-1",
- size="2Gi (see also https://github.com/kubernetes/community/blob/master/contributors/design-proposals/scheduling/resources.md#resource-quantities)",
- )
- try:
- # Create a Persistent Volume Claim in Kubernetes
- api_response = api_instance.pvc_post(
- body=body,
- )
- pprint(api_response)
- except cloudharness_cli.volumemanager.ApiException as e:
- print("Exception when calling RestApi->pvc_post: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-body | typing.Union[SchemaForRequestBodyApplicationJson] | required |
-content_type | str | optional, default is 'application/json' | Selects the schema and serialization of the request body
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### body
-
-# SchemaForRequestBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**PersistentVolumeClaimCreate**](../../models/PersistentVolumeClaimCreate.md) | |
-
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-201 | [ApiResponseFor201](#pvc_post.ApiResponseFor201) | Save successful.
-400 | [ApiResponseFor400](#pvc_post.ApiResponseFor400) | The Persistent Volume Claim already exists.
-
-#### pvc_post.ApiResponseFor201
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor201ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor201ResponseBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**PersistentVolumeClaim**](../../models/PersistentVolumeClaim.md) | |
-
-
-#### pvc_post.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-[bearerAuth](../../../README.md#bearerAuth)
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaim.md b/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaim.md
deleted file mode 100644
index dee966db4..000000000
--- a/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaim.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# cloudharness_cli.volumemanager.model.persistent_volume_claim.PersistentVolumeClaim
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**name** | str, | str, | Unique name for the Persisten Volume Claim | [optional]
-**namespace** | str, | str, | The namespace where the Persistent Volume Claim resides in | [optional]
-**accessmode** | str, | str, | The accessmode of the Persistent Volume Claim | [optional]
-**size** | str, | str, | The size of the Persistent Volume Claim. | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaimCreate.md b/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaimCreate.md
deleted file mode 100644
index b73300c2f..000000000
--- a/libraries/client/cloudharness_cli/docs/volumemanager/models/PersistentVolumeClaimCreate.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# cloudharness_cli.volumemanager.model.persistent_volume_claim_create.PersistentVolumeClaimCreate
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, | frozendict.frozendict, | |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**name** | str, | str, | Unique name for the Persisten Volume Claim to create. | [optional]
-**size** | str, | str, | The size of the Persistent Volume Claim to create. | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md b/libraries/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md
new file mode 100644
index 000000000..c8a9ce6d5
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md
@@ -0,0 +1,290 @@
+# cloudharness_cli.workflows.CreateAndAccessApi
+
+All URIs are relative to */api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**delete_operation**](CreateAndAccessApi.md#delete_operation) | **DELETE** /operations/{name} | deletes operation by name
+[**get_operation**](CreateAndAccessApi.md#get_operation) | **GET** /operations/{name} | get operation by name
+[**list_operations**](CreateAndAccessApi.md#list_operations) | **GET** /operations | lists operations
+[**log_operation**](CreateAndAccessApi.md#log_operation) | **GET** /operations/{name}/logs | get operation logs by name
+
+
+# **delete_operation**
+> delete_operation(name)
+
+deletes operation by name
+
+delete operation by its name
+
+### Example
+
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.workflows.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+ name = 'my-workflow' # str |
+
+ try:
+ # deletes operation by name
+ api_instance.delete_operation(name)
+ except Exception as e:
+ print("Exception when calling CreateAndAccessApi->delete_operation: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **name** | **str**| |
+
+### Return type
+
+void (empty response body)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: Not defined
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | delete OK | - |
+**404** | not found | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_operation**
+> str get_operation(name)
+
+get operation by name
+
+retrieves an operation by its name
+
+### Example
+
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.workflows.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+ name = 'name_example' # str |
+
+ try:
+ # get operation by name
+ api_response = api_instance.get_operation(name)
+ print("The response of CreateAndAccessApi->get_operation:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling CreateAndAccessApi->get_operation: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **name** | **str**| |
+
+### Return type
+
+**str**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | search results matching criteria | - |
+**400** | Bad request | - |
+**404** | not found | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **list_operations**
+> OperationSearchResult list_operations(status=status, previous_search_token=previous_search_token, limit=limit)
+
+lists operations
+
+see all operations for the user
+
+### Example
+
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+from cloudharness_cli.workflows.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.workflows.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+    status = OperationStatus.PENDING # OperationStatus | filter by status (optional) (default to Pending)
+ previous_search_token = '\"eyJ2IjoibWV0YS5rOHMuaW8vdjEiLCJydiI6NDUzMDMzOCwic3RhcnQiOiJoZWxsby13b3JsZC05YnE2ZFx1MDAwMCJ8\"' # str | continue previous search (pagination chunks) (optional)
+ limit = 10 # int | maximum number of records to return per page (optional) (default to 10)
+
+ try:
+ # lists operations
+ api_response = api_instance.list_operations(status=status, previous_search_token=previous_search_token, limit=limit)
+ print("The response of CreateAndAccessApi->list_operations:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling CreateAndAccessApi->list_operations: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **status** | [**OperationStatus**](OperationStatus.md)| filter by status | [optional] [default to Pending]
+ **previous_search_token** | **str**| continue previous search (pagination chunks) | [optional]
+ **limit** | **int**| maximum number of records to return per page | [optional] [default to 10]
+
+### Return type
+
+[**OperationSearchResult**](OperationSearchResult.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | search results matching criteria | - |
+**400** | bad input parameter | - |
+
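+A pagination sketch showing how `previous_search_token` and the result's `meta.continue_token` (see [SearchResultData](SearchResultData.md)) fit together; it assumes the server omits `continue_token` on the last chunk:
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+
+configuration = cloudharness_cli.workflows.Configuration(
+    host = "/api"
+)
+
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+    api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+    token = None
+    while True:
+        page = api_instance.list_operations(
+            status=OperationStatus.SUCCEEDED,
+            previous_search_token=token,
+            limit=10,
+        )
+        for operation in page.items or []:
+            print(operation.name, operation.status)
+        # meta.continue_token carries the cursor for the next chunk
+        token = page.meta.continue_token if page.meta else None
+        if not token:
+            break
+```
+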
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **log_operation**
+> str log_operation(name)
+
+get operation logs by name
+
+retrieves an operation log by its name
+
+### Example
+
+
+```python
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to /api
+# See configuration.py for a list of all supported configuration parameters.
+configuration = cloudharness_cli.workflows.Configuration(
+ host = "/api"
+)
+
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+ # Create an instance of the API class
+ api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+ name = '\"my-operation\"' # str |
+
+ try:
+ # get operation by name
+ api_response = api_instance.log_operation(name)
+ print("The response of CreateAndAccessApi->log_operation:\n")
+ pprint(api_response)
+ except Exception as e:
+ print("Exception when calling CreateAndAccessApi->log_operation: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **name** | **str**| |
+
+### Return type
+
+**str**
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: text/plain
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | search results matching criteria | - |
+**400** | Bad request | - |
+**404** | not found | - |
+
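+A usage sketch that waits for an operation's logs, treating the documented 404 as "not available yet"; the retry budget and polling interval are illustrative:
+
+```python
+import time
+
+import cloudharness_cli.workflows
+from cloudharness_cli.workflows.rest import ApiException
+
+configuration = cloudharness_cli.workflows.Configuration(
+    host = "/api"
+)
+
+with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
+    api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client)
+    for _ in range(30):  # illustrative retry budget
+        try:
+            print(api_instance.log_operation('my-operation'))
+            break
+        except ApiException as e:
+            if e.status != 404:
+                raise
+            time.sleep(2)  # illustrative polling interval
+```
+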
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/libraries/client/cloudharness_cli/docs/workflows/Operation.md b/libraries/client/cloudharness_cli/docs/workflows/Operation.md
new file mode 100644
index 000000000..8c580ac43
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/workflows/Operation.md
@@ -0,0 +1,34 @@
+# Operation
+
+represents the status of a distributed API call
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**message** | **str** | usually set when an error occurred | [optional]
+**name** | **str** | operation name | [optional]
+**create_time** | **datetime** | | [optional] [readonly]
+**status** | [**OperationStatus**](OperationStatus.md) | | [optional] [default to OperationStatus.PENDING]
+**workflow** | **str** | low level representation as an Argo json | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.workflows.models.operation import Operation
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Operation from a JSON string
+operation_instance = Operation.from_json(json)
+# print the JSON string representation of the object
+print(operation_instance.to_json())
+
+# convert the object into a dict
+operation_dict = operation_instance.to_dict()
+# create an instance of Operation from a dict
+operation_from_dict = Operation.from_dict(operation_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/workflows/OperationSearchResult.md b/libraries/client/cloudharness_cli/docs/workflows/OperationSearchResult.md
new file mode 100644
index 000000000..d9bedbb46
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/workflows/OperationSearchResult.md
@@ -0,0 +1,31 @@
+# OperationSearchResult
+
+a list of operations with metadata about the result
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**meta** | [**SearchResultData**](SearchResultData.md) | | [optional]
+**items** | [**List[Operation]**](Operation.md) | | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of OperationSearchResult from a JSON string
+operation_search_result_instance = OperationSearchResult.from_json(json)
+# print the JSON string representation of the object
+print(operation_search_result_instance.to_json())
+
+# convert the object into a dict
+operation_search_result_dict = operation_search_result_instance.to_dict()
+# create an instance of OperationSearchResult from a dict
+operation_search_result_from_dict = OperationSearchResult.from_dict(operation_search_result_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/workflows/OperationStatus.md b/libraries/client/cloudharness_cli/docs/workflows/OperationStatus.md
new file mode 100644
index 000000000..709563890
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/workflows/OperationStatus.md
@@ -0,0 +1,20 @@
+# OperationStatus
+
+
+## Enum
+
+* `PENDING` (value: `'Pending'`)
+
+* `RUNNING` (value: `'Running'`)
+
+* `ERROR` (value: `'Error'`)
+
+* `SUCCEEDED` (value: `'Succeeded'`)
+
+* `SKIPPED` (value: `'Skipped'`)
+
+* `FAILED` (value: `'Failed'`)
+
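+The spec does not group these values, but a common reading of the Argo-style lifecycle treats everything except `PENDING` and `RUNNING` as terminal; a small sketch under that assumption:
+
+```python
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+
+# Assumption: these four values mean the operation will not progress further
+TERMINAL = {
+    OperationStatus.SUCCEEDED,
+    OperationStatus.ERROR,
+    OperationStatus.FAILED,
+    OperationStatus.SKIPPED,
+}
+
+def is_finished(status: OperationStatus) -> bool:
+    return status in TERMINAL
+
+assert is_finished(OperationStatus.SUCCEEDED)
+assert not is_finished(OperationStatus.RUNNING)
+```
+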
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/workflows/SearchResultData.md b/libraries/client/cloudharness_cli/docs/workflows/SearchResultData.md
new file mode 100644
index 000000000..f7978e00b
--- /dev/null
+++ b/libraries/client/cloudharness_cli/docs/workflows/SearchResultData.md
@@ -0,0 +1,30 @@
+# SearchResultData
+
+describes a search
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**continue_token** | **str** | token to use for pagination | [optional]
+
+## Example
+
+```python
+from cloudharness_cli.workflows.models.search_result_data import SearchResultData
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of SearchResultData from a JSON string
+search_result_data_instance = SearchResultData.from_json(json)
+# print the JSON string representation of the object
+print(search_result_data_instance.to_json())
+
+# convert the object into a dict
+search_result_data_dict = search_result_data_instance.to_dict()
+# create an instance of SearchResultData from a dict
+search_result_data_from_dict = SearchResultData.from_dict(search_result_data_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/libraries/client/cloudharness_cli/docs/workflows/apis/tags/CreateAndAccessApi.md b/libraries/client/cloudharness_cli/docs/workflows/apis/tags/CreateAndAccessApi.md
deleted file mode 100644
index 9696ac40c..000000000
--- a/libraries/client/cloudharness_cli/docs/workflows/apis/tags/CreateAndAccessApi.md
+++ /dev/null
@@ -1,416 +0,0 @@
-
-# cloudharness_cli.workflows.apis.tags.create_and_access_api.CreateAndAccessApi
-
-All URIs are relative to *https://workflows.cloudharness.metacell.us/api*
-
-Method | HTTP request | Description
-------------- | ------------- | -------------
-[**delete_operation**](#delete_operation) | **delete** /operations/{name} | deletes operation by name
-[**get_operation**](#get_operation) | **get** /operations/{name} | get operation by name
-[**list_operations**](#list_operations) | **get** /operations | lists operations
-[**log_operation**](#log_operation) | **get** /operations/{name}/logs | get operation by name
-
-# **delete_operation**
-
-> delete_operation(name)
-
-deletes operation by name
-
-delete operation by its name
-
-### Example
-
-```python
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.apis.tags import create_and_access_api
-from pprint import pprint
-# Defining the host is optional and defaults to https://workflows.cloudharness.metacell.us/api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.workflows.Configuration(
- host = "https://workflows.cloudharness.metacell.us/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = create_and_access_api.CreateAndAccessApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'name': "name_example",
- }
- try:
- # deletes operation by name
- api_response = api_instance.delete_operation(
- path_params=path_params,
- )
- except cloudharness_cli.workflows.ApiException as e:
- print("Exception when calling CreateAndAccessApi->delete_operation: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-name | NameSchema | |
-
-# NameSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#delete_operation.ApiResponseFor200) | delete OK
-404 | [ApiResponseFor404](#delete_operation.ApiResponseFor404) | not found
-
-#### delete_operation.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-#### delete_operation.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **get_operation**
-
-> [Operation] get_operation(name)
-
-get operation by name
-
-retrieves an operation by its name
-
-### Example
-
-```python
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.apis.tags import create_and_access_api
-from cloudharness_cli/workflows.model.operation import Operation
-from pprint import pprint
-# Defining the host is optional and defaults to https://workflows.cloudharness.metacell.us/api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.workflows.Configuration(
- host = "https://workflows.cloudharness.metacell.us/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = create_and_access_api.CreateAndAccessApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'name': "name_example",
- }
- try:
- # get operation by name
- api_response = api_instance.get_operation(
- path_params=path_params,
- )
- pprint(api_response)
- except cloudharness_cli.workflows.ApiException as e:
- print("Exception when calling CreateAndAccessApi->get_operation: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-name | NameSchema | |
-
-# NameSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#get_operation.ApiResponseFor200) | search results matching criteria
-404 | [ApiResponseFor404](#get_operation.ApiResponseFor404) | not found
-
-#### get_operation.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-list, tuple, | tuple, | |
-
-### Tuple Items
-Class Name | Input Type | Accessed Type | Description | Notes
-------------- | ------------- | ------------- | ------------- | -------------
-[**Operation**]({{complexTypePrefix}}Operation.md) | [**Operation**]({{complexTypePrefix}}Operation.md) | [**Operation**]({{complexTypePrefix}}Operation.md) | |
-
-#### get_operation.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **list_operations**
-
-> OperationSearchResult list_operations()
-
-lists operations
-
-see all operations for the user
-
-### Example
-
-```python
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.apis.tags import create_and_access_api
-from cloudharness_cli/workflows.model.operation_search_result import OperationSearchResult
-from cloudharness_cli/workflows.model.operation_status import OperationStatus
-from pprint import pprint
-# Defining the host is optional and defaults to https://workflows.cloudharness.metacell.us/api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.workflows.Configuration(
- host = "https://workflows.cloudharness.metacell.us/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = create_and_access_api.CreateAndAccessApi(api_client)
-
- # example passing only optional values
- query_params = {
- 'status': OperationStatus("QUEUED"),
- 'previous_search_token': "previous_search_token_example",
- 'limit': 10,
- }
- try:
- # lists operations
- api_response = api_instance.list_operations(
- query_params=query_params,
- )
- pprint(api_response)
- except cloudharness_cli.workflows.ApiException as e:
- print("Exception when calling CreateAndAccessApi->list_operations: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-query_params | RequestQueryParams | |
-accept_content_types | typing.Tuple[str] | default is ('application/json', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### query_params
-#### RequestQueryParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-status | StatusSchema | | optional
-previous_search_token | PreviousSearchTokenSchema | | optional
-limit | LimitSchema | | optional
-
-
-# StatusSchema
-Type | Description | Notes
-------------- | ------------- | -------------
-[**OperationStatus**](../../models/OperationStatus.md) | |
-
-
-# PreviousSearchTokenSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-# LimitSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-decimal.Decimal, int, | decimal.Decimal, | | if omitted the server will use the default value of 10
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#list_operations.ApiResponseFor200) | search results matching criteria
-400 | [ApiResponseFor400](#list_operations.ApiResponseFor400) | bad input parameter
-
-#### list_operations.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyApplicationJson, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyApplicationJson
-Type | Description | Notes
-------------- | ------------- | -------------
-[**OperationSearchResult**](../../models/OperationSearchResult.md) | |
-
-
-#### list_operations.ApiResponseFor400
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
-# **log_operation**
-
-> str log_operation(name)
-
-get operation by name
-
-retrieves an operation log by its name
-
-### Example
-
-```python
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.apis.tags import create_and_access_api
-from pprint import pprint
-# Defining the host is optional and defaults to https://workflows.cloudharness.metacell.us/api
-# See configuration.py for a list of all supported configuration parameters.
-configuration = cloudharness_cli.workflows.Configuration(
- host = "https://workflows.cloudharness.metacell.us/api"
-)
-
-# Enter a context with an instance of the API client
-with cloudharness_cli.workflows.ApiClient(configuration) as api_client:
- # Create an instance of the API class
- api_instance = create_and_access_api.CreateAndAccessApi(api_client)
-
- # example passing only required values which don't have defaults set
- path_params = {
- 'name': "name_example",
- }
- try:
- # get operation by name
- api_response = api_instance.log_operation(
- path_params=path_params,
- )
- pprint(api_response)
- except cloudharness_cli.workflows.ApiException as e:
- print("Exception when calling CreateAndAccessApi->log_operation: %s\n" % e)
-```
-### Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-path_params | RequestPathParams | |
-accept_content_types | typing.Tuple[str] | default is ('text/plain', ) | Tells the server the content type(s) that are accepted by the client
-stream | bool | default is False | if True then the response.content will be streamed and loaded from a file like object. When downloading a file, set this to True to force the code to deserialize the content to a FileSchema file
-timeout | typing.Optional[typing.Union[int, typing.Tuple]] | default is None | the timeout used by the rest client
-skip_deserialization | bool | default is False | when True, headers and body will be unset and an instance of api_client.ApiResponseWithoutDeserialization will be returned
-
-### path_params
-#### RequestPathParams
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-name | NameSchema | |
-
-# NameSchema
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-### Return Types, Responses
-
-Code | Class | Description
-------------- | ------------- | -------------
-n/a | api_client.ApiResponseWithoutDeserialization | When skip_deserialization is True this response is returned
-200 | [ApiResponseFor200](#log_operation.ApiResponseFor200) | search results matching criteria
-404 | [ApiResponseFor404](#log_operation.ApiResponseFor404) | not found
-
-#### log_operation.ApiResponseFor200
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | typing.Union[SchemaFor200ResponseBodyTextPlain, ] | |
-headers | Unset | headers were not defined |
-
-# SchemaFor200ResponseBodyTextPlain
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | |
-
-#### log_operation.ApiResponseFor404
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
-response | urllib3.HTTPResponse | Raw response |
-body | Unset | body was not defined |
-headers | Unset | headers were not defined |
-
-### Authorization
-
-No authorization required
-
-[[Back to top]](#__pageTop) [[Back to API list]](../../../README.md#documentation-for-api-endpoints) [[Back to Model list]](../../../README.md#documentation-for-models) [[Back to README]](../../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/workflows/models/Operation.md b/libraries/client/cloudharness_cli/docs/workflows/models/Operation.md
deleted file mode 100644
index 8f0418981..000000000
--- a/libraries/client/cloudharness_cli/docs/workflows/models/Operation.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# cloudharness_cli.workflows.model.operation.Operation
-
-represents the status of a distributed API call
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | represents the status of a distributed API call |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**message** | str, | str, | usually set when an error occurred | [optional]
-**name** | str, | str, | operation name | [optional]
-**createTime** | str, datetime, | str, | | [optional] value must conform to RFC-3339 date-time
-**status** | [**OperationStatus**](OperationStatus.md) | [**OperationStatus**](OperationStatus.md) | | [optional]
-**workflow** | str, | str, | low level representation as an Argo json | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/workflows/models/OperationSearchResult.md b/libraries/client/cloudharness_cli/docs/workflows/models/OperationSearchResult.md
deleted file mode 100644
index 59f999cc7..000000000
--- a/libraries/client/cloudharness_cli/docs/workflows/models/OperationSearchResult.md
+++ /dev/null
@@ -1,30 +0,0 @@
-# cloudharness_cli.workflows.model.operation_search_result.OperationSearchResult
-
-a list of operations with meta data about the result
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | a list of operations with meta data about the result |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**meta** | [**SearchResultData**](SearchResultData.md) | [**SearchResultData**](SearchResultData.md) | | [optional]
-**[items](#items)** | list, tuple, | tuple, | | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-# items
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-list, tuple, | tuple, | |
-
-### Tuple Items
-Class Name | Input Type | Accessed Type | Description | Notes
-------------- | ------------- | ------------- | ------------- | -------------
-[**Operation**](Operation.md) | [**Operation**](Operation.md) | [**Operation**](Operation.md) | |
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/workflows/models/OperationStatus.md b/libraries/client/cloudharness_cli/docs/workflows/models/OperationStatus.md
deleted file mode 100644
index 02033a2d9..000000000
--- a/libraries/client/cloudharness_cli/docs/workflows/models/OperationStatus.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# cloudharness_cli.workflows.model.operation_status.OperationStatus
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-str, | str, | | must be one of ["Pending", "Running", "Error", "Succeeded", "Skipped", "Failed", ] if omitted the server will use the default value of "Pending"
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/docs/workflows/models/SearchResultData.md b/libraries/client/cloudharness_cli/docs/workflows/models/SearchResultData.md
deleted file mode 100644
index a6f955d58..000000000
--- a/libraries/client/cloudharness_cli/docs/workflows/models/SearchResultData.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# cloudharness_cli.workflows.model.search_result_data.SearchResultData
-
-describes a search
-
-## Model Type Info
-Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | -------------
-dict, frozendict.frozendict, str, date, datetime, uuid.UUID, int, float, decimal.Decimal, bool, None, list, tuple, bytes, io.FileIO, io.BufferedReader, | frozendict.frozendict, str, decimal.Decimal, BoolClass, NoneClass, tuple, bytes, FileIO | describes a search |
-
-### Dictionary Keys
-Key | Input Type | Accessed Type | Description | Notes
------------- | ------------- | ------------- | ------------- | -------------
-**continueToken** | str, | str, | token to use for pagination | [optional]
-**any_string_name** | dict, frozendict.frozendict, str, date, datetime, int, float, bool, decimal.Decimal, None, list, tuple, bytes, io.FileIO, io.BufferedReader | frozendict.frozendict, str, BoolClass, decimal.Decimal, NoneClass, tuple, bytes, FileIO | any string name can be used but the value must be the correct type | [optional]
-
-[[Back to Model list]](../../README.md#documentation-for-models) [[Back to API list]](../../README.md#documentation-for-api-endpoints) [[Back to README]](../../README.md)
-
diff --git a/libraries/client/cloudharness_cli/requirements.txt b/libraries/client/cloudharness_cli/requirements.txt
index 8695929d9..cc85509ec 100644
--- a/libraries/client/cloudharness_cli/requirements.txt
+++ b/libraries/client/cloudharness_cli/requirements.txt
@@ -1,5 +1,5 @@
-certifi
-frozendict
-python-dateutil
-typing_extensions
-urllib3
\ No newline at end of file
+python_dateutil >= 2.5.3
+setuptools >= 21.0.0
+urllib3 >= 1.25.3, < 2.1.0
+pydantic >= 2
+typing-extensions >= 4.7.1
diff --git a/libraries/client/cloudharness_cli/setup.py b/libraries/client/cloudharness_cli/setup.py
index 890b95c25..2d9766766 100644
--- a/libraries/client/cloudharness_cli/setup.py
+++ b/libraries/client/cloudharness_cli/setup.py
@@ -20,7 +20,8 @@
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
-REQUIRES = ["urllib3 >= 1.26.8", "six >= 1.10", "certifi", "python-dateutil"]
+REQUIRES = ["urllib3 >= 1.25.3, < 2.1.0", "python_dateutil >= 2.5.3", "pydantic >= 2", "typing-extensions >= 4.7.1"]
+
setup(
name=NAME,
@@ -37,4 +38,4 @@
long_description="""\
CloudHarness Python API Client # noqa: E501
"""
-)
\ No newline at end of file
+)
diff --git a/libraries/client/cloudharness_cli/test-requirements.txt b/libraries/client/cloudharness_cli/test-requirements.txt
index a52b63cb7..8e6d8cb13 100644
--- a/libraries/client/cloudharness_cli/test-requirements.txt
+++ b/libraries/client/cloudharness_cli/test-requirements.txt
@@ -1,3 +1,5 @@
-pytest
-pytest-cov
-pytest-randomly
\ No newline at end of file
+pytest~=7.1.3
+pytest-cov>=2.8.1
+pytest-randomly>=3.12.0
+mypy>=1.4.1
+types-python-dateutil>=2.8.19
diff --git a/libraries/client/cloudharness_cli/test/common/test_paths/__init__.py b/libraries/client/cloudharness_cli/test/common/test_paths/__init__.py
deleted file mode 100644
index 1309632d3..000000000
--- a/libraries/client/cloudharness_cli/test/common/test_paths/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import json
-import typing
-
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-
-class ApiTestMixin:
- json_content_type = 'application/json'
- user_agent = 'OpenAPI-Generator/1.0.0/python'
-
- @classmethod
- def assert_pool_manager_request_called_with(
- cls,
- mock_request,
- url: str,
- method: str = 'POST',
- body: typing.Optional[bytes] = None,
- content_type: typing.Optional[str] = None,
- accept_content_type: typing.Optional[str] = None,
- stream: bool = False,
- ):
- headers = {
- 'User-Agent': cls.user_agent
- }
- if accept_content_type:
- headers['Accept'] = accept_content_type
- if content_type:
- headers['Content-Type'] = content_type
- kwargs = dict(
- headers=HTTPHeaderDict(headers),
- preload_content=not stream,
- timeout=None,
- )
- if content_type and method != 'GET':
- kwargs['body'] = body
- mock_request.assert_called_with(
- method,
- url,
- **kwargs
- )
-
- @staticmethod
- def headers_for_content_type(content_type: str) -> typing.Dict[str, str]:
- return {'content-type': content_type}
-
- @classmethod
- def response(
- cls,
- body: typing.Union[str, bytes],
- status: int = 200,
- content_type: str = json_content_type,
- headers: typing.Optional[typing.Dict[str, str]] = None,
- preload_content: bool = True
- ) -> urllib3.HTTPResponse:
- if headers is None:
- headers = {}
- headers.update(cls.headers_for_content_type(content_type))
- return urllib3.HTTPResponse(
- body,
- headers=headers,
- status=status,
- preload_content=preload_content
- )
-
- @staticmethod
- def json_bytes(in_data: typing.Any) -> bytes:
- return json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode('utf-8')
diff --git a/libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/test_get.py b/libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/test_get.py
deleted file mode 100644
index 5ee88ae2b..000000000
--- a/libraries/client/cloudharness_cli/test/common/test_paths/test_accounts_config/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.common
-from cloudharness_cli.common.paths.accounts_config import get # noqa: E501
-from cloudharness_cli.common import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestAccountsConfig(ApiTestMixin, unittest.TestCase):
- """
- AccountsConfig unit test stubs
- Gets the config for logging in into accounts # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/test_get.py b/libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/test_get.py
deleted file mode 100644
index 37e0c1b1a..000000000
--- a/libraries/client/cloudharness_cli/test/common/test_paths/test_sentry_getdsn_appname/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.common
-from cloudharness_cli.common.paths.sentry_getdsn_appname import get # noqa: E501
-from cloudharness_cli.common import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSentryGetdsnAppname(ApiTestMixin, unittest.TestCase):
- """
- SentryGetdsnAppname unit test stubs
- Gets the Sentry DSN for a given application # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/__init__.py b/libraries/client/cloudharness_cli/test/samples/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_models/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_models/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202.py b/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202.py
deleted file mode 100644
index dcb1feee7..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.model.inline_response202 import InlineResponse202
-from cloudharness_cli.samples import configuration
-
-
-class TestInlineResponse202(unittest.TestCase):
- """InlineResponse202 unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202_task.py b/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202_task.py
deleted file mode 100644
index 78e6091e7..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_models/test_inline_response202_task.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.model.inline_response202_task import InlineResponse202Task
-from cloudharness_cli.samples import configuration
-
-
-class TestInlineResponse202Task(unittest.TestCase):
- """InlineResponse202Task unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_models/test_sample_resource.py b/libraries/client/cloudharness_cli/test/samples/test_models/test_sample_resource.py
deleted file mode 100644
index f9666f6c0..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_models/test_sample_resource.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- CloudHarness Sample API
-
- CloudHarness Sample api # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.model.sample_resource import SampleResource
-from cloudharness_cli.samples import configuration
-
-
-class TestSampleResource(unittest.TestCase):
- """SampleResource unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/__init__.py
deleted file mode 100644
index 1309632d3..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import json
-import typing
-
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-
-class ApiTestMixin:
- json_content_type = 'application/json'
- user_agent = 'OpenAPI-Generator/1.0.0/python'
-
- @classmethod
- def assert_pool_manager_request_called_with(
- cls,
- mock_request,
- url: str,
- method: str = 'POST',
- body: typing.Optional[bytes] = None,
- content_type: typing.Optional[str] = None,
- accept_content_type: typing.Optional[str] = None,
- stream: bool = False,
- ):
- headers = {
- 'User-Agent': cls.user_agent
- }
- if accept_content_type:
- headers['Accept'] = accept_content_type
- if content_type:
- headers['Content-Type'] = content_type
- kwargs = dict(
- headers=HTTPHeaderDict(headers),
- preload_content=not stream,
- timeout=None,
- )
- if content_type and method != 'GET':
- kwargs['body'] = body
- mock_request.assert_called_with(
- method,
- url,
- **kwargs
- )
-
- @staticmethod
- def headers_for_content_type(content_type: str) -> typing.Dict[str, str]:
- return {'content-type': content_type}
-
- @classmethod
- def response(
- cls,
- body: typing.Union[str, bytes],
- status: int = 200,
- content_type: str = json_content_type,
- headers: typing.Optional[typing.Dict[str, str]] = None,
- preload_content: bool = True
- ) -> urllib3.HTTPResponse:
- if headers is None:
- headers = {}
- headers.update(cls.headers_for_content_type(content_type))
- return urllib3.HTTPResponse(
- body,
- headers=headers,
- status=status,
- preload_content=preload_content
- )
-
- @staticmethod
- def json_bytes(in_data: typing.Any) -> bytes:
- return json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode('utf-8')
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_error/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_error/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_error/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_error/test_get.py
deleted file mode 100644
index 8f79b1c3a..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_error/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.error import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestError(ApiTestMixin, unittest.TestCase):
- """
- Error unit test stubs
- test sentry is working # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/test_get.py
deleted file mode 100644
index ad8c32ac3..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_async/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.operation_async import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationAsync(ApiTestMixin, unittest.TestCase):
- """
- OperationAsync unit test stubs
- Send an asynchronous operation # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 202
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/test_get.py
deleted file mode 100644
index d37e0bd3a..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.operation_sync import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationSync(ApiTestMixin, unittest.TestCase):
- """
- OperationSync unit test stubs
- Send a synchronous operation # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/test_get.py
deleted file mode 100644
index b38ce8138..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_operation_sync_results/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.operation_sync_results import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationSyncResults(ApiTestMixin, unittest.TestCase):
- """
- OperationSyncResults unit test stubs
- Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/test_get.py
deleted file mode 100644
index 1a79170f9..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_ping/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.ping import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestPing(ApiTestMixin, unittest.TestCase):
- """
- Ping unit test stubs
- test the application is up # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_get.py
deleted file mode 100644
index 3e24f5dd1..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.sampleresources import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSampleresources(ApiTestMixin, unittest.TestCase):
- """
- Sampleresources unit test stubs
- List All SampleResources # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_post.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_post.py
deleted file mode 100644
index 7f25bf272..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources/test_post.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.sampleresources import post # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSampleresources(ApiTestMixin, unittest.TestCase):
- """
- Sampleresources unit test stubs
- Create a SampleResource # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = post.ApiForpost(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 201
- response_body = ''
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_delete.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_delete.py
deleted file mode 100644
index 8a005589c..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_delete.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id import delete # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSampleresourcesSampleresourceId(ApiTestMixin, unittest.TestCase):
- """
- SampleresourcesSampleresourceId unit test stubs
- Delete a SampleResource # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = delete.ApiFordelete(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 204
- response_body = ''
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_get.py
deleted file mode 100644
index eb998be81..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSampleresourcesSampleresourceId(ApiTestMixin, unittest.TestCase):
- """
- SampleresourcesSampleresourceId unit test stubs
- Get a SampleResource # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_put.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_put.py
deleted file mode 100644
index 5a6f0d667..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_sampleresources_sampleresource_id/test_put.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.sampleresources_sampleresource_id import put # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestSampleresourcesSampleresourceId(ApiTestMixin, unittest.TestCase):
- """
- SampleresourcesSampleresourceId unit test stubs
- Update a SampleResource # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = put.ApiForput(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 202
- response_body = ''
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/test_get.py
deleted file mode 100644
index 285fc19b3..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.valid import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestValid(ApiTestMixin, unittest.TestCase):
- """
- Valid unit test stubs
- Check if the token is valid. Get a token by logging into the base url # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/__init__.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/test_get.py b/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/test_get.py
deleted file mode 100644
index 9e429e679..000000000
--- a/libraries/client/cloudharness_cli/test/samples/test_paths/test_valid_cookie/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.samples
-from cloudharness_cli.samples.paths.valid_cookie import get # noqa: E501
-from cloudharness_cli.samples import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestValidCookie(ApiTestMixin, unittest.TestCase):
- """
- ValidCookie unit test stubs
- Check if the token is valid. Get a token by logging into the base url # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_models/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/test_models/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim.py b/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim.py
deleted file mode 100644
index db8d7496f..000000000
--- a/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.model.persistent_volume_claim import PersistentVolumeClaim
-from cloudharness_cli.volumemanager import configuration
-
-
-class TestPersistentVolumeClaim(unittest.TestCase):
- """PersistentVolumeClaim unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim_create.py b/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim_create.py
deleted file mode 100644
index 72080fbbf..000000000
--- a/libraries/client/cloudharness_cli/test/volumemanager/test_models/test_persistent_volume_claim_create.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# coding: utf-8
-
-"""
- Volumes manager API
-
- CloudHarness Volumes manager API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.model.persistent_volume_claim_create import PersistentVolumeClaimCreate
-from cloudharness_cli.volumemanager import configuration
-
-
-class TestPersistentVolumeClaimCreate(unittest.TestCase):
- """PersistentVolumeClaimCreate unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/test_paths/__init__.py
deleted file mode 100644
index 1309632d3..000000000
--- a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import json
-import typing
-
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-
-class ApiTestMixin:
- json_content_type = 'application/json'
- user_agent = 'OpenAPI-Generator/1.0.0/python'
-
- @classmethod
- def assert_pool_manager_request_called_with(
- cls,
- mock_request,
- url: str,
- method: str = 'POST',
- body: typing.Optional[bytes] = None,
- content_type: typing.Optional[str] = None,
- accept_content_type: typing.Optional[str] = None,
- stream: bool = False,
- ):
- headers = {
- 'User-Agent': cls.user_agent
- }
- if accept_content_type:
- headers['Accept'] = accept_content_type
- if content_type:
- headers['Content-Type'] = content_type
- kwargs = dict(
- headers=HTTPHeaderDict(headers),
- preload_content=not stream,
- timeout=None,
- )
- if content_type and method != 'GET':
- kwargs['body'] = body
- mock_request.assert_called_with(
- method,
- url,
- **kwargs
- )
-
- @staticmethod
- def headers_for_content_type(content_type: str) -> typing.Dict[str, str]:
- return {'content-type': content_type}
-
- @classmethod
- def response(
- cls,
- body: typing.Union[str, bytes],
- status: int = 200,
- content_type: str = json_content_type,
- headers: typing.Optional[typing.Dict[str, str]] = None,
- preload_content: bool = True
- ) -> urllib3.HTTPResponse:
- if headers is None:
- headers = {}
- headers.update(cls.headers_for_content_type(content_type))
- return urllib3.HTTPResponse(
- body,
- headers=headers,
- status=status,
- preload_content=preload_content
- )
-
- @staticmethod
- def json_bytes(in_data: typing.Any) -> bytes:
- return json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode('utf-8')
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/test_post.py b/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/test_post.py
deleted file mode 100644
index 057c0ec88..000000000
--- a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc/test_post.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.paths.pvc import post # noqa: E501
-from cloudharness_cli.volumemanager import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestPvc(ApiTestMixin, unittest.TestCase):
- """
- Pvc unit test stubs
- Create a Persistent Volume Claim in Kubernetes # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = post.ApiForpost(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 201
-
-
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/test_get.py b/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/test_get.py
deleted file mode 100644
index 295f33af8..000000000
--- a/libraries/client/cloudharness_cli/test/volumemanager/test_paths/test_pvc_name/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.volumemanager
-from cloudharness_cli.volumemanager.paths.pvc_name import get # noqa: E501
-from cloudharness_cli.volumemanager import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestPvcName(ApiTestMixin, unittest.TestCase):
- """
- PvcName unit test stubs
- Retrieve a Persistent Volume Claim from the Kubernetes repository. # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/__init__.py b/libraries/client/cloudharness_cli/test/workflows/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_models/__init__.py b/libraries/client/cloudharness_cli/test/workflows/test_models/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation.py b/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation.py
deleted file mode 100644
index 1aa716b1e..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.model.operation import Operation
-from cloudharness_cli.workflows import configuration
-
-
-class TestOperation(unittest.TestCase):
- """Operation unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_search_result.py b/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_search_result.py
deleted file mode 100644
index b8ddad877..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_search_result.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.model.operation_search_result import OperationSearchResult
-from cloudharness_cli.workflows import configuration
-
-
-class TestOperationSearchResult(unittest.TestCase):
- """OperationSearchResult unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_status.py b/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_status.py
deleted file mode 100644
index 8b4bbf958..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_models/test_operation_status.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.model.operation_status import OperationStatus
-from cloudharness_cli.workflows import configuration
-
-
-class TestOperationStatus(unittest.TestCase):
- """OperationStatus unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_models/test_search_result_data.py b/libraries/client/cloudharness_cli/test/workflows/test_models/test_search_result_data.py
deleted file mode 100644
index e3af64024..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_models/test_search_result_data.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# coding: utf-8
-
-"""
- Workflows API
-
- Workflows API # noqa: E501
-
- The version of the OpenAPI document: 0.1.0
- Contact: cloudharness@metacell.us
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.model.search_result_data import SearchResultData
-from cloudharness_cli.workflows import configuration
-
-
-class TestSearchResultData(unittest.TestCase):
- """SearchResultData unit test stubs"""
- _configuration = configuration.Configuration()
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/__init__.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/__init__.py
deleted file mode 100644
index 1309632d3..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_paths/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import json
-import typing
-
-import urllib3
-from urllib3._collections import HTTPHeaderDict
-
-
-class ApiTestMixin:
- json_content_type = 'application/json'
- user_agent = 'OpenAPI-Generator/1.0.0/python'
-
- @classmethod
- def assert_pool_manager_request_called_with(
- cls,
- mock_request,
- url: str,
- method: str = 'POST',
- body: typing.Optional[bytes] = None,
- content_type: typing.Optional[str] = None,
- accept_content_type: typing.Optional[str] = None,
- stream: bool = False,
- ):
- headers = {
- 'User-Agent': cls.user_agent
- }
- if accept_content_type:
- headers['Accept'] = accept_content_type
- if content_type:
- headers['Content-Type'] = content_type
- kwargs = dict(
- headers=HTTPHeaderDict(headers),
- preload_content=not stream,
- timeout=None,
- )
- if content_type and method != 'GET':
- kwargs['body'] = body
- mock_request.assert_called_with(
- method,
- url,
- **kwargs
- )
-
- @staticmethod
- def headers_for_content_type(content_type: str) -> typing.Dict[str, str]:
- return {'content-type': content_type}
-
- @classmethod
- def response(
- cls,
- body: typing.Union[str, bytes],
- status: int = 200,
- content_type: str = json_content_type,
- headers: typing.Optional[typing.Dict[str, str]] = None,
- preload_content: bool = True
- ) -> urllib3.HTTPResponse:
- if headers is None:
- headers = {}
- headers.update(cls.headers_for_content_type(content_type))
- return urllib3.HTTPResponse(
- body,
- headers=headers,
- status=status,
- preload_content=preload_content
- )
-
- @staticmethod
- def json_bytes(in_data: typing.Any) -> bytes:
- return json.dumps(in_data, separators=(",", ":"), ensure_ascii=False).encode('utf-8')
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/__init__.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/test_get.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/test_get.py
deleted file mode 100644
index 016df839f..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.paths.operations import get # noqa: E501
-from cloudharness_cli.workflows import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperations(ApiTestMixin, unittest.TestCase):
- """
- Operations unit test stubs
- lists operations # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/__init__.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_delete.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_delete.py
deleted file mode 100644
index 9a16227f9..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_delete.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.paths.operations_name import delete # noqa: E501
-from cloudharness_cli.workflows import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationsName(ApiTestMixin, unittest.TestCase):
- """
- OperationsName unit test stubs
- deletes operation by name # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = delete.ApiFordelete(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
- response_body = ''
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_get.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_get.py
deleted file mode 100644
index a8b59d82a..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.paths.operations_name import get # noqa: E501
-from cloudharness_cli.workflows import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationsName(ApiTestMixin, unittest.TestCase):
- """
- OperationsName unit test stubs
- get operation by name # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/__init__.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/test_get.py b/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/test_get.py
deleted file mode 100644
index eed81d304..000000000
--- a/libraries/client/cloudharness_cli/test/workflows/test_paths/test_operations_name_logs/test_get.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# coding: utf-8
-
-"""
-
-
- Generated by: https://openapi-generator.tech
-"""
-
-import unittest
-from unittest.mock import patch
-
-import urllib3
-
-import cloudharness_cli.workflows
-from cloudharness_cli.workflows.paths.operations_name_logs import get # noqa: E501
-from cloudharness_cli.workflows import configuration, schemas, api_client
-
-from .. import ApiTestMixin
-
-
-class TestOperationsNameLogs(ApiTestMixin, unittest.TestCase):
- """
- OperationsNameLogs unit test stubs
- get operation by name # noqa: E501
- """
- _configuration = configuration.Configuration()
-
- def setUp(self):
- used_api_client = api_client.ApiClient(configuration=self._configuration)
- self.api = get.ApiForget(api_client=used_api_client) # noqa: E501
-
- def tearDown(self):
- pass
-
- response_status = 200
-
-
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/openapitools.json b/openapitools.json
new file mode 100644
index 000000000..b11fef7de
--- /dev/null
+++ b/openapitools.json
@@ -0,0 +1,7 @@
+{
+ "$schema": "node_modules/@openapitools/openapi-generator-cli/config.schema.json",
+ "spaces": 2,
+ "generator-cli": {
+ "version": "7.8.0"
+ }
+}
diff --git a/tools/deployment-cli-tools/ch_cli_tools/openapi.py b/tools/deployment-cli-tools/ch_cli_tools/openapi.py
index 7fc9cfc20..e6f232234 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/openapi.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/openapi.py
@@ -73,11 +73,9 @@ def generate_python_client(module, openapi_file, client_src_path, lib_name=LIB_N
get_dependencies()
module = to_python_module(module)
- command = f"java -jar {CODEGEN} generate " \
- f"-i {openapi_file} " \
- f"-g python " \
- f"-o {client_src_path}/tmp-{module} " \
- f"packageName={lib_name}.{module}"
+ command = f"java -jar {CODEGEN} generate -i {openapi_file} -g python" \
+ f" -o {client_src_path}/tmp-{module} " \
+ f"--additional-properties packageName={lib_name}.{module}"
os.system(command)
diff --git a/tools/deployment-cli-tools/harness-generate b/tools/deployment-cli-tools/harness-generate
index ffec20553..05a40273e 100644
--- a/tools/deployment-cli-tools/harness-generate
+++ b/tools/deployment-cli-tools/harness-generate
@@ -47,9 +47,9 @@ def aggregate_packages(client_src_path, lib_name=LIB_NAME):
TEST_REQUIREMENTS = os.path.join(client_src_path, 'test-requirements.txt')
if not os.path.exists(DOCS_PATH):
- os.mkdir(DOCS_PATH)
+ os.makedirs(DOCS_PATH)
if not os.path.exists(TEST_PATH):
- os.mkdir(TEST_PATH)
+ os.makedirs(TEST_PATH)
if os.path.exists(README):
os.remove(README)
if os.path.exists(REQUIREMENTS):
@@ -159,9 +159,9 @@ if __name__ == "__main__":
args.path) else args.path
get_dependencies()
- if os.path.exists(os.path.join(root_path, "libraries/models")) and (not args.interactive or input("Do you want to generate the main model? [Y/n]").upper() != 'N'):
- generate_model()
+ # if os.path.exists(os.path.join(root_path, "libraries/models")) and (not args.interactive or input("Do you want to generate the main model? [Y/n]").upper() != 'N'):
+ # generate_model()
- generate_servers(root_path, interactive=args.interactive)
+ # generate_servers(root_path, interactive=args.interactive)
generate_clients(root_path, args.client_name, interactive=args.interactive)
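
The openapi.py hunk above fixes the python client generation command: openapi-generator takes package options through the --additional-properties flag rather than as a bare trailing argument. A minimal sketch of the assembled call, with illustrative values for CODEGEN, openapi_file, client_src_path, lib_name and module (in the real script these come from the surrounding code):

import os

# Illustrative values; the real script derives these from its arguments.
CODEGEN = "openapi-generator-cli.jar"
openapi_file = "api/openapi.yaml"
client_src_path = "libraries/client/cloudharness_cli"
lib_name, module = "cloudharness_cli", "samples"

command = (
    f"java -jar {CODEGEN} generate -i {openapi_file} -g python"
    f" -o {client_src_path}/tmp-{module}"
    f" --additional-properties packageName={lib_name}.{module}"
)
os.system(command)  # executed the same way as in the patched function
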
From b344129d46f0b9e6ce1e65b6cece35beb042af2b Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 15:12:39 +0200
Subject: [PATCH 74/94] CH-110 improve output on error
---
tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index bc6f7ba67..f014d33c7 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -89,7 +89,8 @@ def generate_docker_compose_yaml(self):
res = subprocess.call(command, shell=True)
if res != 0:
- raise Exception(f"Error generating docker-compose.yaml. See above output for details or try run\n\n{command} --debug")
+ out = subprocess.run(command, shell=True, capture_output=True, text=True).stderr
+ raise Exception(f"Error generating docker-compose.yaml.\n{out}\n\nSee above output for details or try running\n\n{command} --debug")
self.__post_process_multiple_document_docker_compose(dest_compose_yaml)
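
The change above attaches the failing command's output to the raised exception. A minimal sketch of that pattern, assuming the generation command is cheap enough to re-run once for capture (the function name and message text here are illustrative):

import subprocess

def run_with_diagnostics(command: str) -> None:
    # First run streams output to the console, like subprocess.call in the patch.
    res = subprocess.call(command, shell=True)
    if res != 0:
        # Re-run once, capturing stderr so it can be embedded in the message.
        err = subprocess.run(command, shell=True, capture_output=True, text=True).stderr
        raise Exception(f"Command failed.\n{err}\n\nTry running\n\n{command} --debug")
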
From 1ddd02383e73bbc929eefbcf9cc2006df0a131f1 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 15:16:22 +0200
Subject: [PATCH 75/94] CH-126 improve code generation
---
.../cloudharness_cli/test/common/__init__.py | 0
.../test/common/test_accounts_api.py | 38 +++++++++++
.../test/common/test_app_version.py | 52 ++++++++++++++
.../test/common/test_config_api.py | 37 ++++++++++
.../common/test_get_config200_response.py | 53 +++++++++++++++
.../test/common/test_sentry_api.py | 38 +++++++++++
.../cloudharness_cli/test/samples/__init__.py | 0
.../test/samples/test_auth_api.py | 46 +++++++++++++
.../test/samples/test_inline_response202.py | 52 ++++++++++++++
.../samples/test_inline_response202_task.py | 53 +++++++++++++++
.../test/samples/test_resource_api.py | 67 +++++++++++++++++++
.../test/samples/test_sample_resource.py | 55 +++++++++++++++
.../test/samples/test_test_api.py | 46 +++++++++++++
.../test/samples/test_workflows_api.py | 53 +++++++++++++++
.../test/volumemanager/__init__.py | 0
.../test_persistent_volume_claim.py | 54 +++++++++++++++
.../test_persistent_volume_claim_create.py | 52 ++++++++++++++
.../test/volumemanager/test_rest_api.py | 45 +++++++++++++
.../test/workflows/__init__.py | 0
.../workflows/test_create_and_access_api.py | 60 +++++++++++++++++
.../test/workflows/test_operation.py | 56 ++++++++++++++++
.../workflows/test_operation_search_result.py | 61 +++++++++++++++++
.../test/workflows/test_operation_status.py | 34 ++++++++++
.../test/workflows/test_search_result_data.py | 52 ++++++++++++++
tools/deployment-cli-tools/harness-generate | 23 +++++--
25 files changed, 1020 insertions(+), 7 deletions(-)
create mode 100644 libraries/client/cloudharness_cli/test/common/__init__.py
create mode 100644 libraries/client/cloudharness_cli/test/common/test_accounts_api.py
create mode 100644 libraries/client/cloudharness_cli/test/common/test_app_version.py
create mode 100644 libraries/client/cloudharness_cli/test/common/test_config_api.py
create mode 100644 libraries/client/cloudharness_cli/test/common/test_get_config200_response.py
create mode 100644 libraries/client/cloudharness_cli/test/common/test_sentry_api.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/__init__.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_auth_api.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_inline_response202.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_inline_response202_task.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_resource_api.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_sample_resource.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_test_api.py
create mode 100644 libraries/client/cloudharness_cli/test/samples/test_workflows_api.py
create mode 100644 libraries/client/cloudharness_cli/test/volumemanager/__init__.py
create mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim.py
create mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim_create.py
create mode 100644 libraries/client/cloudharness_cli/test/volumemanager/test_rest_api.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/__init__.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/test_create_and_access_api.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/test_operation.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/test_operation_search_result.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/test_operation_status.py
create mode 100644 libraries/client/cloudharness_cli/test/workflows/test_search_result_data.py
diff --git a/libraries/client/cloudharness_cli/test/common/__init__.py b/libraries/client/cloudharness_cli/test/common/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/libraries/client/cloudharness_cli/test/common/test_accounts_api.py b/libraries/client/cloudharness_cli/test/common/test_accounts_api.py
new file mode 100644
index 000000000..5fa693cc6
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/common/test_accounts_api.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.common.api.accounts_api import AccountsApi
+
+
+class TestAccountsApi(unittest.TestCase):
+ """AccountsApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = AccountsApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_get_config(self) -> None:
+ """Test case for get_config
+
+ Gets the config for logging in into accounts
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/common/test_app_version.py b/libraries/client/cloudharness_cli/test/common/test_app_version.py
new file mode 100644
index 000000000..2078e5b48
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/common/test_app_version.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.common.models.app_version import AppVersion
+
+class TestAppVersion(unittest.TestCase):
+ """AppVersion unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> AppVersion:
+ """Test AppVersion
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `AppVersion`
+ """
+ model = AppVersion()
+ if include_optional:
+ return AppVersion(
+ build = '',
+ tag = ''
+ )
+ else:
+ return AppVersion(
+ )
+ """
+
+ def testAppVersion(self):
+ """Test AppVersion"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/common/test_config_api.py b/libraries/client/cloudharness_cli/test/common/test_config_api.py
new file mode 100644
index 000000000..50a9eed8d
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/common/test_config_api.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.common.api.config_api import ConfigApi
+
+
+class TestConfigApi(unittest.TestCase):
+ """ConfigApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = ConfigApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_get_version(self) -> None:
+ """Test case for get_version
+
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/common/test_get_config200_response.py b/libraries/client/cloudharness_cli/test/common/test_get_config200_response.py
new file mode 100644
index 000000000..231d59dd5
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/common/test_get_config200_response.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.common.models.get_config200_response import GetConfig200Response
+
+class TestGetConfig200Response(unittest.TestCase):
+ """GetConfig200Response unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> GetConfig200Response:
+ """Test GetConfig200Response
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `GetConfig200Response`
+ """
+ model = GetConfig200Response()
+ if include_optional:
+ return GetConfig200Response(
+ url = '',
+ realm = '',
+ client_id = ''
+ )
+ else:
+ return GetConfig200Response(
+ )
+ """
+
+ def testGetConfig200Response(self):
+ """Test GetConfig200Response"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/common/test_sentry_api.py b/libraries/client/cloudharness_cli/test/common/test_sentry_api.py
new file mode 100644
index 000000000..c10641151
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/common/test_sentry_api.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+
+"""
+ CH common service API
+
+ Cloud Harness Platform - Reference CH service API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.common.api.sentry_api import SentryApi
+
+
+class TestSentryApi(unittest.TestCase):
+ """SentryApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = SentryApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_getdsn(self) -> None:
+ """Test case for getdsn
+
+ Gets the Sentry DSN for a given application
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/__init__.py b/libraries/client/cloudharness_cli/test/samples/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/libraries/client/cloudharness_cli/test/samples/test_auth_api.py b/libraries/client/cloudharness_cli/test/samples/test_auth_api.py
new file mode 100644
index 000000000..a657816fe
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_auth_api.py
@@ -0,0 +1,46 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.api.auth_api import AuthApi
+
+
+class TestAuthApi(unittest.TestCase):
+ """AuthApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = AuthApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_valid_cookie(self) -> None:
+ """Test case for valid_cookie
+
+ Check if the token is valid. Get a token by logging into the base url
+ """
+ pass
+
+ def test_valid_token(self) -> None:
+ """Test case for valid_token
+
+ Check if the token is valid. Get a token by logging into the base url
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_inline_response202.py b/libraries/client/cloudharness_cli/test/samples/test_inline_response202.py
new file mode 100644
index 000000000..ef63ef0b5
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_inline_response202.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.models.inline_response202 import InlineResponse202
+
+class TestInlineResponse202(unittest.TestCase):
+ """InlineResponse202 unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> InlineResponse202:
+ """Test InlineResponse202
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `InlineResponse202`
+ """
+ model = InlineResponse202()
+ if include_optional:
+ return InlineResponse202(
+ task = {"name":"my-op","href":"http://workflows.cloudharness.metacell.us/api/operation/my-op"}
+ )
+ else:
+ return InlineResponse202(
+ )
+ """
+
+ def testInlineResponse202(self):
+ """Test InlineResponse202"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_inline_response202_task.py b/libraries/client/cloudharness_cli/test/samples/test_inline_response202_task.py
new file mode 100644
index 000000000..e82296d44
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_inline_response202_task.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task
+
+class TestInlineResponse202Task(unittest.TestCase):
+ """InlineResponse202Task unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> InlineResponse202Task:
+ """Test InlineResponse202Task
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `InlineResponse202Task`
+ """
+ model = InlineResponse202Task()
+ if include_optional:
+ return InlineResponse202Task(
+ href = 'http://workflows.cloudharness.metacell.us/api/operation/my-op',
+ name = 'my-op'
+ )
+ else:
+ return InlineResponse202Task(
+ )
+ """
+
+ def testInlineResponse202Task(self):
+ """Test InlineResponse202Task"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_resource_api.py b/libraries/client/cloudharness_cli/test/samples/test_resource_api.py
new file mode 100644
index 000000000..42f998892
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_resource_api.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.api.resource_api import ResourceApi
+
+
+class TestResourceApi(unittest.TestCase):
+ """ResourceApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = ResourceApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_create_sample_resource(self) -> None:
+ """Test case for create_sample_resource
+
+ Create a SampleResource
+ """
+ pass
+
+ def test_delete_sample_resource(self) -> None:
+ """Test case for delete_sample_resource
+
+ Delete a SampleResource
+ """
+ pass
+
+ def test_get_sample_resource(self) -> None:
+ """Test case for get_sample_resource
+
+ Get a SampleResource
+ """
+ pass
+
+ def test_get_sample_resources(self) -> None:
+ """Test case for get_sample_resources
+
+ List All SampleResources
+ """
+ pass
+
+ def test_update_sample_resource(self) -> None:
+ """Test case for update_sample_resource
+
+ Update a SampleResource
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_sample_resource.py b/libraries/client/cloudharness_cli/test/samples/test_sample_resource.py
new file mode 100644
index 000000000..2a6a2ffbc
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_sample_resource.py
@@ -0,0 +1,55 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.models.sample_resource import SampleResource
+
+class TestSampleResource(unittest.TestCase):
+ """SampleResource unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> SampleResource:
+ """Test SampleResource
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `SampleResource`
+ """
+ model = SampleResource()
+ if include_optional:
+ return SampleResource(
+ a = 1.337,
+ b = 1.337,
+ id = 1.337
+ )
+ else:
+ return SampleResource(
+ a = 1.337,
+ )
+ """
+
+ def testSampleResource(self):
+ """Test SampleResource"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_test_api.py b/libraries/client/cloudharness_cli/test/samples/test_test_api.py
new file mode 100644
index 000000000..ab0b8cc3f
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_test_api.py
@@ -0,0 +1,46 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.api.test_api import TestApi
+
+
+class TestTestApi(unittest.TestCase):
+ """TestApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = TestApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_error(self) -> None:
+ """Test case for error
+
+ test sentry is working
+ """
+ pass
+
+ def test_ping(self) -> None:
+ """Test case for ping
+
+ test the application is up
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/samples/test_workflows_api.py b/libraries/client/cloudharness_cli/test/samples/test_workflows_api.py
new file mode 100644
index 000000000..138274b79
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/samples/test_workflows_api.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+ CloudHarness Sample API
+
+ CloudHarness Sample api
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.samples.api.workflows_api import WorkflowsApi
+
+
+class TestWorkflowsApi(unittest.TestCase):
+ """WorkflowsApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = WorkflowsApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_submit_async(self) -> None:
+ """Test case for submit_async
+
+ Send an asynchronous operation
+ """
+ pass
+
+ def test_submit_sync(self) -> None:
+ """Test case for submit_sync
+
+ Send a synchronous operation
+ """
+ pass
+
+ def test_submit_sync_with_results(self) -> None:
+ """Test case for submit_sync_with_results
+
+ Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/__init__.py b/libraries/client/cloudharness_cli/test/volumemanager/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim.py b/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim.py
new file mode 100644
index 000000000..6d56b4714
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.volumemanager.models.persistent_volume_claim import PersistentVolumeClaim
+
+class TestPersistentVolumeClaim(unittest.TestCase):
+ """PersistentVolumeClaim unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> PersistentVolumeClaim:
+ """Test PersistentVolumeClaim
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `PersistentVolumeClaim`
+ """
+ model = PersistentVolumeClaim()
+ if include_optional:
+ return PersistentVolumeClaim(
+ name = 'pvc-1',
+ namespace = 'ch',
+ accessmode = 'ReadWriteMany',
+ size = '2Gi (see also https://github.com/kubernetes/community/blob/master/contributors/design-proposals/scheduling/resources.md#resource-quantities)'
+ )
+ else:
+ return PersistentVolumeClaim(
+ )
+ """
+
+ def testPersistentVolumeClaim(self):
+ """Test PersistentVolumeClaim"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim_create.py b/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim_create.py
new file mode 100644
index 000000000..6c036b801
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/volumemanager/test_persistent_volume_claim_create.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.volumemanager.models.persistent_volume_claim_create import PersistentVolumeClaimCreate
+
+class TestPersistentVolumeClaimCreate(unittest.TestCase):
+ """PersistentVolumeClaimCreate unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> PersistentVolumeClaimCreate:
+ """Test PersistentVolumeClaimCreate
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `PersistentVolumeClaimCreate`
+ """
+ model = PersistentVolumeClaimCreate()
+ if include_optional:
+ return PersistentVolumeClaimCreate(
+ name = 'pvc-1',
+ size = '2Gi (see also https://github.com/kubernetes/community/blob/master/contributors/design-proposals/scheduling/resources.md#resource-quantities)'
+ )
+ else:
+ return PersistentVolumeClaimCreate(
+ )
+ """
+
+ def testPersistentVolumeClaimCreate(self):
+ """Test PersistentVolumeClaimCreate"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/volumemanager/test_rest_api.py b/libraries/client/cloudharness_cli/test/volumemanager/test_rest_api.py
new file mode 100644
index 000000000..6aedd71df
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/volumemanager/test_rest_api.py
@@ -0,0 +1,45 @@
+# coding: utf-8
+
+"""
+ Volumes manager API
+
+ CloudHarness Volumes manager API
+
+ The version of the OpenAPI document: 0.1.0
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.volumemanager.api.rest_api import RestApi
+
+
+class TestRestApi(unittest.TestCase):
+ """RestApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = RestApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_pvc_name_get(self) -> None:
+ """Test case for pvc_name_get
+
+ Retrieve a Persistent Volume Claim from the Kubernetes repository.
+ """
+ pass
+
+ def test_pvc_post(self) -> None:
+ """Test case for pvc_post
+
+ Create a Persistent Volume Claim in Kubernetes
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/__init__.py b/libraries/client/cloudharness_cli/test/workflows/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_create_and_access_api.py b/libraries/client/cloudharness_cli/test/workflows/test_create_and_access_api.py
new file mode 100644
index 000000000..0c1abdb14
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/workflows/test_create_and_access_api.py
@@ -0,0 +1,60 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi
+
+
+class TestCreateAndAccessApi(unittest.TestCase):
+ """CreateAndAccessApi unit test stubs"""
+
+ def setUp(self) -> None:
+ self.api = CreateAndAccessApi()
+
+ def tearDown(self) -> None:
+ pass
+
+ def test_delete_operation(self) -> None:
+ """Test case for delete_operation
+
+ deletes operation by name
+ """
+ pass
+
+ def test_get_operation(self) -> None:
+ """Test case for get_operation
+
+ get operation by name
+ """
+ pass
+
+ def test_list_operations(self) -> None:
+ """Test case for list_operations
+
+ lists operations
+ """
+ pass
+
+ def test_log_operation(self) -> None:
+ """Test case for log_operation
+
+ get operation by name
+ """
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_operation.py b/libraries/client/cloudharness_cli/test/workflows/test_operation.py
new file mode 100644
index 000000000..6f5a07271
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/workflows/test_operation.py
@@ -0,0 +1,56 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.workflows.models.operation import Operation
+
+class TestOperation(unittest.TestCase):
+ """Operation unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> Operation:
+ """Test Operation
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `Operation`
+ """
+ model = Operation()
+ if include_optional:
+ return Operation(
+ message = '',
+ name = '',
+ create_time = '2016-08-29T09:12:33.001Z',
+ status = 'Pending',
+ workflow = ''
+ )
+ else:
+ return Operation(
+ )
+ """
+
+ def testOperation(self):
+ """Test Operation"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_operation_search_result.py b/libraries/client/cloudharness_cli/test/workflows/test_operation_search_result.py
new file mode 100644
index 000000000..f16fa22ce
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/workflows/test_operation_search_result.py
@@ -0,0 +1,61 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult
+
+class TestOperationSearchResult(unittest.TestCase):
+ """OperationSearchResult unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> OperationSearchResult:
+ """Test OperationSearchResult
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `OperationSearchResult`
+ """
+ model = OperationSearchResult()
+ if include_optional:
+ return OperationSearchResult(
+ meta = cloudharness_cli.workflows.models.search_result_data.SearchResultData(
+ continue_token = '', ),
+ items = [
+ cloudharness_cli.workflows.models.operation.Operation(
+ message = '',
+ name = '',
+ create_time = '2016-08-29T09:12:33.001Z',
+ status = 'Pending',
+ workflow = '', )
+ ]
+ )
+ else:
+ return OperationSearchResult(
+ )
+ """
+
+ def testOperationSearchResult(self):
+ """Test OperationSearchResult"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_operation_status.py b/libraries/client/cloudharness_cli/test/workflows/test_operation_status.py
new file mode 100644
index 000000000..faa8c9adf
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/workflows/test_operation_status.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.workflows.models.operation_status import OperationStatus
+
+class TestOperationStatus(unittest.TestCase):
+ """OperationStatus unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def testOperationStatus(self):
+ """Test OperationStatus"""
+ # inst = OperationStatus()
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/libraries/client/cloudharness_cli/test/workflows/test_search_result_data.py b/libraries/client/cloudharness_cli/test/workflows/test_search_result_data.py
new file mode 100644
index 000000000..26fc126be
--- /dev/null
+++ b/libraries/client/cloudharness_cli/test/workflows/test_search_result_data.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+
+"""
+ Workflows API
+
+ Workflows API
+
+ The version of the OpenAPI document: 0.1.0
+ Contact: cloudharness@metacell.us
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from cloudharness_cli.workflows.models.search_result_data import SearchResultData
+
+class TestSearchResultData(unittest.TestCase):
+ """SearchResultData unit test stubs"""
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def make_instance(self, include_optional) -> SearchResultData:
+ """Test SearchResultData
+ include_optional is a boolean, when False only required
+ params are included, when True both required and
+ optional params are included """
+ # uncomment below to create an instance of `SearchResultData`
+ """
+ model = SearchResultData()
+ if include_optional:
+ return SearchResultData(
+ continue_token = ''
+ )
+ else:
+ return SearchResultData(
+ )
+ """
+
+ def testSearchResultData(self):
+ """Test SearchResultData"""
+ # inst_req_only = self.make_instance(include_optional=False)
+ # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/deployment-cli-tools/harness-generate b/tools/deployment-cli-tools/harness-generate
index 05a40273e..12d40e8a3 100644
--- a/tools/deployment-cli-tools/harness-generate
+++ b/tools/deployment-cli-tools/harness-generate
@@ -22,7 +22,7 @@ def get_application_paths(openapi_files):
return [os.path.basename(os.path.dirname(os.path.dirname(path))) for path in openapi_files]
-def generate_servers(root_path, interactive=False):
+def generate_servers(root_path, interactive=False, server=None):
"""
Generates server stubs
"""
@@ -32,6 +32,9 @@ def generate_servers(root_path, interactive=False):
if not interactive or input("Do you want to generate " + openapi_files[i] + "? [Y/n]").upper() != 'N':
openapi_file = openapi_files[i]
application_root = os.path.dirname(os.path.dirname(openapi_file))
+ appname = os.path.basename(application_root)
+ if server and server != appname:
+ continue
if os.path.exists(os.path.join(application_root, "api", "genapi.sh")):
# fastapi server --> use the genapi.sh script
generate_fastapi_server(application_root)
@@ -153,15 +156,21 @@ if __name__ == "__main__":
help='Specify image registry prefix')
parser.add_argument('-i', '--interactive', dest='interactive', action="store_true",
help='Asks before generate')
+    parser.add_argument('-s', '--server', dest='server', action="store",
+                        help='Generate stubs only for the given application server', default=None)
+ parser.add_argument('-c', '--clients', dest='clients', action="store_true",
+ help='Generate only client libraries')
+ parser.add_argument('-m', '--models', dest='models', action="store_true",
+                        help='Generate only the model library')
args, unknown = parser.parse_known_args(sys.argv[1:])
root_path = os.path.join(os.getcwd(), args.path) if not os.path.isabs(
args.path) else args.path
get_dependencies()
- # if os.path.exists(os.path.join(root_path, "libraries/models")) and (not args.interactive or input("Do you want to generate the main model? [Y/n]").upper() != 'N'):
- # generate_model()
-
- # generate_servers(root_path, interactive=args.interactive)
-
- generate_clients(root_path, args.client_name, interactive=args.interactive)
+ if args.models and os.path.exists(os.path.join(root_path, "libraries/models")) and (not args.interactive or input("Do you want to generate the main model? [Y/n]").upper() != 'N'):
+ generate_model()
+ if not (args.clients or args.models) or args.server:
+ generate_servers(root_path, interactive=args.interactive, server=args.server)
+ if not (args.server or args.models) or args.clients:
+ generate_clients(root_path, args.client_name, interactive=args.interactive)
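Note: the three new flags combine rather than strictly exclude each other. A minimal sketch of the gating logic above, assuming the same argparse attribute names (models, clients, server):

    import argparse

    def selected_generators(args: argparse.Namespace) -> list:
        """Return which generator groups run for a given flag combination."""
        run = []
        if args.models:
            run.append("models")
        if not (args.clients or args.models) or args.server:
            run.append("servers")  # default, or when -s names an application
        if not (args.server or args.models) or args.clients:
            run.append("clients")  # default, or when -c is passed
        return run

    # with no flags both servers and clients are generated
    assert selected_generators(
        argparse.Namespace(models=False, clients=False, server=None)
    ) == ["servers", "clients"]
    # -m alone generates only the model library
    assert selected_generators(
        argparse.Namespace(models=True, clients=False, server=None)
    ) == ["models"]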
From 97ff173c7fdfdac878b11f156a64180a1a4c7113 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 15:21:30 +0200
Subject: [PATCH 76/94] CH-110 improve output on error
---
tools/deployment-cli-tools/ch_cli_tools/dockercompose.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index f014d33c7..a4a3e1b8e 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -89,8 +89,8 @@ def generate_docker_compose_yaml(self):
res = subprocess.call(command, shell=True)
if res != 0:
- out = subprocess.check_output(["ntpq", "-p"])
- raise Exception(f"Error generating docker-compose.yaml.\n{out}\n\nSee above output for details or try run\n\n{command} --debug")
+ out = subprocess.check_output(f"{command} --debug", shell=True, stderr=subprocess.STDOUT)
+ raise Exception(f"Error generating docker-compose.yaml.\n{out}\n\nSee above output for details")
self.__post_process_multiple_document_docker_compose(dest_compose_yaml)
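Note: subprocess.check_output itself raises CalledProcessError when the --debug rerun also fails, which is the likely case given the first run already failed, so the exception above may never carry the captured output. A sketch that also covers that path, assuming the same shell-based invocation:

    import subprocess

    def generate_or_raise(command: str) -> None:
        if subprocess.call(command, shell=True) == 0:
            return
        # Re-run with --debug, keeping the combined stdout/stderr even when
        # the rerun fails as well.
        try:
            out = subprocess.check_output(
                f"{command} --debug", shell=True, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            out = e.output
        raise Exception(
            f"Error generating docker-compose.yaml.\n{out.decode()}\n\n"
            "See above output for details")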
From a5477a9d3cacb6d94321d1defabd6d573818ab69 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Fri, 13 Sep 2024 15:30:51 +0200
Subject: [PATCH 77/94] CH-110 fix tests for docker-compose
---
test/codefresh.yml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/test/codefresh.yml b/test/codefresh.yml
index dbdbd3cb1..c651ebd94 100644
--- a/test/codefresh.yml
+++ b/test/codefresh.yml
@@ -19,13 +19,14 @@ steps:
git: "github"
stage: "clone"
-
test:
title: "Running test"
type: "freestyle" # Run any command
- image: "python:3.9" # The image in which command will be executed
+ image: "python:3.12" # The image in which command will be executed
working_directory: "${{clone}}" # Running command where code cloned
commands:
+ - curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
+ - bash get_helm.sh
- pip install tox
- rm -rf libraries/models/.tox
- rm -rf libraries/cloudharness-common/.tox
@@ -34,4 +35,3 @@ steps:
- tox -c libraries/cloudharness-common/
- tox -c tools/deployment-cli-tools
stage: "test"
-
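Note: the test image now needs the helm binary on PATH (hence the get-helm-3 install above), presumably because the deployment-cli-tools tests shell out to helm template; without it the failure only surfaces late inside tox. A minimal pre-flight check, as a sketch assuming helm is expected on PATH:

    import shutil
    import subprocess

    def require_helm() -> str:
        """Fail fast when helm is missing; return its short version string."""
        if shutil.which("helm") is None:
            raise RuntimeError("helm not found on PATH; run get_helm.sh first")
        return subprocess.check_output(
            ["helm", "version", "--short"], text=True).strip()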
From 8bdb61924757d851bef4c4f02f23bd320c70e8e1 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:07:16 +0200
Subject: [PATCH 78/94] CH-66 change secrets in configmap
---
.../helm/templates/configmap.yaml | 16 ++++------------
libraries/models/cloudharness_model/util.py | 8 ++++----
2 files changed, 8 insertions(+), 16 deletions(-)
diff --git a/deployment-configuration/helm/templates/configmap.yaml b/deployment-configuration/helm/templates/configmap.yaml
index e63382061..2888fda8d 100644
--- a/deployment-configuration/helm/templates/configmap.yaml
+++ b/deployment-configuration/helm/templates/configmap.yaml
@@ -1,13 +1,5 @@
{{- /*
-to replace the secrets values we create a dict with the structure:
- app:
- :
- harness:
- secrets:
-
-thus with an empty secrets node
-and then it's mergeOverwrite the copy of the .Values we created
-resulting in a copy of the .Values with all secrets being ""
+Replace each app's harness.secrets with an empty dict so secret values are never rendered into the configmap
*/ -}}
kind: ConfigMap
apiVersion: v1
@@ -19,7 +11,7 @@ data:
allvalues.yaml: |
{{- $values_copy := deepCopy .Values }}
{{- range $key, $val := .Values.apps }}
- {{- $new_secrets := dict "apps" (dict $key (dict "harness" (dict "secrets"))) }}
- {{- $tmp := mergeOverwrite $values_copy $new_secrets }}
+ {{- $app := get $values_copy.apps $key }}
+ {{- $tmp := set $app.harness "secrets" dict }}
{{- end }}
-{{ $values_copy | toYaml | indent 4 }}
+{{ $values_copy | toYaml | indent 4 }}
\ No newline at end of file
diff --git a/libraries/models/cloudharness_model/util.py b/libraries/models/cloudharness_model/util.py
index 8fe0a648c..8b21d7fbf 100644
--- a/libraries/models/cloudharness_model/util.py
+++ b/libraries/models/cloudharness_model/util.py
@@ -135,13 +135,13 @@ def deserialize_model(data, klass):
if attr in instance.attribute_map:
try:
setattr(instance, attr, _deserialize(value, instance.openapi_types[attr]))
- except:
+ except Exception as e:
logging.warning(
- "Deserialization error: could not set attribute `%s` to value `%s` in class `%s`.", attr, value, klass.__name__)
+ "Deserialization error: could not set attribute `%s` to value `%s` in class `%s`.", attr, value, klass.__name__, exc_info=True)
from .models.base_model_ import Model
setattr(instance, attr, Model.from_dict(value))
logging.debug("Instance is %s", instance, exc_info=True)
-
+
except Exception as e:
logging.error("Deserialize error", exc_info=True)
raise DeserializationException(
@@ -172,4 +172,4 @@ def _deserialize_dict(data, boxed_type):
"""
from cloudharness_model.models.base_model_ import Model
return Model.from_dict({k: _deserialize(v, boxed_type)
- for k, v in six.iteritems(data)})
+ for k, v in six.iteritems(data)})
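Note: the util.py change keeps the traceback in the warning (exc_info=True) before falling back to the generic Model.from_dict. The pattern, as a standalone sketch with a typed setter and a fallback:

    import logging

    def set_typed_attr(instance, attr, value, typ, fallback):
        """Try the typed deserializer; on failure log with traceback, then fall back."""
        try:
            setattr(instance, attr, typ(value))
        except Exception:
            logging.warning(
                "Deserialization error: could not set attribute `%s` on `%s`.",
                attr, type(instance).__name__, exc_info=True)
            setattr(instance, attr, fallback(value))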
From ea209173c61b1c6b1a1258efd425f1c3aab210dc Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:08:08 +0200
Subject: [PATCH 79/94] Fix merge issues
---
.../ch_cli_tools/codefresh.py | 56 +++++++++----------
.../deployment-cli-tools/ch_cli_tools/helm.py | 56 +++----------------
2 files changed, 34 insertions(+), 78 deletions(-)
diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
index c5c821114..e4dca5143 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
@@ -246,46 +246,42 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in
)
-<< << << < HEAD
-== == == =
->>>>>> > 2dc0b2c9fc6924771b81a82f3d089d9ecaff49a5
+ def add_unit_test_step(app_config: ApplicationHarnessConfig):
+ # Create a run step for each application with tests/unit.yaml file using the corresponding image built at the previous step
-def add_unit_test_step(app_config: ApplicationHarnessConfig):
- # Create a run step for each application with tests/unit.yaml file using the corresponding image built at the previous step
+ test_config: ApplicationTestConfig = app_config.test
+ app_name = app_config.name
- test_config: ApplicationTestConfig = app_config.test
- app_name = app_config.name
+ if test_config.unit.enabled and test_config.unit.commands:
+ tag = app_specific_tag_variable(app_name)
+ steps[CD_UNIT_TEST_STEP]['steps'][f"{app_name}_ut"] = dict(
+ title=f"Unit tests for {app_name}",
+ commands=test_config.unit.commands,
+ image=image_tag_with_variables(app_name, tag, base_image_name),
+ )
- if test_config.unit.enabled and test_config.unit.commands:
- tag = app_specific_tag_variable(app_name)
- steps[CD_UNIT_TEST_STEP]['steps'][f"{app_name}_ut"] = dict(
- title=f"Unit tests for {app_name}",
- commands=test_config.unit.commands,
- image=image_tag_with_variables(app_name, tag, base_image_name),
- )
-
- if helm_values[KEY_TASK_IMAGES]:
- codefresh_steps_from_base_path(join(root_path, BASE_IMAGES_PATH), CD_BUILD_STEP_BASE,
- fixed_context=relpath(root_path, os.getcwd()), include=helm_values[KEY_TASK_IMAGES].keys())
- codefresh_steps_from_base_path(join(root_path, STATIC_IMAGES_PATH), CD_BUILD_STEP_STATIC,
- include=helm_values[KEY_TASK_IMAGES].keys())
+ if helm_values[KEY_TASK_IMAGES]:
+ codefresh_steps_from_base_path(join(root_path, BASE_IMAGES_PATH), CD_BUILD_STEP_BASE,
+ fixed_context=relpath(root_path, os.getcwd()), include=helm_values[KEY_TASK_IMAGES].keys())
+ codefresh_steps_from_base_path(join(root_path, STATIC_IMAGES_PATH), CD_BUILD_STEP_STATIC,
+ include=helm_values[KEY_TASK_IMAGES].keys())
- codefresh_steps_from_base_path(join(
- root_path, APPS_PATH), CD_BUILD_STEP_PARALLEL)
+ codefresh_steps_from_base_path(join(
+ root_path, APPS_PATH), CD_BUILD_STEP_PARALLEL)
if CD_E2E_TEST_STEP in steps:
- name = "test-e2e"
- codefresh_steps_from_base_path(join(
- root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), publish=False)
- steps[CD_E2E_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
+ name = "test-e2e"
+ codefresh_steps_from_base_path(join(
+ root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), publish=False)
+ steps[CD_E2E_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
if CD_API_TEST_STEP in steps:
- name = "test-api"
- codefresh_steps_from_base_path(join(
- root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False)
- steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
+ name = "test-api"
+ codefresh_steps_from_base_path(join(
+ root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False)
+ steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
if not codefresh:
logging.warning(
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index 05f83a3c9..df2b20807 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -155,54 +155,14 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
-
-<< << << < HEAD
-== == == =
-
-
-def __clear_unused_db_configuration(self, harness_config):
- database_config = harness_config[KEY_DATABASE]
- database_type = database_config.get('type', None)
- if database_type is None:
- del harness_config[KEY_DATABASE]
- return
- db_specific_keys = [k for k, v in database_config.items()
- if isinstance(v, dict) and 'image' in v and 'ports' in v]
- for db in db_specific_keys:
- if database_type != db:
- del database_config[db]
-
- def image_tag(self, image_name, build_context_path=None, dependencies=()):
- tag = self.tag
- if tag is None and not self.local:
- logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}")
- ignore_path = os.path.join(build_context_path, '.dockerignore')
- ignore = set(DEFAULT_IGNORE)
- if os.path.exists(ignore_path):
- with open(ignore_path) as f:
- ignore = ignore.union({line.strip() for line in f if line.strip() and not line.startswith('#')})
- logging.info(f"Ignoring {ignore}")
- tag = generate_tag_from_content(build_context_path, ignore)
- logging.info(f"Content hash: {tag}")
- dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path)
- tag = sha1((tag + "".join(self.all_images.get(n, '') for n in dependencies)).encode("utf-8")).hexdigest()
- logging.info(f"Generated tag: {tag}")
- app_name = image_name.split("/")[-1] # the image name can have a prefix
- self.all_images[app_name] = tag
- return self.registry + image_name + (f':{tag}' if tag else '')
-
-
->>>>>> > 2dc0b2c9fc6924771b81a82f3d089d9ecaff49a5
-
-
-def create_app_values_spec(self, app_name, app_path, base_image_name=None):
- logging.info('Generating values script for ' + app_name)
-
- specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
- if os.path.exists(specific_template_path):
- logging.info("Specific values template found: " +
- specific_template_path)
- values = get_template(specific_template_path)
+ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
+ logging.info('Generating values script for ' + app_name)
+
+ specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
+ if os.path.exists(specific_template_path):
+ logging.info("Specific values template found: " +
+ specific_template_path)
+ values = get_template(specific_template_path)
else:
values = {}
From 4fe5c4c7d0f2f235bfb47df4672bcf0f0235e5a2 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:08:49 +0200
Subject: [PATCH 80/94] Remove wrong dependency
---
libraries/models/setup.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/libraries/models/setup.py b/libraries/models/setup.py
index 946a10f32..d78224f8d 100644
--- a/libraries/models/setup.py
+++ b/libraries/models/setup.py
@@ -16,8 +16,7 @@
"pyhumps >= 3.8.0",
"python-dateutil >= 2.8.2",
"PyYAML >= 6.0.1",
- "six >= 1.16.0",
- "swagger_ui_bundle >= 1.1.0",
+ "six >= 1.16.0"
]
print(REQUIREMENTS)
setup(name=NAME, version=VERSION,
From b9c3979972e86c0fc5a95e52039383246d58d2c6 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:09:36 +0200
Subject: [PATCH 81/94] CH-147 fix linting issue
---
.../deploy/resources/hub/jupyterhub_config.py | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
index 688ba72a8..2033ff1b9 100755
--- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
+++ b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
@@ -1,11 +1,5 @@
# load the config object (satisfies linters)
-from z2jh import (
- get_config,
- get_name,
- get_name_env,
- get_secret_value,
- set_config_if_not_none,
-)
+
c = get_config() # noqa
import glob
@@ -31,6 +25,13 @@
configuration_directory = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, configuration_directory)
+from z2jh import ( # noqa
+ get_config,
+ get_name,
+ get_name_env,
+ get_secret_value,
+ set_config_if_not_none,
+)
def camelCaseify(s):
"""convert snake_case to camelCase
From e838008ffdfd5281cc42d2d5688dae14bfd0b509 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:37:31 +0200
Subject: [PATCH 82/94] CH-17 fix some dependencies
---
.../flask-server/backend/requirements.txt | 6 ++++--
.../flask-server/backend/setup.py | 7 ++++---
applications/samples/backend/requirements.txt | 14 +++-----------
applications/samples/backend/setup.py | 7 +++++--
applications/samples/deploy/values-test.yaml | 2 --
5 files changed, 16 insertions(+), 20 deletions(-)
diff --git a/application-templates/flask-server/backend/requirements.txt b/application-templates/flask-server/backend/requirements.txt
index b3db72c8a..43562728e 100644
--- a/application-templates/flask-server/backend/requirements.txt
+++ b/application-templates/flask-server/backend/requirements.txt
@@ -1,5 +1,7 @@
connexion[swagger-ui]==2.14.2
-swagger-ui-bundle >= 0.0.2
+Flask == 2.2.5
+swagger-ui-bundle==0.0.9
python_dateutil >= 2.6.0
setuptools >= 21.0.0
-Flask<3.0.0
+
+
diff --git a/application-templates/flask-server/backend/setup.py b/application-templates/flask-server/backend/setup.py
index e7700bac9..998506e3c 100644
--- a/application-templates/flask-server/backend/setup.py
+++ b/application-templates/flask-server/backend/setup.py
@@ -14,11 +14,12 @@
# http://pypi.python.org/pypi/setuptools
REQUIRES = [
- "connexion>=2.0.2",
- "swagger-ui-bundle>=0.0.2",
+ "connexion[swagger-ui]<3.0.0",
+ "Flask>=2.2.5",
"python_dateutil>=2.6.0",
"pyjwt>=2.6.0",
- "cloudharness"
+ "swagger-ui-bundle>=0.0.2",
+ "cloudharness",
]
setup(
diff --git a/applications/samples/backend/requirements.txt b/applications/samples/backend/requirements.txt
index be4c8a3a0..f9d4fe449 100644
--- a/applications/samples/backend/requirements.txt
+++ b/applications/samples/backend/requirements.txt
@@ -1,11 +1,3 @@
-connexion[swagger-ui] >= 2.6.0; python_version>="3.6"
-# 2.3 is the last version that supports python 3.4-3.5
-connexion[swagger-ui] <= 2.3.0; python_version=="3.5" or python_version=="3.4"
-# connexion requires werkzeug but connexion < 2.4.0 does not install werkzeug
-# we must peg werkzeug versions below to fix connexion
-# https://github.com/zalando/connexion/pull/1044
-werkzeug == 0.16.1; python_version=="3.5" or python_version=="3.4"
-swagger-ui-bundle >= 0.0.2
-python_dateutil >= 2.6.0
-setuptools >= 21.0.0
-Flask == 2.1.1
+connexion[swagger-ui]==2.14.2
+Flask == 2.2.5
+swagger-ui-bundle==0.0.9
\ No newline at end of file
diff --git a/applications/samples/backend/setup.py b/applications/samples/backend/setup.py
index fe7e0ce52..be69d39f3 100644
--- a/applications/samples/backend/setup.py
+++ b/applications/samples/backend/setup.py
@@ -14,10 +14,13 @@
# http://pypi.python.org/pypi/setuptools
REQUIRES = [
- "connexion>=2.0.2",
+ "connexion[swagger-ui]==2.14.2",
+ "Flask >= 2.2.5",
"python_dateutil>=2.6.0",
"pyjwt>=2.6.0",
- "cloudharness"
+ "swagger-ui-bundle>=0.0.2",
+ "cloudharness",
+
]
setup(
diff --git a/applications/samples/deploy/values-test.yaml b/applications/samples/deploy/values-test.yaml
index 14274fd60..ebf25ca3a 100644
--- a/applications/samples/deploy/values-test.yaml
+++ b/applications/samples/deploy/values-test.yaml
@@ -3,9 +3,7 @@ harness:
soft:
- workflows
- events
- - accounts
- common
- - nfsserver
- jupyterhub
accounts:
roles:
From 4a041803e0608d6f080d94fef3d837a49d00a481 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:37:58 +0200
Subject: [PATCH 83/94] CH-17 fix python backend debug
---
deployment-configuration/values-template-local.yaml | 1 +
deployment-configuration/values-template.yaml | 1 +
infrastructure/base-images/cloudharness-base-debian/Dockerfile | 3 +++
infrastructure/base-images/cloudharness-base/Dockerfile | 3 +++
tools/deployment-cli-tools/ch_cli_tools/skaffold.py | 1 +
5 files changed, 9 insertions(+)
create mode 100644 deployment-configuration/values-template-local.yaml
diff --git a/deployment-configuration/values-template-local.yaml b/deployment-configuration/values-template-local.yaml
new file mode 100644
index 000000000..98b2266e1
--- /dev/null
+++ b/deployment-configuration/values-template-local.yaml
@@ -0,0 +1 @@
+debug: true
diff --git a/deployment-configuration/values-template.yaml b/deployment-configuration/values-template.yaml
index e69de29bb..4dee15469 100644
--- a/deployment-configuration/values-template.yaml
+++ b/deployment-configuration/values-template.yaml
@@ -0,0 +1 @@
+debug: false
diff --git a/infrastructure/base-images/cloudharness-base-debian/Dockerfile b/infrastructure/base-images/cloudharness-base-debian/Dockerfile
index 5b600d8a9..30e203483 100644
--- a/infrastructure/base-images/cloudharness-base-debian/Dockerfile
+++ b/infrastructure/base-images/cloudharness-base-debian/Dockerfile
@@ -26,4 +26,7 @@ RUN pip install -e /libraries/models --no-cache-dir
RUN pip install -e /libraries/cloudharness-common --no-cache-dir
RUN pip install -e /libraries/client/cloudharness_cli --no-cache-dir
+ARG DEBUG
+RUN if [ -n "$DEBUG" ] ; then pip install debugpy --prefer-binary ; else echo "Debug disabled" ; fi
+
WORKDIR /
\ No newline at end of file
diff --git a/infrastructure/base-images/cloudharness-base/Dockerfile b/infrastructure/base-images/cloudharness-base/Dockerfile
index ac7f94ac5..25392d992 100644
--- a/infrastructure/base-images/cloudharness-base/Dockerfile
+++ b/infrastructure/base-images/cloudharness-base/Dockerfile
@@ -33,4 +33,7 @@ COPY libraries/client/cloudharness_cli /libraries/client/cloudharness_cli
RUN pip install -e /libraries/cloudharness-common
RUN pip install -e /libraries/client/cloudharness_cli
+ARG DEBUG
+RUN if [ -n "$DEBUG" ] ; then pip install debugpy --prefer-binary ; else echo "Debug disabled" ; fi
+
WORKDIR /
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index fa8f2cd93..aefc86079 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -48,6 +48,7 @@ def build_artifact(
'REGISTRY': helm_values.registry.name,
'TAG': helm_values.tag,
'NOCACHE': str(time.time()),
+ 'DEBUG': 'true' if helm_values.local or helm_values.debug else ''
}
if additional_build_args:
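Note: with this patch skaffold passes DEBUG=true for local/debug deployments and the base images install debugpy only in that case. A runtime sketch for a service that wants to expose the debugger; port 5678 is debugpy's conventional default, an assumption here rather than part of the patch:

    import importlib.util
    import os

    if os.environ.get("DEBUG") and importlib.util.find_spec("debugpy"):
        import debugpy
        # 0.0.0.0 so the container port can be forwarded; 5678 is assumed
        debugpy.listen(("0.0.0.0", 5678))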
From 5a4700ffccf2da2fd5e14581b793185afb09ae29 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 14:38:20 +0200
Subject: [PATCH 84/94] Update test CI/CD
---
deployment/codefresh-test.yaml | 388 +++++++++++++++------------------
1 file changed, 178 insertions(+), 210 deletions(-)
diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml
index 6a4dc7770..900e3db71 100644
--- a/deployment/codefresh-test.yaml
+++ b/deployment/codefresh-test.yaml
@@ -1,17 +1,17 @@
-version: "1.0"
+version: '1.0'
stages:
- - prepare
- - build
- - unittest
- - deploy
- - qa
+- prepare
+- build
+- unittest
+- deploy
+- qa
steps:
main_clone:
title: Clone main repository
type: git-clone
stage: prepare
- repo: "${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}"
- revision: "${{CF_BRANCH}}"
+ repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
+ revision: '${{CF_BRANCH}}'
git: github
prepare_deployment:
title: Prepare helm chart
@@ -19,17 +19,17 @@ steps:
stage: prepare
working_directory: .
commands:
- - bash install.sh
- - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}}
- -rs ${{REGISTRY_SECRET}} -e test --write-env -N -i samples
- - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export
- - cat ${{CF_VOLUME_PATH}}/env_vars_to_export
+ - bash install.sh
+ - harness-deployment . -n test-${{NAMESPACE_BASENAME}} -d ${{DOMAIN}} -r ${{REGISTRY}}
+ -rs ${{REGISTRY_SECRET}} -e test --write-env -N -i samples
+ - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export
+ - cat ${{CF_VOLUME_PATH}}/env_vars_to_export
prepare_deployment_view:
commands:
- - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}}
+ - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}}
environment:
- - ACTION=auth
- - KUBE_CONTEXT=test-${{CF_BUILD_ID}}
+ - ACTION=auth
+ - KUBE_CONTEXT=test-${{CF_BUILD_ID}}
image: codefresh/cfstep-helm:3.6.2
stage: prepare
title: View helm chart
@@ -42,47 +42,43 @@ steps:
type: build
stage: build
dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
image_name: cloudharness/cloudharness-base
title: Cloudharness base
working_directory: ./.
- tag: "${{CLOUDHARNESS_BASE_TAG}}"
+ tag: '${{CLOUDHARNESS_BASE_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}')
== false
cloudharness-frontend-build:
type: build
stage: build
dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
image_name: cloudharness/cloudharness-frontend-build
title: Cloudharness frontend build
working_directory: ./.
- tag: "${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}"
+ tag: '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}',
+ buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}',
'{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true
- forceNoCache:
- includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}',
+ forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}',
'{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false
build_static_images:
title: Build static images
@@ -93,25 +89,23 @@ steps:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/cloudharness-flask
title: Cloudharness flask
working_directory: ./infrastructure/common-images/cloudharness-flask
- tag: "${{CLOUDHARNESS_FLASK_TAG}}"
+ tag: '${{CLOUDHARNESS_FLASK_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}')
== false
build_application_images:
type: parallel
@@ -121,24 +115,22 @@ steps:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
image_name: cloudharness/accounts
title: Accounts
working_directory: ./applications/accounts
- tag: "${{ACCOUNTS_TAG}}"
+ tag: '${{ACCOUNTS_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}')
== false
jupyterhub:
type: build
@@ -166,217 +158,199 @@ steps:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
image_name: cloudharness/samples
title: Samples
working_directory: ./applications/samples
- tag: "${{SAMPLES_TAG}}"
+ tag: '${{SAMPLES_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}')
== false
samples-print-file:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/samples-print-file
title: Samples print file
working_directory: ./applications/samples/tasks/print-file
- tag: "${{SAMPLES_PRINT_FILE_TAG}}"
+ tag: '${{SAMPLES_PRINT_FILE_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}')
== false
samples-secret:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/samples-secret
title: Samples secret
working_directory: ./applications/samples/tasks/secret
- tag: "${{SAMPLES_SECRET_TAG}}"
+ tag: '${{SAMPLES_SECRET_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}')
== false
samples-sum:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/samples-sum
title: Samples sum
working_directory: ./applications/samples/tasks/sum
- tag: "${{SAMPLES_SUM_TAG}}"
+ tag: '${{SAMPLES_SUM_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}')
== false
common:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
image_name: cloudharness/common
title: Common
working_directory: ./applications/common/server
- tag: "${{COMMON_TAG}}"
+ tag: '${{COMMON_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}')
== false
workflows-send-result-event:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/workflows-send-result-event
title: Workflows send result event
working_directory: ./applications/workflows/tasks/send-result-event
- tag: "${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}"
+ tag: '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}',
+ buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}',
'{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true
- forceNoCache:
- includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}',
+ forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}',
'{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false
workflows-extract-download:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
image_name: cloudharness/workflows-extract-download
title: Workflows extract download
working_directory: ./applications/workflows/tasks/extract-download
- tag: "${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}"
+ tag: '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}',
+ buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}',
'{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true
- forceNoCache:
- includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}',
+ forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}',
'{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false
workflows-notify-queue:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/workflows-notify-queue
title: Workflows notify queue
working_directory: ./applications/workflows/tasks/notify-queue
- tag: "${{WORKFLOWS_NOTIFY_QUEUE_TAG}}"
+ tag: '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}',
+ buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}',
'{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true
- forceNoCache:
- includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}',
+ forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}',
'{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false
workflows:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}}
image_name: cloudharness/workflows
title: Workflows
working_directory: ./applications/workflows/server
- tag: "${{WORKFLOWS_TAG}}"
+ tag: '${{WORKFLOWS_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}')
== false
tests_unit:
stage: unittest
@@ -385,8 +359,8 @@ steps:
samples_ut:
title: Unit tests for samples
commands:
- - pytest /usr/src/app/samples/test
- image: "${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}"
+ - pytest /usr/src/app/samples/test
+ image: '${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}'
deployment:
stage: deploy
type: helm
@@ -396,14 +370,14 @@ steps:
helm_version: 3.6.2
chart_name: deployment/helm
release_name: test-${{NAMESPACE_BASENAME}}
- kube_context: "${{CLUSTER_NAME}}"
+ kube_context: '${{CLUSTER_NAME}}'
namespace: test-${{NAMESPACE_BASENAME}}
- chart_version: "${{CF_SHORT_REVISION}}"
+ chart_version: '${{CF_SHORT_REVISION}}'
cmd_ps: --timeout 600s --create-namespace
custom_value_files:
- - ./deployment/helm/values.yaml
+ - ./deployment/helm/values.yaml
custom_values:
- - apps_samples_harness_secrets_asecret=${{ASECRET}}
+ - apps_samples_harness_secrets_asecret=${{ASECRET}}
build_test_images:
title: Build test images
type: parallel
@@ -413,119 +387,115 @@ steps:
type: build
stage: build
dockerfile: Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
image_name: cloudharness/test-e2e
title: Test e2e
working_directory: ./test/test-e2e
- tag: "${{TEST_E2E_TAG}}"
+ tag: '${{TEST_E2E_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}')
== false
test-api:
type: build
stage: build
dockerfile: test/test-api/Dockerfile
- registry: "${{CODEFRESH_REGISTRY}}"
+ registry: '${{CODEFRESH_REGISTRY}}'
buildkit: true
build_arguments:
- - DOMAIN=${{DOMAIN}}
- - NOCACHE=${{CF_BUILD_ID}}
- - REGISTRY=${{REGISTRY}}/cloudharness/
- - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
+ - DOMAIN=${{DOMAIN}}
+ - NOCACHE=${{CF_BUILD_ID}}
+ - REGISTRY=${{REGISTRY}}/cloudharness/
+ - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}}
image_name: cloudharness/test-api
title: Test api
working_directory: ./.
- tag: "${{TEST_API_TAG}}"
+ tag: '${{TEST_API_TAG}}'
when:
condition:
any:
- buildDoesNotExist:
- includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}')
+ buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}')
== true
- forceNoCache:
- includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}')
+ forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}')
== false
wait_deployment:
stage: qa
title: Wait deployment to be ready
image: codefresh/kubectl
commands:
- - kubectl config use-context ${{CLUSTER_NAME}}
- - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}}
- - kubectl rollout status deployment/accounts
- - kubectl rollout status deployment/samples
- - kubectl rollout status deployment/common
- - kubectl rollout status deployment/workflows
- - sleep 60
+ - kubectl config use-context ${{CLUSTER_NAME}}
+ - kubectl config set-context --current --namespace=test-${{NAMESPACE_BASENAME}}
+ - kubectl rollout status deployment/accounts
+ - kubectl rollout status deployment/samples
+ - kubectl rollout status deployment/common
+ - kubectl rollout status deployment/workflows
+ - sleep 60
tests_api:
stage: qa
title: Api tests
working_directory: /home/test
- image: "${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}"
+ image: '${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}'
fail_fast: false
commands:
- - echo $APP_NAME
+ - echo $APP_NAME
scale:
samples_api_test:
title: samples api test
volumes:
- - "${{CF_REPO_NAME}}/applications/samples:/home/test"
- - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml"
+ - '${{CF_REPO_NAME}}/applications/samples:/home/test'
+ - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml'
environment:
- - APP_URL=https://samples.${{DOMAIN}}/api
- - USERNAME=sample@testuser.com
- - PASSWORD=test
+ - APP_URL=https://samples.${{DOMAIN}}/api
+ - USERNAME=sample@testuser.com
+ - PASSWORD=test
commands:
- - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
- https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --hypothesis-suppress-health-check=too_slow
- --hypothesis-deadline=180000 --request-timeout=180000 --hypothesis-max-examples=2
- --show-errors-tracebacks
- - pytest -v test/api
+ - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
+ https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --hypothesis-suppress-health-check=too_slow
+ --hypothesis-deadline=180000 --request-timeout=180000 --hypothesis-max-examples=2
+ --show-errors-tracebacks
+ - pytest -v test/api
common_api_test:
title: common api test
volumes:
- - "${{CF_REPO_NAME}}/applications/common:/home/test"
- - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml"
+ - '${{CF_REPO_NAME}}/applications/common:/home/test'
+ - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml'
environment:
- - APP_URL=https://common.${{DOMAIN}}/api
+ - APP_URL=https://common.${{DOMAIN}}/api
commands:
- - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
- https://common.${{DOMAIN}}/api -c all
+ - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
+ https://common.${{DOMAIN}}/api -c all
workflows_api_test:
title: workflows api test
volumes:
- - "${{CF_REPO_NAME}}/applications/workflows:/home/test"
- - "${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml"
+ - '${{CF_REPO_NAME}}/applications/workflows:/home/test'
+ - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml'
environment:
- - APP_URL=https://workflows.${{DOMAIN}}/api
+ - APP_URL=https://workflows.${{DOMAIN}}/api
commands:
- - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
- https://workflows.${{DOMAIN}}/api -c all
+ - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
+ https://workflows.${{DOMAIN}}/api -c all
hooks:
on_fail:
exec:
image: alpine
commands:
- - cf_export FAILED=failed
+ - cf_export FAILED=failed
tests_e2e:
stage: qa
title: End to end tests
working_directory: /home/test
- image: "${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}"
+ image: '${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}'
fail_fast: false
commands:
- - yarn test
+ - yarn test
scale:
jupyterhub_e2e_test:
title: jupyterhub e2e test
@@ -533,22 +503,20 @@ steps:
- '${{CF_REPO_NAME}}/applications/jupyterhub/test/e2e:/home/test/__tests__/jupyterhub'
environment:
- APP_URL=https://hub.${{DOMAIN}}
- - USERNAME=sample@testuser.com
- - PASSWORD=test
samples_e2e_test:
title: samples e2e test
volumes:
- - "${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples"
+ - '${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples'
environment:
- - APP_URL=https://samples.${{DOMAIN}}
- - USERNAME=sample@testuser.com
- - PASSWORD=test
+ - APP_URL=https://samples.${{DOMAIN}}
+ - USERNAME=sample@testuser.com
+ - PASSWORD=test
hooks:
on_fail:
exec:
image: alpine
commands:
- - cf_export FAILED=failed
+ - cf_export FAILED=failed
approval:
type: pending-approval
stage: qa
@@ -556,7 +524,7 @@ steps:
description: The pipeline will fail after ${{WAIT_ON_FAIL}} minutes
timeout:
timeUnit: minutes
- duration: "${{WAIT_ON_FAIL}}"
+ duration: '${{WAIT_ON_FAIL}}'
finalState: denied
when:
condition:
@@ -568,5 +536,5 @@ steps:
image: codefresh/kubectl
stage: qa
commands:
- - kubectl config use-context ${{CLUSTER_NAME}}
- - kubectl delete ns test-${{NAMESPACE_BASENAME}}
+ - kubectl config use-context ${{CLUSTER_NAME}}
+ - kubectl delete ns test-${{NAMESPACE_BASENAME}}
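Most of the hunk above is a quote-style flip (`"…"` to `'…'`) produced when the codefresh pipeline YAML is re-emitted by the generator; double- and single-quoted scalars are equivalent in YAML, so the regenerated pipeline behaves identically. A minimal sketch (plain PyYAML, not the project's actual emitter settings) confirming the equivalence:

```python
import yaml

# Both quoting styles parse to the same string, so the diff is purely cosmetic.
double = yaml.safe_load('tag: "${{SAMPLES_SECRET_TAG}}"')
single = yaml.safe_load("tag: '${{SAMPLES_SECRET_TAG}}'")
assert double == single == {"tag": "${{SAMPLES_SECRET_TAG}}"}
```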
From 65bcc2b2e6cd6662ba4185da20aa58dcb4f11f96 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 15:02:58 +0200
Subject: [PATCH 85/94] CH-17 fix build issue
---
infrastructure/base-images/cloudharness-base-debian/Dockerfile | 2 +-
infrastructure/base-images/cloudharness-base/Dockerfile | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/infrastructure/base-images/cloudharness-base-debian/Dockerfile b/infrastructure/base-images/cloudharness-base-debian/Dockerfile
index 30e203483..6955614ab 100644
--- a/infrastructure/base-images/cloudharness-base-debian/Dockerfile
+++ b/infrastructure/base-images/cloudharness-base-debian/Dockerfile
@@ -27,6 +27,6 @@ RUN pip install -e /libraries/cloudharness-common --no-cache-dir
RUN pip install -e /libraries/client/cloudharness_cli --no-cache-dir
ARG $DEBUG
-RUN if [[ -z "$DEBUG" ]] ; pip install debugpy --prefer-binary ; else echo "Debug not supported" ; fi
+RUN if [[ -z "$DEBUG" ]] ; then pip install debugpy --prefer-binary ; else echo "Debug not supported" ; fi
WORKDIR /
\ No newline at end of file
diff --git a/infrastructure/base-images/cloudharness-base/Dockerfile b/infrastructure/base-images/cloudharness-base/Dockerfile
index 25392d992..a4537877b 100644
--- a/infrastructure/base-images/cloudharness-base/Dockerfile
+++ b/infrastructure/base-images/cloudharness-base/Dockerfile
@@ -34,6 +34,6 @@ RUN pip install -e /libraries/cloudharness-common
RUN pip install -e /libraries/client/cloudharness_cli
ARG $DEBUG
-RUN if [[ -z "$DEBUG" ]] ; pip install debugpy --prefer-binary ; else echo "Debug not supported" ; fi
+RUN if [[ -z "$DEBUG" ]] ; then pip install debugpy --prefer-binary ; else echo "Debug not supported" ; fi
WORKDIR /
From 1562e95ba0cfb40324156b57f8f25038bd099e32 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 19:17:38 +0200
Subject: [PATCH 86/94] CH-17 fix broken API tests
---
applications/samples/api/openapi.yaml | 3 ---
.../backend/samples/controllers/auth_controller.py | 8 +++++++-
applications/samples/deploy/values.yaml | 6 +++++-
deployment/codefresh-test.yaml | 8 +++++---
.../tests/test_infrastructure.py | 2 +-
tools/cloudharness-test/cloudharness_test/api.py | 4 ++--
.../cloudharness_test/apitest_init.py | 14 +++++++++-----
7 files changed, 29 insertions(+), 16 deletions(-)
diff --git a/applications/samples/api/openapi.yaml b/applications/samples/api/openapi.yaml
index 3332cd2b6..f51b328df 100644
--- a/applications/samples/api/openapi.yaml
+++ b/applications/samples/api/openapi.yaml
@@ -23,7 +23,6 @@ paths:
description: This won't happen
"500":
description: Sentry entry should come!
- deprecated: true
operationId: error
summary: test sentry is working
x-openapi-router-controller: samples.controllers.test_controller
@@ -231,13 +230,11 @@ paths:
schema:
type: object
description: Operation result
- deprecated: true
operationId: submit_sync
summary: Send a synchronous operation
x-openapi-router-controller: samples.controllers.workflows_controller
/operation_sync_results:
get:
- deprecated: true
tags:
- workflows
parameters:
diff --git a/applications/samples/backend/samples/controllers/auth_controller.py b/applications/samples/backend/samples/controllers/auth_controller.py
index e2930680d..e144a976c 100644
--- a/applications/samples/backend/samples/controllers/auth_controller.py
+++ b/applications/samples/backend/samples/controllers/auth_controller.py
@@ -13,6 +13,8 @@ def valid_token(): # noqa: E501
:rtype: List[Valid]
"""
+ from cloudharness.middleware import get_authentication_token
+ token = get_authentication_token()
return 'OK!'
@@ -24,4 +26,8 @@ def valid_cookie(): # noqa: E501
:rtype: List[Valid]
"""
- return 'OK!'
+ from cloudharness.middleware import get_authentication_token
+ from cloudharness.auth import decode_token
+ token = get_authentication_token()
+ assert decode_token(token)
+ return 'OK'
diff --git a/applications/samples/deploy/values.yaml b/applications/samples/deploy/values.yaml
index 3d1717ec8..1006e7a1f 100644
--- a/applications/samples/deploy/values.yaml
+++ b/applications/samples/deploy/values.yaml
@@ -79,11 +79,15 @@ harness:
- all
runParams:
- "--skip-deprecated-operations"
+ - "--exclude-operation-id=submit_sync"
+ - "--exclude-operation-id=submit_sync_with_results"
+ - "--exclude-operation-id=error"
- "--hypothesis-suppress-health-check=too_slow"
- "--hypothesis-deadline=180000"
- "--request-timeout=180000"
- "--hypothesis-max-examples=2"
- - "--show-errors-tracebacks"
+ - "--show-trace"
+ - "--exclude-checks=ignored_auth" # ignored_auth is not working on schemathesis 3.36.0
dockerfile:
buildArgs:
diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml
index 900e3db71..f7ecda38c 100644
--- a/deployment/codefresh-test.yaml
+++ b/deployment/codefresh-test.yaml
@@ -13,6 +13,7 @@ steps:
repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}'
revision: '${{CF_BRANCH}}'
git: github
+
prepare_deployment:
title: Prepare helm chart
image: python:3.12
@@ -458,9 +459,10 @@ steps:
- PASSWORD=test
commands:
- st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url
- https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --hypothesis-suppress-health-check=too_slow
- --hypothesis-deadline=180000 --request-timeout=180000 --hypothesis-max-examples=2
- --show-errors-tracebacks
+ https://samples.${{DOMAIN}}/api -c all --skip-deprecated-operations --exclude-operation-id=submit_sync
+ --exclude-operation-id=submit_sync_with_results --exclude-operation-id=error
+ --hypothesis-suppress-health-check=too_slow --hypothesis-deadline=180000
+ --request-timeout=180000 --hypothesis-max-examples=2 --show-trace --exclude-checks=ignored_auth
- pytest -v test/api
common_api_test:
title: common api test
diff --git a/libraries/cloudharness-common/tests/test_infrastructure.py b/libraries/cloudharness-common/tests/test_infrastructure.py
index ab060d8e8..dfcf6e1fc 100644
--- a/libraries/cloudharness-common/tests/test_infrastructure.py
+++ b/libraries/cloudharness-common/tests/test_infrastructure.py
@@ -1,8 +1,8 @@
-from cloudharness.infrastructure import k8s
from .test_env import set_test_environment
set_test_environment()
+from cloudharness.infrastructure import k8s # noqa: E402
kubectl_enabled = False
diff --git a/tools/cloudharness-test/cloudharness_test/api.py b/tools/cloudharness-test/cloudharness_test/api.py
index 216a669c2..af80f6bd3 100644
--- a/tools/cloudharness-test/cloudharness_test/api.py
+++ b/tools/cloudharness-test/cloudharness_test/api.py
@@ -42,7 +42,7 @@ def run_api_tests(root_paths, helm_values: HarnessMainConfig, base_domain, inclu
api_filename = get_api_filename(app_dir)
if not app_config.domain and not app_config.subdomain:
- logging.warn(
+ logging.warning(
"Application %s has a api specification but no subdomain/domain is specified", appname)
continue
@@ -65,7 +65,7 @@ def run_api_tests(root_paths, helm_values: HarnessMainConfig, base_domain, inclu
for path in root_paths:
# use local schema if available to simplify test development
if os.path.exists(os.path.join(path, schema_file)):
- app_env["APP_SCHEMA_FILE"] = schema_file
+ app_env["APP_SCHEMA_FILE"] = os.path.abspath(schema_file)
if api_config.autotest:
logging.info("Running auto api tests")
diff --git a/tools/cloudharness-test/cloudharness_test/apitest_init.py b/tools/cloudharness-test/cloudharness_test/apitest_init.py
index e39e4d920..e137464b7 100644
--- a/tools/cloudharness-test/cloudharness_test/apitest_init.py
+++ b/tools/cloudharness-test/cloudharness_test/apitest_init.py
@@ -10,16 +10,22 @@
app_schema = os.environ.get("APP_SCHEMA_FILE", None)
app_url = os.environ.get("APP_URL", "http://samples.ch.local/api")
logging.info("Start schemathesis tests on %s", app_url)
+ schema = None
if app_schema:
# Test locally with harness-test -- use local schema for convenience during test development
openapi_uri = app_schema
- schema = st.from_file(openapi_uri)
- else:
+ try:
+ schema = st.from_file(openapi_uri)
+ except st.exceptions.SchemaError:
+        logging.exception("The local schema file %s cannot be loaded. Attempting to load from URL", openapi_uri)
+
+ if not schema:
# remote testing: might be /api/openapi.json or /openapi.json
try:
        openapi_uri = app_url + "/openapi.json"
+ logging.info("Using openapi spec at %s", openapi_uri)
schema = st.from_uri(openapi_uri)
- except st.exceptions.SchemaError as e:
+ except st.exceptions.SchemaError:
# Use alternative configuration
try:
openapi_uri = app_url.replace("/api", "") + "/openapi.json"
@@ -33,8 +39,6 @@
raise Exception(
f"Cannot setup api tests: {openapi_uri}: {e}") from e
- logging.info("Using openapi spec at %s", openapi_uri)
-
if "USERNAME" in os.environ and "PASSWORD" in os.environ:
logging.info("Setting token from username and password")
From 0f88e072dc7222fb5b5d1937f978df75d70c1b88 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 19:17:49 +0200
Subject: [PATCH 87/94] Improve testing docs
---
docs/testing.md | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/testing.md b/docs/testing.md
index 27356fea1..0a941f901 100644
--- a/docs/testing.md
+++ b/docs/testing.md
@@ -83,11 +83,11 @@ harness:
- "--hypothesis-deadline=60000"
- "--request-timeout=60000"
- "--hypothesis-max-examples=2"
- - "--show-errors-tracebacks"
+ - "--show-trace"
```
-See [the model documentation](model/ApiTestsConfig.md) for more insights about parameters.
-
+See [the model documentation](model/ApiTestsConfig.md) for more details about test parameters.
+See the [Schemathesis documentation](https://schemathesis.readthedocs.io/en/stable/cli.html#basic-usage) for more information about command line parameters (runParams).
### Write API tests
From 2535a1628ab252397a7eefd146c98c67b668a252 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Mon, 16 Sep 2024 19:18:42 +0200
Subject: [PATCH 88/94] Improve cookie auth automations
---
.../cloudharness-common/cloudharness/middleware/django.py | 3 +--
libraries/cloudharness-common/cloudharness/middleware/flask.py | 3 ++-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/libraries/cloudharness-common/cloudharness/middleware/django.py b/libraries/cloudharness-common/cloudharness/middleware/django.py
index a3b528e1f..4a24ac861 100644
--- a/libraries/cloudharness-common/cloudharness/middleware/django.py
+++ b/libraries/cloudharness-common/cloudharness/middleware/django.py
@@ -1,4 +1,3 @@
-from django.conf import settings
from cloudharness.middleware import set_authentication_token
@@ -13,7 +12,7 @@ def __call__(self, request):
# retrieve the bearer token from the header
# and save it for use in the AuthClient
- set_authentication_token(request.headers.get('Authorization'))
+ set_authentication_token(request.headers.get('Authorization', '').split(' ')[-1] or request.cookies.get('kc-access', None))
response = self.get_response(request)
diff --git a/libraries/cloudharness-common/cloudharness/middleware/flask.py b/libraries/cloudharness-common/cloudharness/middleware/flask.py
index 670e93603..cad901a1e 100644
--- a/libraries/cloudharness-common/cloudharness/middleware/flask.py
+++ b/libraries/cloudharness-common/cloudharness/middleware/flask.py
@@ -15,6 +15,7 @@ def __call__(self, environ, start_response):
# retrieve the bearer token from the header
# and save it for use in the AuthClient
- set_authentication_token(request.headers.get('Authorization'))
+ #
+ set_authentication_token(request.headers.get('Authorization', '').split(' ')[-1] or request.cookies.get('kc-access', None))
return self.app(environ, start_response)
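The middleware change above (applied to both the Django and Flask variants) stops storing the raw `Authorization` header and instead extracts the bearer token, falling back to Keycloak's `kc-access` cookie for cookie-authenticated requests. A minimal sketch of just that lookup, with illustrative assertions:

```python
from typing import Optional

def extract_token(headers: dict, cookies: dict) -> Optional[str]:
    # 'Bearer <token>'.split(' ')[-1] yields '<token>'; a missing header yields ''
    # (falsy), which falls through to the Keycloak access cookie.
    return headers.get('Authorization', '').split(' ')[-1] or cookies.get('kc-access', None)

assert extract_token({'Authorization': 'Bearer abc'}, {}) == 'abc'
assert extract_token({}, {'kc-access': 'xyz'}) == 'xyz'
assert extract_token({}, {}) is None
```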
From 18c47cd0f2d04038248742909809d158b1957667 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 10:26:47 +0200
Subject: [PATCH 89/94] Lint fixes
---
.../jupyterhub/deploy/resources/hub/jupyterhub_config.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
index 2033ff1b9..c3318bac7 100755
--- a/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
+++ b/applications/jupyterhub/deploy/resources/hub/jupyterhub_config.py
@@ -25,7 +25,7 @@
configuration_directory = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, configuration_directory)
-from z2jh import ( # noqa
+from z2jh import ( # noqa
get_config,
get_name,
get_name_env,
@@ -33,6 +33,7 @@
set_config_if_not_none,
)
+
def camelCaseify(s):
"""convert snake_case to camelCase
From 0eae79b7470db8cd78d989e1987618d75f0e3374 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 10:26:57 +0200
Subject: [PATCH 90/94] Lint fixes
---
lint-check.sh | 2 +-
.../ch_cli_tools/codefresh.py | 23 ++++----
.../deployment-cli-tools/ch_cli_tools/helm.py | 2 +-
.../deployment-cli-tools/harness-application | 2 +-
tools/deployment-cli-tools/harness-deployment | 59 +++++++++----------
5 files changed, 42 insertions(+), 46 deletions(-)
diff --git a/lint-check.sh b/lint-check.sh
index 426bfaff8..869f51b77 100644
--- a/lint-check.sh
+++ b/lint-check.sh
@@ -4,6 +4,6 @@ diff_output=$(autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --diff --excl
# Check if the output is non-empty
if [ -n "$diff_output" ]; then
echo $diff_output
- echo "Code style issues found in the above files. Please run autopep8 to fix them."
+ echo "Code style issues found in the above files. Please run autopep8 to fix."
exit 1
fi
\ No newline at end of file
diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
index e4dca5143..e41fdb5d5 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py
@@ -245,9 +245,6 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in
environment=e2e_test_environment(app_config)
)
-
-
-
def add_unit_test_step(app_config: ApplicationHarnessConfig):
# Create a run step for each application with tests/unit.yaml file using the corresponding image built at the previous step
@@ -264,24 +261,24 @@ def add_unit_test_step(app_config: ApplicationHarnessConfig):
if helm_values[KEY_TASK_IMAGES]:
codefresh_steps_from_base_path(join(root_path, BASE_IMAGES_PATH), CD_BUILD_STEP_BASE,
- fixed_context=relpath(root_path, os.getcwd()), include=helm_values[KEY_TASK_IMAGES].keys())
+ fixed_context=relpath(root_path, os.getcwd()), include=helm_values[KEY_TASK_IMAGES].keys())
codefresh_steps_from_base_path(join(root_path, STATIC_IMAGES_PATH), CD_BUILD_STEP_STATIC,
- include=helm_values[KEY_TASK_IMAGES].keys())
+ include=helm_values[KEY_TASK_IMAGES].keys())
codefresh_steps_from_base_path(join(
root_path, APPS_PATH), CD_BUILD_STEP_PARALLEL)
if CD_E2E_TEST_STEP in steps:
- name = "test-e2e"
- codefresh_steps_from_base_path(join(
- root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), publish=False)
- steps[CD_E2E_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
+ name = "test-e2e"
+ codefresh_steps_from_base_path(join(
+ root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), publish=False)
+ steps[CD_E2E_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
if CD_API_TEST_STEP in steps:
- name = "test-api"
- codefresh_steps_from_base_path(join(
- root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False)
- steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
+ name = "test-api"
+ codefresh_steps_from_base_path(join(
+ root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False)
+ steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name)
if not codefresh:
logging.warning(
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index df2b20807..d1725c67e 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -161,7 +161,7 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
if os.path.exists(specific_template_path):
logging.info("Specific values template found: " +
- specific_template_path)
+ specific_template_path)
values = get_template(specific_template_path)
else:
values = {}
diff --git a/tools/deployment-cli-tools/harness-application b/tools/deployment-cli-tools/harness-application
index ac4824cb4..551d0ae1d 100644
--- a/tools/deployment-cli-tools/harness-application
+++ b/tools/deployment-cli-tools/harness-application
@@ -25,7 +25,7 @@ if __name__ == "__main__":
description='Creates a new Application.')
parser.add_argument('name', metavar='name', type=str,
help='Application name')
- parser.add_argument('-t', '--template', dest='templates', action="append", default=['base',],
+ parser.add_argument('-t', '--template', dest='templates', action="append", default=['base', ],
help="""Add a template name.
Available templates:
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index 40d4b09a9..908caed0e 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -62,11 +62,10 @@ if __name__ == "__main__":
parser.add_argument('-N', '--no-cd', dest='no_cd_gen', action="store_const", default=None, const=True,
help=f'Do not generate ci/cd files')
parser.add_argument('-we', '--write-env', dest='write_env', action="store_const", default=None, const=True,
- help=f'Write build env to .env file in {DEPLOYMENT_PATH}')
+ help=f'Write build env to .env file in {DEPLOYMENT_PATH}')
parser.add_argument('--docker-compose', dest='docker_compose', action="store_true",
help='Generate docker-compose.yaml and dedicated Skaffold configuration')
-
args, unknown = parser.parse_known_args(sys.argv[1:])
root_paths = [os.path.join(os.getcwd(), path) for path in args.paths]
@@ -87,36 +86,36 @@ if __name__ == "__main__":
if not args.docker_compose:
helm_values = create_helm_chart(
- root_paths,
- tag=args.tag,
- registry=args.registry,
- domain=args.domain,
- local=args.local,
- secured=not args.unsecured,
- output_path=args.output_path,
- exclude=args.exclude,
- include=args.include,
- registry_secret=args.registry_secret,
- tls=not args.no_tls,
- env=envs,
- namespace=args.namespace
- )
+ root_paths,
+ tag=args.tag,
+ registry=args.registry,
+ domain=args.domain,
+ local=args.local,
+ secured=not args.unsecured,
+ output_path=args.output_path,
+ exclude=args.exclude,
+ include=args.include,
+ registry_secret=args.registry_secret,
+ tls=not args.no_tls,
+ env=envs,
+ namespace=args.namespace
+ )
else:
helm_values = create_docker_compose_configuration(
- root_paths,
- tag=args.tag,
- registry=args.registry,
- domain=args.domain,
- local=args.local,
- secured=not args.unsecured,
- output_path=args.output_path,
- exclude=args.exclude,
- include=args.include,
- registry_secret=args.registry_secret,
- tls=not args.no_tls,
- env=envs,
- namespace=args.namespace,
- )
+ root_paths,
+ tag=args.tag,
+ registry=args.registry,
+ domain=args.domain,
+ local=args.local,
+ secured=not args.unsecured,
+ output_path=args.output_path,
+ exclude=args.exclude,
+ include=args.include,
+ registry_secret=args.registry_secret,
+ tls=not args.no_tls,
+ env=envs,
+ namespace=args.namespace,
+ )
merged_root_paths = preprocess_build_overrides(
root_paths=root_paths, helm_values=helm_values)
From 35292005ea5e624c78d9258e9b175c04791f7e31 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 14:21:29 +0200
Subject: [PATCH 91/94] CH-110 small changes to docker compose generation
---
deployment-configuration/compose/Chart.yaml | 14 +++---
deployment-configuration/compose/README.md | 2 +-
deployment-configuration/compose/values.yaml | 47 +------------------
deployment-configuration/helm/Chart.yaml | 12 ++---
deployment-configuration/helm/values.yaml | 7 +--
.../skaffold-template.yaml | 3 +-
.../ch_cli_tools/dockercompose.py | 4 +-
.../ch_cli_tools/skaffold.py | 22 +++++----
8 files changed, 32 insertions(+), 79 deletions(-)
diff --git a/deployment-configuration/compose/Chart.yaml b/deployment-configuration/compose/Chart.yaml
index 83bf4933e..1b396ffe1 100644
--- a/deployment-configuration/compose/Chart.yaml
+++ b/deployment-configuration/compose/Chart.yaml
@@ -1,10 +1,8 @@
apiVersion: v1
-appVersion: "0.0.1"
-description: CloudHarness Docker Compose
-name: cloudharness
-version: 0.0.1
+appVersion: 2.4.0-compose
+description: CloudHarness Docker Compose Helm Chart
maintainers:
- - name: Filippo Ledda
- email: filippo@metacell.us
- - name: Zoran Sinnema
- email: zoran@metacell.us
+- {email: filippo@metacell.us, name: Filippo Ledda}
+- {email: vincent@metacell.us, name: Vincent Aranega}
+name: cloudharness
+version: 0.0.1
\ No newline at end of file
diff --git a/deployment-configuration/compose/README.md b/deployment-configuration/compose/README.md
index 391b61c6b..5387ebbe6 100644
--- a/deployment-configuration/compose/README.md
+++ b/deployment-configuration/compose/README.md
@@ -1,3 +1,3 @@
# CloudHarness Docker Compose: deploy CloudHarness to Docker Compose
-Helm is used to define templates about how the CloudHarness deployment on Docker Compose. For further information about Helm, see https://helm.sh.
+Helm is used to define the templates that describe how CloudHarness is deployed on Docker Compose. For further information about Helm, see https://helm.sh.
\ No newline at end of file
diff --git a/deployment-configuration/compose/values.yaml b/deployment-configuration/compose/values.yaml
index 434dcac7c..0fa87e29f 100644
--- a/deployment-configuration/compose/values.yaml
+++ b/deployment-configuration/compose/values.yaml
@@ -21,52 +21,7 @@ apps: {}
env:
# -- Cloud Harness version
- name: CH_VERSION
- value: 0.0.1
- # -- Cloud harness chart version
- - name: CH_CHART_VERSION
- value: 0.0.1
-privenv:
- # -- Defines a secret as private environment variable that is injected in containers.
- - name: CH_SECRET
- value: In God we trust; all others must bring data. ― W. Edwards Deming
-ingress:
- # -- Flag to enable/disalbe ingress controller.
- enabled: true
- # -- K8s Name of ingress.
- name: cloudharness-ingress
- # -- Enables/disables SSL redirect.
- ssl_redirect: true
- letsencrypt:
- # -- Email for letsencrypt.
- email: filippo@metacell.us
-backup:
- # -- Flag to enable/disable backups.
- active: false
- # -- Number of days to keep backups.
- keep_days: "7"
- # -- Number of weeks to keep backups.
- keep_weeks: "4"
- # -- Number of months to keep backups.
- keep_months: "6"
- # -- Schedule as cronjob expression.
- schedule: "*/5 * * * *"
- # -- The file suffix added to backup files.
- suffix: ".gz"
- # -- The volume size for backups (all backups share the same volume)
- volumesize: "2Gi"
- # -- Target directory of backups, the mount point of the persistent volume.
- dir: "/backups"
- resources:
- requests:
- # -- K8s memory resource definition.
- memory: "32Mi"
- # -- K8s cpu resource definition.
- cpu: "25m"
- limits:
- # -- K8s memory resource definition.
- memory: "64Mi"
- # -- K8s cpu resource definition.
- cpu: "50m"
+ value: 2.4.0
proxy:
timeout:
# -- Timeout for proxy connections in seconds.
diff --git a/deployment-configuration/helm/Chart.yaml b/deployment-configuration/helm/Chart.yaml
index f294c3e78..31dbc5ff6 100644
--- a/deployment-configuration/helm/Chart.yaml
+++ b/deployment-configuration/helm/Chart.yaml
@@ -1,10 +1,8 @@
apiVersion: v1
-appVersion: "0.0.1"
+appVersion: 2.4.0
description: CloudHarness Helm Chart
-name: cloudharness
-version: 0.0.1
maintainers:
- - name: Filippo Ledda
- email: filippo@metacell.us
- - name: Zoran Sinnema
- email: zoran@metacell.us
+- {email: filippo@metacell.us, name: Filippo Ledda}
+- {email: zoran@metacell.us, name: Zoran Sinnema}
+name: cloudharness
+version: 0.0.1
\ No newline at end of file
diff --git a/deployment-configuration/helm/values.yaml b/deployment-configuration/helm/values.yaml
index 59fd7a67e..86ada06f8 100644
--- a/deployment-configuration/helm/values.yaml
+++ b/deployment-configuration/helm/values.yaml
@@ -21,10 +21,7 @@ apps: {}
env:
# -- Cloud Harness version
- name: CH_VERSION
- value: 0.0.1
- # -- Cloud harness chart version
- - name: CH_CHART_VERSION
- value: 0.0.1
+ value: 2.4.0
privenv:
# -- Defines a secret as private environment variable that is injected in containers.
- name: CH_SECRET
@@ -38,7 +35,7 @@ ingress:
ssl_redirect: true
letsencrypt:
# -- Email for letsencrypt.
- email: filippo@metacell.us
+ email: cloudharness@metacell.us
backup:
# -- Flag to enable/disable backups.
active: false
diff --git a/deployment-configuration/skaffold-template.yaml b/deployment-configuration/skaffold-template.yaml
index 6331c4d8b..2b2a3a25f 100644
--- a/deployment-configuration/skaffold-template.yaml
+++ b/deployment-configuration/skaffold-template.yaml
@@ -1,8 +1,7 @@
apiVersion: skaffold/v2beta28
kind: Config
build:
- tagPolicy:
- sha256: {}
+ tagPolicy: {}
local:
useBuildkit: true
concurrency: 4
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index a4a3e1b8e..e714f1f1b 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -16,7 +16,9 @@
from .models import HarnessMainConfig
-from .configurationgenerator import ConfigurationGenerator, validate_helm_values, KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates
+from .configurationgenerator import ConfigurationGenerator, \
+ validate_helm_values, values_from_legacy, values_set_legacy, get_included_with_dependencies, create_env_variables, collect_apps_helm_templates, \
+ KEY_HARNESS, KEY_SERVICE, KEY_DATABASE, KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, KEY_DEPLOYMENT
def create_docker_compose_configuration(root_paths, tag: Union[str, int, None] = 'latest', registry='', local=True, domain=None, exclude=(), secured=True,
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index aefc86079..0c471f422 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -9,8 +9,8 @@
from cloudharness_utils.constants import APPS_PATH, DEPLOYMENT_CONFIGURATION_PATH, \
BASE_IMAGES_PATH, STATIC_IMAGES_PATH, HELM_ENGINE, COMPOSE_ENGINE
from .helm import KEY_APPS, KEY_HARNESS, KEY_DEPLOYMENT, KEY_TASK_IMAGES
-from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, \
- find_file_paths, guess_build_dependencies_from_dockerfile, merge_to_yaml_file, get_json_template, get_image_name
+from .utils import get_template, dict_merge, find_dockerfiles_paths, app_name_from_path, yaml, \
+ find_file_paths, guess_build_dependencies_from_dockerfile, get_json_template, get_image_name
from . import HERE
@@ -22,12 +22,13 @@ def relpath_if(p1, p2):
def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, output_path='.', manage_task_images=True, backend_deploy=HELM_ENGINE):
- skaffold_conf = get_template('skaffold-template.yaml', True)
+ backend = backend_deploy or HELM_ENGINE
+ template_name = 'skaffold-template.yaml'
+ skaffold_conf = get_template(template_name, True)
apps = helm_values.apps
base_image_name = (helm_values.registry.name or "") + helm_values.name
artifacts = {}
overrides = {}
- backend = backend_deploy or HELM_ENGINE
def remove_tag(image_name):
return image_name.split(":")[0]
@@ -101,7 +102,7 @@ def process_build_dockerfile(
for root_path in root_paths:
skaffold_conf = dict_merge(skaffold_conf, get_template(
- join(root_path, DEPLOYMENT_CONFIGURATION_PATH, 'skaffold-template.yaml')))
+ join(root_path, DEPLOYMENT_CONFIGURATION_PATH, template_name)))
base_dockerfiles = find_dockerfiles_paths(
join(root_path, BASE_IMAGES_PATH))
@@ -219,15 +220,19 @@ def identify_unicorn_based_main(candidates):
'images': [artifact['image'] for artifact in artifacts.values() if artifact['image']]
}
}
+ if backend == COMPOSE_ENGINE or not helm_values.tag:
skaffold_conf['build']['tagPolicy'] = {
'envTemplate': {
'template': '"{{.TAG}}"'
}
}
+ else:
+ skaffold_conf['build']['tagPolicy'] = {"sha256": {}}
skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
- merge_to_yaml_file(skaffold_conf, os.path.join(
- output_path, 'skaffold.yaml'))
+
+ with open('skaffold.yaml', "w") as f:
+ yaml.dump(skaffold_conf, f)
return skaffold_conf
@@ -244,8 +249,7 @@ def git_clone_hook(conf: GitDependencyConfig, context_path: str):
def create_vscode_debug_configuration(root_paths, helm_values):
- logging.info(
- "Creating VS code cloud build configuration.\nCloud build extension is needed to debug.")
+ logging.info("Creating VS code cloud build configuration.\nCloud build extension is needed to debug.")
vscode_launch_path = '.vscode/launch.json'
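The `tagPolicy` branch introduced above keys off the deploy backend: compose builds (or builds without a fixed tag) resolve the image tag from the `TAG` environment variable, while helm builds keep content-digest tags. A sketch of the selection in isolation (the `'docker-compose'` value for `COMPOSE_ENGINE` is an assumption here, not taken from `cloudharness_utils.constants`):

```python
COMPOSE_ENGINE = "docker-compose"  # assumed value for illustration

def tag_policy(backend: str, tag) -> dict:
    if backend == COMPOSE_ENGINE or not tag:
        # Resolve the image tag from the TAG environment variable at build time.
        return {"envTemplate": {"template": '"{{.TAG}}"'}}
    # Otherwise let Skaffold tag images by content digest.
    return {"sha256": {}}

assert tag_policy(COMPOSE_ENGINE, "1.0") == {"envTemplate": {"template": '"{{.TAG}}"'}}
assert tag_policy("helm", "1.0") == {"sha256": {}}  # any non-compose backend with a tag
```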
From 08b964570f59b530424dbfb6aec29aa0184998f9 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 17:43:59 +0200
Subject: [PATCH 92/94] CH-100 refactoring, small fixes
---
.dockerignore | 3 +-
.gitignore | 2 +-
.../compose/templates/auto-compose.yaml | 6 +-
.../compose/templates/auto-gatekeepers.yaml | 6 +-
.../ch_cli_tools/configurationgenerator.py | 23 ++++---
.../ch_cli_tools/dockercompose.py | 11 ++--
.../deployment-cli-tools/ch_cli_tools/helm.py | 2 +-
.../ch_cli_tools/skaffold.py | 2 +-
tools/deployment-cli-tools/harness-deployment | 64 +++++++------------
.../tests/test_dockercompose.py | 4 +-
10 files changed, 57 insertions(+), 66 deletions(-)
diff --git a/.dockerignore b/.dockerignore
index f98de6ccc..a5b6ffeda 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -19,4 +19,5 @@ __pycache__
/application-templates
/deployment-configuration
/cloud-harness
-.openapi-generator
\ No newline at end of file
+.openapi-generator
+docker-compose.yaml
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 1c0fcc2e4..f4d38460f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,7 +8,7 @@ node_modules
*.DS_Store
deployment/helm
deployment/compose
-deployment/docker-compose.yaml
+docker-compose.yaml
*.egg-info
*.idea
/build
diff --git a/deployment-configuration/compose/templates/auto-compose.yaml b/deployment-configuration/compose/templates/auto-compose.yaml
index 46b076612..b00850854 100644
--- a/deployment-configuration/compose/templates/auto-compose.yaml
+++ b/deployment-configuration/compose/templates/auto-compose.yaml
@@ -163,9 +163,9 @@ services:
{{- end }}
{{- end }}
volumes:
- - ./compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
+ - ./deployment/compose/allvalues.yaml:/opt/cloudharness/resources/allvalues.yaml:ro
{{- range $file_name, $_ := $app_config.harness.secrets }}
- - ./compose/resources/generated/auth/{{ $file_name }}:/opt/cloudharness/resources/auth/{{ $file_name }}
+ - ./deployment/compose/resources/generated/auth/{{ $file_name }}:/opt/cloudharness/resources/auth/{{ $file_name }}
{{- end }}
{{- if or $deployment.volume $app_config.harness.resources }}
{{- with $deployment.volume }}
@@ -176,7 +176,7 @@ services:
{{- with $app_config.harness.resources }}
{{- range .}}
- type: bind
- source: ./compose/resources/generated/{{ $app_name }}/{{ .src }}
+ source: ./deployment/compose/resources/generated/{{ $app_name }}/{{ .src }}
target: {{ .dst }}
{{- end }}
{{- end}}
diff --git a/deployment-configuration/compose/templates/auto-gatekeepers.yaml b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
index fcd7d19a7..4e6b7ac5c 100644
--- a/deployment-configuration/compose/templates/auto-gatekeepers.yaml
+++ b/deployment-configuration/compose/templates/auto-gatekeepers.yaml
@@ -23,9 +23,9 @@
environment:
- PROXY_CONFIG_FILE=/opt/proxy.yml
volumes:
- - ./compose/resources/generated/{{ $gk_name }}/proxy.yml:/opt/proxy.yml
- - ./compose/resources/generated/{{ $gk_name }}/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
- - ./compose/resources/generated/{{ $gk_name }}/access-denied.html.tmpl:/templates/access-denied.html.tmpl
+ - ./deployment/compose/resources/generated/{{ $gk_name }}/proxy.yml:/opt/proxy.yml
+ - ./deployment/compose/resources/generated/{{ $gk_name }}/cacert.crt:/etc/pki/ca-trust/source/anchors/cacert.crt
+ - ./deployment/compose/resources/generated/{{ $gk_name }}/access-denied.html.tmpl:/templates/access-denied.html.tmpl
labels:
- "traefik.enable=true"
- "traefik.http.services.{{ $gk_name }}.loadbalancer.server.port={{ .app.harness.service.port }}"
diff --git a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
index 2228e3af8..56c4f9ac3 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/configurationgenerator.py
@@ -1,7 +1,7 @@
"""
Utilities to create a helm chart from a CloudHarness directory structure
"""
-from typing import Union
+from typing import List, Union
import yaml
import os
import shutil
@@ -10,7 +10,7 @@
import tarfile
from docker import from_env as DockerClient
from pathlib import Path
-
+import abc
from . import HERE, CH_ROOT
from cloudharness_utils.constants import TEST_IMAGES_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \
@@ -32,10 +32,11 @@
DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage')
-class ConfigurationGenerator(object):
- def __init__(self, root_paths, tag: Union[str, int, None] = 'latest', registry='', local=True, domain=None, exclude=(), secured=True,
- output_path='./deployment', include=None, registry_secret=None, tls=True, env=None,
- namespace=None, templates_path=HELM_PATH):
+class ConfigurationGenerator(object, metaclass=abc.ABCMeta):
+
+ def __init__(self, root_paths: List[str], tag: Union[str, int, None] = 'latest', registry='', local=True, domain=None, exclude=(), secured=True,
+                 output_path='./deployment', include: List[str] = None, registry_secret: str = None, tls: bool = True, env: str = None,
+ namespace: str = None, templates_path: str = HELM_PATH):
assert domain, 'A domain must be specified'
self.root_paths = [Path(r) for r in root_paths]
self.tag = tag
@@ -63,6 +64,10 @@ def __init__(self, root_paths, tag: Union[str, int, None] = 'latest', registry='
self.base_images = {}
self.all_images = {}
+ @abc.abstractmethod
+ def create_app_values_spec(self, app_name, app_path, base_image_name=None, helm_values={}):
+ ...
+
def __init_deployment(self):
"""
Create the base helm chart
@@ -94,11 +99,11 @@ def _process_applications(self, helm_values, base_image_name):
app_base_path = root_path / APPS_PATH
app_values = self.collect_app_values(
- app_base_path, base_image_name=base_image_name)
+ app_base_path, base_image_name=base_image_name, helm_values=helm_values)
helm_values[KEY_APPS] = dict_merge(helm_values[KEY_APPS],
app_values)
- def collect_app_values(self, app_base_path, base_image_name=None):
+ def collect_app_values(self, app_base_path, base_image_name=None, helm_values=None):
values = {}
for app_path in app_base_path.glob("*/"): # We get the sub-files that are directories
@@ -108,7 +113,7 @@ def collect_app_values(self, app_base_path, base_image_name=None):
continue
app_key = app_name.replace('-', '_')
- app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name)
+ app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name, helm_values=helm_values)
# dockerfile_path = next(app_path.rglob('**/Dockerfile'), None)
# # for dockerfile_path in app_path.rglob('**/Dockerfile'):
diff --git a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
index e714f1f1b..87ecf5b4d 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/dockercompose.py
@@ -84,7 +84,7 @@ def process_values(self) -> HarnessMainConfig:
def generate_docker_compose_yaml(self):
compose_templates = self.dest_deployment_path
- dest_compose_yaml = self.dest_deployment_path.parent / "docker-compose.yaml"
+ dest_compose_yaml = self.dest_deployment_path.parent.parent / "docker-compose.yaml"
logging.info(f'Generate docker compose configuration in: {dest_compose_yaml}, using templates from {compose_templates}')
command = f"helm template {compose_templates} > {dest_compose_yaml}"
@@ -204,7 +204,7 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- def create_app_values_spec(self, app_name, app_path, base_image_name=None):
+ def create_app_values_spec(self, app_name, app_path, base_image_name=None, helm_values={}):
logging.info('Generating values script for ' + app_name)
deploy_path = app_path / 'deploy'
@@ -231,9 +231,10 @@ def create_app_values_spec(self, app_name, app_path, base_image_name=None):
image_paths = [path for path in find_dockerfiles_paths(
app_path) if 'tasks/' not in path and 'subapps' not in path]
- # Inject entry points commands
- for image_path in image_paths:
- self.inject_entry_points_commands(values, image_path, app_path)
+ # Inject entry points commands to enable debug
+ if helm_values.get("debug", False):
+ for image_path in image_paths:
+ self.inject_entry_points_commands(values, image_path, app_path)
if len(image_paths) > 1:
logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name,
diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py
index d1725c67e..b81d0f191 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/helm.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py
@@ -155,7 +155,7 @@ def __finish_helm_values(self, values):
create_env_variables(values)
return values, self.include
- def create_app_values_spec(self, app_name, app_path, base_image_name=None):
+ def create_app_values_spec(self, app_name, app_path, base_image_name=None, helm_values={}):
logging.info('Generating values script for ' + app_name)
specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml')
diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
index 0c471f422..22320082a 100644
--- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
+++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py
@@ -231,7 +231,7 @@ def identify_unicorn_based_main(candidates):
skaffold_conf['build']['artifacts'] = [v for v in artifacts.values()]
- with open('skaffold.yaml', "w") as f:
+ with open(os.path.join(output_path, 'skaffold.yaml'), "w") as f:
yaml.dump(skaffold_conf, f)
return skaffold_conf
diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment
index 908caed0e..f4c9fe3db 100644
--- a/tools/deployment-cli-tools/harness-deployment
+++ b/tools/deployment-cli-tools/harness-deployment
@@ -11,7 +11,7 @@ from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_d
from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file
from ch_cli_tools.preprocessing import preprocess_build_overrides
from ch_cli_tools.utils import merge_app_directories
-from cloudharness_utils.constants import DEPLOYMENT_PATH, COMPOSE_ENGINE
+from cloudharness_utils.constants import DEPLOYMENT_PATH, COMPOSE_ENGINE, HELM_ENGINE
HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/')
ROOT = os.path.dirname(os.path.dirname(HERE)).replace(os.path.sep, '/')
@@ -79,48 +79,33 @@ if __name__ == "__main__":
else:
if args.merge:
- logging.warn(
+ logging.warning(
"Merge (-m, --merge) argument is deprecated. Directory merging is now set automatically")
merge_app_directories(root_paths, destination=args.merge)
root_paths = [args.merge]
- if not args.docker_compose:
- helm_values = create_helm_chart(
- root_paths,
- tag=args.tag,
- registry=args.registry,
- domain=args.domain,
- local=args.local,
- secured=not args.unsecured,
- output_path=args.output_path,
- exclude=args.exclude,
- include=args.include,
- registry_secret=args.registry_secret,
- tls=not args.no_tls,
- env=envs,
- namespace=args.namespace
- )
- else:
- helm_values = create_docker_compose_configuration(
- root_paths,
- tag=args.tag,
- registry=args.registry,
- domain=args.domain,
- local=args.local,
- secured=not args.unsecured,
- output_path=args.output_path,
- exclude=args.exclude,
- include=args.include,
- registry_secret=args.registry_secret,
- tls=not args.no_tls,
- env=envs,
- namespace=args.namespace,
- )
+ chart_fn = create_helm_chart if not args.docker_compose else create_docker_compose_configuration
+
+ helm_values = chart_fn(
+ root_paths,
+ tag=args.tag,
+ registry=args.registry,
+ domain=args.domain,
+ local=args.local,
+ secured=not args.unsecured,
+ output_path=args.output_path,
+ exclude=args.exclude,
+ include=args.include,
+ registry_secret=args.registry_secret,
+ tls=not args.no_tls,
+ env=envs,
+ namespace=args.namespace
+ )
merged_root_paths = preprocess_build_overrides(
root_paths=root_paths, helm_values=helm_values)
- if not args.no_cd_gen and envs:
+ if not args.no_cd_gen and envs and not args.docker_compose:
create_codefresh_deployment_scripts(
merged_root_paths,
include=args.include,
@@ -132,13 +117,12 @@ if __name__ == "__main__":
if args.write_env:
write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env"))
+ create_skaffold_configuration(merged_root_paths, helm_values, backend_deploy=COMPOSE_ENGINE if args.docker_compose else HELM_ENGINE)
+
if not args.docker_compose:
- create_skaffold_configuration(merged_root_paths, helm_values)
- else:
- create_skaffold_configuration(merged_root_paths, helm_values, backend_deploy=COMPOSE_ENGINE)
- create_vscode_debug_configuration(root_paths, helm_values)
+ create_vscode_debug_configuration(root_paths, helm_values)
- hosts_info(helm_values)
+ hosts_info(helm_values)
if args.deploy:
deploy(args.namespace, args.output_path)
diff --git a/tools/deployment-cli-tools/tests/test_dockercompose.py b/tools/deployment-cli-tools/tests/test_dockercompose.py
index 3e2c23005..13959434c 100644
--- a/tools/deployment-cli-tools/tests/test_dockercompose.py
+++ b/tools/deployment-cli-tools/tests/test_dockercompose.py
@@ -67,7 +67,7 @@ def test_collect_compose_values(tmp_path):
compose_path = out_folder / COMPOSE_PATH
# Check files
- assert exists(out_folder / 'docker-compose.yaml')
+ assert exists(out_folder.parent / 'docker-compose.yaml')
assert exists(compose_path)
assert exists(compose_path / 'values.yaml')
assert exists(compose_path / 'allvalues.yaml')
@@ -136,7 +136,7 @@ def test_collect_compose_values_noreg_noinclude(tmp_path):
compose_path = out_path / COMPOSE_PATH
# Check files
- assert exists(out_path / 'docker-compose.yaml')
+ assert exists(out_path.parent / 'docker-compose.yaml')
assert exists(compose_path)
assert exists(compose_path / 'values.yaml')
assert exists(compose_path / 'allvalues.yaml')
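With patch 92, `ConfigurationGenerator` becomes an abstract base class: the shared deployment pipeline lives in the base, and the helm and docker-compose generators each supply their own `create_app_values_spec` (selected at the call site via `chart_fn` in `harness-deployment`). A stripped-down sketch of the pattern (the `ComposeGenerator` subclass below is illustrative, not the real class name):

```python
import abc

class ConfigurationGenerator(metaclass=abc.ABCMeta):
    """Shared pipeline; concrete generators provide the app-values logic."""

    @abc.abstractmethod
    def create_app_values_spec(self, app_name, app_path, base_image_name=None, helm_values=None):
        ...

class ComposeGenerator(ConfigurationGenerator):
    def create_app_values_spec(self, app_name, app_path, base_image_name=None, helm_values=None):
        return {"name": app_name}

# ConfigurationGenerator() would raise TypeError (abstract method not implemented);
# the concrete subclass instantiates normally.
print(ComposeGenerator().create_app_values_spec("samples", "./applications/samples"))
```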
From 7eb8ca393c159db2791c2f5911b8b12f4f7687c5 Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 18:12:44 +0200
Subject: [PATCH 93/94] Linting fix
---
lint-check.sh | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/lint-check.sh b/lint-check.sh
index 869f51b77..58260bfe6 100644
--- a/lint-check.sh
+++ b/lint-check.sh
@@ -1,9 +1,10 @@
#!/bin/bash
# Run autopep8 with --diff and capture the output
-diff_output=$(autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --diff --exclude '**/cloudharness_cli/**/*,**/models/*,**/model/*' .)
+diff_output=$(python -m autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --diff --exclude '**/cloudharness_cli/**/*,**/models/*,**/model/*' .)
# Check if the output is non-empty
if [ -n "$diff_output" ]; then
- echo $diff_output
- echo "Code style issues found in the above files. Please run autopep8 to fix."
+ printf "%s\n" "$diff_output"
+ echo "Code style issues found in the above files. To fix you can run: "
+ echo "autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --in-place --exclude '**/cloudharness_cli/**/*,**/models/*,**/model/*'"
exit 1
fi
\ No newline at end of file
From 8bb9a1b06e2059aa0b6d6f77725ebbb73dd853af Mon Sep 17 00:00:00 2001
From: Filippo Ledda
Date: Tue, 17 Sep 2024 18:14:44 +0200
Subject: [PATCH 94/94] Linting fix
---
libraries/cloudharness-common/cloudharness/applications.py | 4 ++--
lint-check.sh | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/libraries/cloudharness-common/cloudharness/applications.py b/libraries/cloudharness-common/cloudharness/applications.py
index cea37e323..855f8c82b 100644
--- a/libraries/cloudharness-common/cloudharness/applications.py
+++ b/libraries/cloudharness-common/cloudharness/applications.py
@@ -42,10 +42,10 @@ def get_db_connection_string(self, **kwargs) -> str:
elif self.db_type == 'postgres':
database_name = kwargs.get('database_name', self.harness.database.postgres['initialdb'])
return f"postgres://{self.db_name}:{self.harness.database.postgres.ports[0]['port']}/" \
- f"{database_name}?user={self.harness.database.user}&password={self.harness.database['pass']}"
+ f"{database_name}?user={self.harness.database.user}&password={self.harness.database['pass']}"
elif self.db_type == 'neo4j':
return f"{self.harness.database.neo4j.get('ports')[1]['name']}://{self.db_name}:" \
- f"{self.harness.database.neo4j.get('ports')[1]['port']}/"
+ f"{self.harness.database.neo4j.get('ports')[1]['port']}/"
else:
raise NotImplementedError(
f'Database connection string discovery not yet supported for database type {self.db_type}')
diff --git a/lint-check.sh b/lint-check.sh
index 58260bfe6..88fa6ed2f 100644
--- a/lint-check.sh
+++ b/lint-check.sh
@@ -5,6 +5,6 @@ diff_output=$(python -m autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --d
if [ -n "$diff_output" ]; then
printf "%s\n" "$diff_output"
echo "Code style issues found in the above files. To fix you can run: "
- echo "autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --in-place --exclude '**/cloudharness_cli/**/*,**/models/*,**/model/*'"
+ echo "autopep8 --select=E1,E2,E3,W,E4,E7,E502 --recursive --in-place --exclude '**/cloudharness_cli/**/*,**/models/*,**/model/*' ."
exit 1
fi
\ No newline at end of file
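For reference, the connection strings whose continuation lines are re-indented above follow the usual URI shape. A minimal sketch of the postgres case with illustrative values (not read from a real harness configuration):

```python
def postgres_connection_string(host: str, port: int, database: str, user: str, password: str) -> str:
    # Same shape as get_db_connection_string for db_type == 'postgres'.
    return f"postgres://{host}:{port}/{database}?user={user}&password={password}"

# e.g. postgres://samples-db:5432/samples?user=admin&password=secret
print(postgres_connection_string("samples-db", 5432, "samples", "admin", "secret"))
```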