From ba41600f365053ac5be34e4a6d3799ce6b9c9a0c Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Mon, 20 Apr 2020 14:46:59 +0200 Subject: [PATCH] First workable version --- .dockerignore | 1 + .gitignore | 9 + README.md | 247 +++++++ applications/README.md | 43 ++ applications/accounts/Docker-compose.yaml | 40 ++ applications/accounts/Dockerfile | 3 + .../accounts/deploy/resources/realm.json | 627 +++++++++++++++++ .../accounts/deploy/templates/configmap.yaml | 11 + .../deploy/templates/keycloak-postgres.yaml | 77 +++ .../accounts/deploy/templates/keycloak.yaml | 78 +++ applications/accounts/deploy/values.yaml | 24 + .../accounts/keycloak-gatekeeper/Dockerfile | 37 + applications/accounts/standalone.xml | 614 +++++++++++++++++ .../argo/deploy/templates/argo-sa.yaml | 53 ++ applications/argo/deploy/values.yaml | 9 + .../events/deploy/resources/broker/init.sh | 36 + .../deploy/resources/broker/log4j.properties | 76 ++ .../deploy/resources/broker/server.properties | 134 ++++ .../events/deploy/resources/zookeeper/init.sh | 15 + .../resources/zookeeper/log4j.properties | 8 + .../resources/zookeeper/zookeeper.properties | 12 + .../events/deploy/templates/broker-config.yml | 9 + .../events/deploy/templates/deployments.yml | 342 +++++++++ .../events/deploy/templates/roles.yml | 66 ++ .../events/deploy/templates/services.yml | 61 ++ .../events/deploy/templates/zoo-config.yml | 7 + applications/events/deploy/values.yaml | 9 + applications/samples/README.md | 3 + applications/samples/api/config.json | 3 + applications/samples/api/samples.yaml | 116 ++++ applications/samples/deploy/values.yaml | 4 + applications/samples/server/.dockerignore | 72 ++ applications/samples/server/.gitignore | 66 ++ .../samples/server/.openapi-generator-ignore | 23 + applications/samples/server/.travis.yml | 14 + applications/samples/server/Dockerfile | 16 + applications/samples/server/README.md | 49 ++ .../samples/server/api_samples/__init__.py | 0 .../samples/server/api_samples/__main__.py | 18 + .../api_samples/controllers/__init__.py | 0 .../controllers/auth_controller.py | 16 + .../controllers/security_controller_.py | 17 + .../controllers/workflows_controller.py | 42 ++ .../samples/server/api_samples/encoder.py | 20 + .../server/api_samples/models/__init__.py | 8 + .../server/api_samples/models/base_model_.py | 69 ++ .../api_samples/models/inline_response202.py | 66 ++ .../models/inline_response202_task.py | 92 +++ .../server/api_samples/models/valid.py | 64 ++ .../server/api_samples/openapi/openapi.yaml | 134 ++++ .../server/api_samples/test/__init__.py | 16 + .../api_samples/test/test_auth_controller.py | 34 + .../test/test_workflows_controller.py | 66 ++ .../server/api_samples/typing_utils.py | 32 + .../samples/server/api_samples/util.py | 142 ++++ applications/samples/server/git_push.sh | 58 ++ applications/samples/server/requirements.txt | 7 + applications/samples/server/setup.py | 39 ++ .../samples/server/test-requirements.txt | 4 + applications/samples/server/tox.ini | 9 + applications/samples/src/.dockerignore | 72 ++ applications/samples/src/.gitignore | 66 ++ .../samples/src/.openapi-generator-ignore | 27 + applications/samples/src/.travis.yml | 14 + applications/samples/src/Dockerfile | 18 + applications/samples/src/README.md | 49 ++ .../samples/src/api_samples/__init__.py | 0 .../samples/src/api_samples/__main__.py | 18 + .../src/api_samples/controllers/__init__.py | 0 .../controllers/auth_controller.py | 16 + .../controllers/workflows_controller.py | 71 ++ .../samples/src/api_samples/encoder.py | 20 + 
.../src/api_samples/models/__init__.py | 8 + .../src/api_samples/models/base_model_.py | 69 ++ .../api_samples/models/inline_response202.py | 66 ++ .../models/inline_response202_task.py | 92 +++ .../samples/src/api_samples/models/valid.py | 64 ++ .../src/api_samples/openapi/openapi.yaml | 134 ++++ .../src/api_samples/service/__init__.py | 0 .../api_samples/service/security_service.py | 16 + .../samples/src/api_samples/test/__init__.py | 16 + .../api_samples/test/test_auth_controller.py | 29 + .../test/test_default_controller.py | 40 ++ .../test/test_workflows_controller.py | 40 ++ .../samples/src/api_samples/typing_utils.py | 32 + applications/samples/src/api_samples/util.py | 142 ++++ applications/samples/src/git_push.sh | 58 ++ applications/samples/src/requirements.txt | 8 + applications/samples/src/setup.py | 41 ++ .../samples/src/test-requirements.txt | 4 + applications/samples/src/tox.ini | 9 + applications/samples/src/www/index.html | 1 + applications/samples/tasks/sum/Dockerfile | 10 + applications/samples/tasks/sum/main.py | 18 + applications/workflows/README.md | 3 + applications/workflows/api/config.json | 3 + applications/workflows/api/workflows.yaml | 174 +++++ applications/workflows/deploy/values.yaml | 6 + applications/workflows/package-lock.json | 3 + applications/workflows/server/.dockerignore | 72 ++ applications/workflows/server/.gitignore | 66 ++ .../server/.openapi-generator-ignore | 23 + applications/workflows/server/.travis.yml | 14 + applications/workflows/server/Dockerfile | 16 + applications/workflows/server/README.md | 49 ++ applications/workflows/server/git_push.sh | 58 ++ .../workflows/server/requirements.txt | 7 + applications/workflows/server/setup.py | 39 ++ .../workflows/server/test-requirements.txt | 4 + applications/workflows/server/tox.ini | 9 + .../server/workflows_api/__init__.py | 0 .../server/workflows_api/__main__.py | 18 + .../workflows_api/controllers/__init__.py | 0 .../create_and_access_controller.py | 65 ++ .../controllers/security_controller_.py | 3 + .../workflows/server/workflows_api/encoder.py | 20 + .../server/workflows_api/models/__init__.py | 9 + .../workflows_api/models/base_model_.py | 69 ++ .../server/workflows_api/models/operation.py | 176 +++++ .../models/operation_search_result.py | 94 +++ .../workflows_api/models/operation_status.py | 46 ++ .../models/search_result_data.py | 66 ++ .../server/workflows_api/openapi/openapi.yaml | 204 ++++++ .../server/workflows_api/test/__init__.py | 16 + .../test/test_create_and_access_controller.py | 83 +++ .../server/workflows_api/typing_utils.py | 32 + .../workflows/server/workflows_api/util.py | 142 ++++ applications/workflows/src/.dockerignore | 72 ++ applications/workflows/src/.gitignore | 66 ++ .../workflows/src/.openapi-generator-ignore | 27 + applications/workflows/src/.travis.yml | 14 + applications/workflows/src/Dockerfile | 18 + applications/workflows/src/README.md | 49 ++ applications/workflows/src/__init__.py | 0 applications/workflows/src/git_push.sh | 58 ++ applications/workflows/src/requirements.txt | 7 + applications/workflows/src/setup.py | 39 ++ .../workflows/src/test-requirements.txt | 4 + applications/workflows/src/tox.ini | 9 + .../workflows/src/workflows_api/__init__.py | 0 .../workflows/src/workflows_api/__main__.py | 18 + .../src/workflows_api/controllers/__init__.py | 0 .../create_and_access_controller.py | 90 +++ .../workflows/src/workflows_api/encoder.py | 20 + .../src/workflows_api/models/__init__.py | 9 + .../src/workflows_api/models/base_model_.py | 69 ++ 
.../src/workflows_api/models/operation.py | 174 +++++ .../models/operation_search_result.py | 91 +++ .../workflows_api/models/operation_status.py | 47 ++ .../workflows_api/models/operation_type.py | 44 ++ .../models/search_result_data.py | 66 ++ .../src/workflows_api/openapi/openapi.yaml | 204 ++++++ .../src/workflows_api/service/__init__.py | 0 .../workflows_api/service/workflow_service.py | 52 ++ .../src/workflows_api/test/__init__.py | 16 + .../test/test_create_and_access_controller.py | 83 +++ .../src/workflows_api/typing_utils.py | 32 + .../workflows/src/workflows_api/util.py | 142 ++++ .../tasks/extract-download/Dockerfile | 8 + .../workflows/tasks/extract-download/main.py | 16 + .../workflows/tasks/extract-s3/Dockerfile | 1 + .../workflows/tasks/print-file/Dockerfile | 7 + .../workflows/tasks/print-file/main.py | 15 + .../tasks/send-result-event/Dockerfile | 9 + .../workflows/tasks/send-result-event/main.py | 44 ++ .../tasks/send-result-event/requirements.txt | 1 + blueprint/applications/README.md | 4 + client/cloudharness_cli/README.md | 207 ++++++ .../cloudharness_cli.egg-info/PKG-INFO | 12 + .../cloudharness_cli.egg-info/SOURCES.txt | 32 + .../dependency_links.txt | 1 + .../cloudharness_cli.egg-info/requires.txt | 4 + .../cloudharness_cli.egg-info/top_level.txt | 1 + .../cloudharness_cli/__init__.py | 0 .../cloudharness_cli/samples/__init__.py | 36 + .../cloudharness_cli/samples/api/__init__.py | 7 + .../cloudharness_cli/samples/api/auth_api.py | 143 ++++ .../samples/api/workflows_api.py | 357 ++++++++++ .../cloudharness_cli/samples/api_client.py | 648 ++++++++++++++++++ .../cloudharness_cli/samples/configuration.py | 386 +++++++++++ .../cloudharness_cli/samples/exceptions.py | 121 ++++ .../samples/models/__init__.py | 20 + .../samples/models/inline_response202.py | 121 ++++ .../samples/models/inline_response202_task.py | 149 ++++ .../cloudharness_cli/samples/models/valid.py | 121 ++++ .../cloudharness_cli/samples/rest.py | 297 ++++++++ .../cloudharness_cli/workflows/__init__.py | 36 + .../workflows/api/__init__.py | 6 + .../workflows/api/create_and_access_api.py | 500 ++++++++++++++ .../cloudharness_cli/workflows/api_client.py | 648 ++++++++++++++++++ .../workflows/configuration.py | 374 ++++++++++ .../cloudharness_cli/workflows/exceptions.py | 121 ++++ .../workflows/models/__init__.py | 21 + .../workflows/models/operation.py | 231 +++++++ .../models/operation_search_result.py | 147 ++++ .../workflows/models/operation_status.py | 105 +++ .../workflows/models/search_result_data.py | 123 ++++ .../cloudharness_cli/workflows/rest.py | 297 ++++++++ .../cloudharness_cli/docs/samples/AuthApi.md | 69 ++ .../docs/samples/InlineResponse202.md | 10 + .../docs/samples/InlineResponse202Task.md | 11 + client/cloudharness_cli/docs/samples/Valid.md | 10 + .../docs/samples/WorkflowsApi.md | 167 +++++ .../docs/workflows/CreateAndAccessApi.md | 243 +++++++ .../docs/workflows/Operation.md | 15 + .../docs/workflows/OperationSearchResult.md | 12 + .../docs/workflows/OperationStatus.md | 9 + .../docs/workflows/SearchResultData.md | 11 + client/cloudharness_cli/requirements.txt | 6 + client/cloudharness_cli/setup.py | 42 ++ client/cloudharness_cli/test-requirements.txt | 3 + .../cloudharness_cli/test/samples/__init__.py | 0 .../test/samples/test_auth_api.py | 41 ++ .../test/samples/test_inline_response202.py | 55 ++ .../samples/test_inline_response202_task.py | 54 ++ .../test/samples/test_valid.py | 53 ++ .../test/samples/test_workflows_api.py | 55 ++ .../test/workflows/__init__.py | 0 
.../workflows/test_create_and_access_api.py | 62 ++ .../test/workflows/test_operation.py | 57 ++ .../workflows/test_operation_search_result.py | 62 ++ .../test/workflows/test_operation_status.py | 52 ++ .../test/workflows/test_search_result_data.py | 53 ++ deployment.yaml | 0 infrastructure/README.md | 18 + infrastructure/base-images/README.md | 5 + .../cloudharness-base-debian/Dockerfile | 8 + .../base-images/cloudharness-base/Dockerfile | 12 + infrastructure/common-images/README.md | 3 + libraries/cloudharness-common/.coveragerc | 3 + libraries/cloudharness-common/.gitignore | 3 + libraries/cloudharness-common/.travis.yml | 7 + libraries/cloudharness-common/MANIFEST.in | 1 + libraries/cloudharness-common/README.md | 29 + .../cloudharness/__init__.py | 16 + .../cloudharness/auth/__init__.py | 1 + .../cloudharness/auth/keycloak/__init__.py | 40 ++ .../cloudharness/errors.py | 25 + .../cloudharness/events/__init__.py | 0 .../cloudharness/events/client.py | 113 +++ .../cloudharness/persistence/__init__.py | 0 .../persistence/graph_database/__init__.py | 0 .../graph_database/neo4j/__init__.py | 0 .../persistence/nosql_database/__init__.py | 0 .../persistence/sql_database/__init__.py | 0 .../cloudharness/utils/__init__.py | 0 .../cloudharness/utils/env.py | 130 ++++ .../cloudharness/utils/resources/values.yaml | 245 +++++++ .../cloudharness/utils/settings.py | 2 + .../cloudharness/workflows/__init__.py | 0 .../cloudharness/workflows/argo.py | 236 +++++++ .../cloudharness/workflows/operations.py | 325 +++++++++ .../cloudharness/workflows/tasks.py | 130 ++++ .../cloudharness/workflows/utils.py | 17 + .../cloudharness-common/requirements.txt | 18 + libraries/cloudharness-common/setup.py | 35 + .../cloudharness-common/test-requirements.txt | 7 + .../cloudharness-common/tests/__init__.py | 0 .../cloudharness-common/tests/test_env.py | 31 + .../tests/test_integration.py | 2 + .../tests/test_workflow.py | 91 +++ libraries/cloudharness-common/tox.ini | 9 + libraries/package-lock.json | 3 + requirements.txt | 2 + utilities/.gitignore | 5 + utilities/Dockerfile | 13 + utilities/MANIFEST.in | 7 + utilities/README.md | 3 + .../cloudharness_utilities.egg-info/PKG-INFO | 12 + .../SOURCES.txt | 39 ++ .../dependency_links.txt | 1 + .../requires.txt | 2 + .../top_level.txt | 1 + utilities/cloudharness_utilities/__init__.py | 11 + .../application-template/api/config.json | 3 + .../application-template/api/openapi.yaml | 47 ++ .../application-template/deploy/values.yaml | 4 + utilities/cloudharness_utilities/build.py | 97 +++ utilities/cloudharness_utilities/codefresh.py | 71 ++ utilities/cloudharness_utilities/constants.py | 35 + .../deployment-configuration/README.md | 8 + .../codefresh-build-template.yaml | 8 + .../codefresh-template.yaml | 67 ++ .../deployment-configuration/helm/.helmignore | 22 + .../deployment-configuration/helm/Chart.yaml | 10 + .../deployment-configuration/helm/README.md | 121 ++++ .../helm/templates/NOTES.txt | 11 + .../helm/templates/_helpers.tpl | 113 +++ .../helm/templates/auto-deployments.yaml | 62 ++ .../helm/templates/auto-gatekeepers.yaml | 116 ++++ .../helm/templates/auto-services.yaml | 35 + .../helm/templates/certs/letsencrypt.yaml | 16 + .../helm/templates/ingress.yaml | 62 ++ .../helm/templates/secrets.yaml | 12 + .../deployment-configuration/helm/values.yaml | 0 .../value-template.yaml | 12 + .../values-template.yaml | 26 + utilities/cloudharness_utilities/helm.py | 202 ++++++ utilities/cloudharness_utilities/openapi.py | 55 ++ utilities/cloudharness_utilities/utils.py | 
152 ++++ utilities/harness-application | 31 + utilities/harness-codefresh | 27 + utilities/harness-deployment | 57 ++ utilities/harness-generate | 144 ++++ utilities/requirements.txt | 2 + utilities/setup.py | 44 ++ utilities/tests/resources/conf-source1/a.yaml | 4 + utilities/tests/resources/conf-source1/b.yaml | 4 + .../tests/resources/conf-source1/sub/a.yaml | 4 + .../tests/resources/conf-source1/sub/b.yaml | 4 + utilities/tests/resources/conf-source1/t.txt | 1 + utilities/tests/resources/conf-source2/a.yaml | 4 + utilities/tests/resources/conf-source2/c.yaml | 4 + .../tests/resources/conf-source2/sub/a.yaml | 4 + .../tests/resources/conf-source2/sub/c.yaml | 4 + utilities/tests/resources/conf-source2/t.txt | 1 + utilities/tests/test_utils.py | 48 ++ 317 files changed, 18288 insertions(+) create mode 100644 .dockerignore create mode 100644 .gitignore create mode 100644 README.md create mode 100644 applications/README.md create mode 100644 applications/accounts/Docker-compose.yaml create mode 100644 applications/accounts/Dockerfile create mode 100644 applications/accounts/deploy/resources/realm.json create mode 100644 applications/accounts/deploy/templates/configmap.yaml create mode 100644 applications/accounts/deploy/templates/keycloak-postgres.yaml create mode 100644 applications/accounts/deploy/templates/keycloak.yaml create mode 100644 applications/accounts/deploy/values.yaml create mode 100644 applications/accounts/keycloak-gatekeeper/Dockerfile create mode 100644 applications/accounts/standalone.xml create mode 100644 applications/argo/deploy/templates/argo-sa.yaml create mode 100644 applications/argo/deploy/values.yaml create mode 100644 applications/events/deploy/resources/broker/init.sh create mode 100644 applications/events/deploy/resources/broker/log4j.properties create mode 100644 applications/events/deploy/resources/broker/server.properties create mode 100644 applications/events/deploy/resources/zookeeper/init.sh create mode 100644 applications/events/deploy/resources/zookeeper/log4j.properties create mode 100644 applications/events/deploy/resources/zookeeper/zookeeper.properties create mode 100644 applications/events/deploy/templates/broker-config.yml create mode 100644 applications/events/deploy/templates/deployments.yml create mode 100644 applications/events/deploy/templates/roles.yml create mode 100644 applications/events/deploy/templates/services.yml create mode 100644 applications/events/deploy/templates/zoo-config.yml create mode 100644 applications/events/deploy/values.yaml create mode 100644 applications/samples/README.md create mode 100644 applications/samples/api/config.json create mode 100644 applications/samples/api/samples.yaml create mode 100644 applications/samples/deploy/values.yaml create mode 100644 applications/samples/server/.dockerignore create mode 100644 applications/samples/server/.gitignore create mode 100644 applications/samples/server/.openapi-generator-ignore create mode 100644 applications/samples/server/.travis.yml create mode 100644 applications/samples/server/Dockerfile create mode 100644 applications/samples/server/README.md create mode 100644 applications/samples/server/api_samples/__init__.py create mode 100644 applications/samples/server/api_samples/__main__.py create mode 100644 applications/samples/server/api_samples/controllers/__init__.py create mode 100644 applications/samples/server/api_samples/controllers/auth_controller.py create mode 100644 applications/samples/server/api_samples/controllers/security_controller_.py create mode 
100644 applications/samples/server/api_samples/controllers/workflows_controller.py create mode 100644 applications/samples/server/api_samples/encoder.py create mode 100644 applications/samples/server/api_samples/models/__init__.py create mode 100644 applications/samples/server/api_samples/models/base_model_.py create mode 100644 applications/samples/server/api_samples/models/inline_response202.py create mode 100644 applications/samples/server/api_samples/models/inline_response202_task.py create mode 100644 applications/samples/server/api_samples/models/valid.py create mode 100644 applications/samples/server/api_samples/openapi/openapi.yaml create mode 100644 applications/samples/server/api_samples/test/__init__.py create mode 100644 applications/samples/server/api_samples/test/test_auth_controller.py create mode 100644 applications/samples/server/api_samples/test/test_workflows_controller.py create mode 100644 applications/samples/server/api_samples/typing_utils.py create mode 100644 applications/samples/server/api_samples/util.py create mode 100644 applications/samples/server/git_push.sh create mode 100644 applications/samples/server/requirements.txt create mode 100644 applications/samples/server/setup.py create mode 100644 applications/samples/server/test-requirements.txt create mode 100644 applications/samples/server/tox.ini create mode 100644 applications/samples/src/.dockerignore create mode 100644 applications/samples/src/.gitignore create mode 100644 applications/samples/src/.openapi-generator-ignore create mode 100644 applications/samples/src/.travis.yml create mode 100644 applications/samples/src/Dockerfile create mode 100644 applications/samples/src/README.md create mode 100644 applications/samples/src/api_samples/__init__.py create mode 100644 applications/samples/src/api_samples/__main__.py create mode 100644 applications/samples/src/api_samples/controllers/__init__.py create mode 100644 applications/samples/src/api_samples/controllers/auth_controller.py create mode 100644 applications/samples/src/api_samples/controllers/workflows_controller.py create mode 100644 applications/samples/src/api_samples/encoder.py create mode 100644 applications/samples/src/api_samples/models/__init__.py create mode 100644 applications/samples/src/api_samples/models/base_model_.py create mode 100644 applications/samples/src/api_samples/models/inline_response202.py create mode 100644 applications/samples/src/api_samples/models/inline_response202_task.py create mode 100644 applications/samples/src/api_samples/models/valid.py create mode 100644 applications/samples/src/api_samples/openapi/openapi.yaml create mode 100644 applications/samples/src/api_samples/service/__init__.py create mode 100644 applications/samples/src/api_samples/service/security_service.py create mode 100644 applications/samples/src/api_samples/test/__init__.py create mode 100644 applications/samples/src/api_samples/test/test_auth_controller.py create mode 100644 applications/samples/src/api_samples/test/test_default_controller.py create mode 100644 applications/samples/src/api_samples/test/test_workflows_controller.py create mode 100644 applications/samples/src/api_samples/typing_utils.py create mode 100644 applications/samples/src/api_samples/util.py create mode 100644 applications/samples/src/git_push.sh create mode 100644 applications/samples/src/requirements.txt create mode 100644 applications/samples/src/setup.py create mode 100644 applications/samples/src/test-requirements.txt create mode 100644 
applications/samples/src/tox.ini create mode 100644 applications/samples/src/www/index.html create mode 100644 applications/samples/tasks/sum/Dockerfile create mode 100644 applications/samples/tasks/sum/main.py create mode 100644 applications/workflows/README.md create mode 100644 applications/workflows/api/config.json create mode 100644 applications/workflows/api/workflows.yaml create mode 100644 applications/workflows/deploy/values.yaml create mode 100644 applications/workflows/package-lock.json create mode 100644 applications/workflows/server/.dockerignore create mode 100644 applications/workflows/server/.gitignore create mode 100644 applications/workflows/server/.openapi-generator-ignore create mode 100644 applications/workflows/server/.travis.yml create mode 100644 applications/workflows/server/Dockerfile create mode 100644 applications/workflows/server/README.md create mode 100644 applications/workflows/server/git_push.sh create mode 100644 applications/workflows/server/requirements.txt create mode 100644 applications/workflows/server/setup.py create mode 100644 applications/workflows/server/test-requirements.txt create mode 100644 applications/workflows/server/tox.ini create mode 100644 applications/workflows/server/workflows_api/__init__.py create mode 100644 applications/workflows/server/workflows_api/__main__.py create mode 100644 applications/workflows/server/workflows_api/controllers/__init__.py create mode 100644 applications/workflows/server/workflows_api/controllers/create_and_access_controller.py create mode 100644 applications/workflows/server/workflows_api/controllers/security_controller_.py create mode 100644 applications/workflows/server/workflows_api/encoder.py create mode 100644 applications/workflows/server/workflows_api/models/__init__.py create mode 100644 applications/workflows/server/workflows_api/models/base_model_.py create mode 100644 applications/workflows/server/workflows_api/models/operation.py create mode 100644 applications/workflows/server/workflows_api/models/operation_search_result.py create mode 100644 applications/workflows/server/workflows_api/models/operation_status.py create mode 100644 applications/workflows/server/workflows_api/models/search_result_data.py create mode 100644 applications/workflows/server/workflows_api/openapi/openapi.yaml create mode 100644 applications/workflows/server/workflows_api/test/__init__.py create mode 100644 applications/workflows/server/workflows_api/test/test_create_and_access_controller.py create mode 100644 applications/workflows/server/workflows_api/typing_utils.py create mode 100644 applications/workflows/server/workflows_api/util.py create mode 100644 applications/workflows/src/.dockerignore create mode 100644 applications/workflows/src/.gitignore create mode 100644 applications/workflows/src/.openapi-generator-ignore create mode 100644 applications/workflows/src/.travis.yml create mode 100644 applications/workflows/src/Dockerfile create mode 100644 applications/workflows/src/README.md create mode 100644 applications/workflows/src/__init__.py create mode 100644 applications/workflows/src/git_push.sh create mode 100644 applications/workflows/src/requirements.txt create mode 100644 applications/workflows/src/setup.py create mode 100644 applications/workflows/src/test-requirements.txt create mode 100644 applications/workflows/src/tox.ini create mode 100644 applications/workflows/src/workflows_api/__init__.py create mode 100644 applications/workflows/src/workflows_api/__main__.py create mode 100644 
applications/workflows/src/workflows_api/controllers/__init__.py create mode 100644 applications/workflows/src/workflows_api/controllers/create_and_access_controller.py create mode 100644 applications/workflows/src/workflows_api/encoder.py create mode 100644 applications/workflows/src/workflows_api/models/__init__.py create mode 100644 applications/workflows/src/workflows_api/models/base_model_.py create mode 100644 applications/workflows/src/workflows_api/models/operation.py create mode 100644 applications/workflows/src/workflows_api/models/operation_search_result.py create mode 100644 applications/workflows/src/workflows_api/models/operation_status.py create mode 100644 applications/workflows/src/workflows_api/models/operation_type.py create mode 100644 applications/workflows/src/workflows_api/models/search_result_data.py create mode 100644 applications/workflows/src/workflows_api/openapi/openapi.yaml create mode 100644 applications/workflows/src/workflows_api/service/__init__.py create mode 100644 applications/workflows/src/workflows_api/service/workflow_service.py create mode 100644 applications/workflows/src/workflows_api/test/__init__.py create mode 100644 applications/workflows/src/workflows_api/test/test_create_and_access_controller.py create mode 100644 applications/workflows/src/workflows_api/typing_utils.py create mode 100644 applications/workflows/src/workflows_api/util.py create mode 100644 applications/workflows/tasks/extract-download/Dockerfile create mode 100644 applications/workflows/tasks/extract-download/main.py create mode 100644 applications/workflows/tasks/extract-s3/Dockerfile create mode 100644 applications/workflows/tasks/print-file/Dockerfile create mode 100644 applications/workflows/tasks/print-file/main.py create mode 100644 applications/workflows/tasks/send-result-event/Dockerfile create mode 100644 applications/workflows/tasks/send-result-event/main.py create mode 100644 applications/workflows/tasks/send-result-event/requirements.txt create mode 100644 blueprint/applications/README.md create mode 100644 client/cloudharness_cli/README.md create mode 100644 client/cloudharness_cli/cloudharness_cli.egg-info/PKG-INFO create mode 100644 client/cloudharness_cli/cloudharness_cli.egg-info/SOURCES.txt create mode 100644 client/cloudharness_cli/cloudharness_cli.egg-info/dependency_links.txt create mode 100644 client/cloudharness_cli/cloudharness_cli.egg-info/requires.txt create mode 100644 client/cloudharness_cli/cloudharness_cli.egg-info/top_level.txt create mode 100644 client/cloudharness_cli/cloudharness_cli/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/api_client.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/configuration.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/exceptions.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py create mode 100644 client/cloudharness_cli/cloudharness_cli/samples/models/valid.py create 
mode 100644 client/cloudharness_cli/cloudharness_cli/samples/rest.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/api_client.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/configuration.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py create mode 100644 client/cloudharness_cli/cloudharness_cli/workflows/rest.py create mode 100644 client/cloudharness_cli/docs/samples/AuthApi.md create mode 100644 client/cloudharness_cli/docs/samples/InlineResponse202.md create mode 100644 client/cloudharness_cli/docs/samples/InlineResponse202Task.md create mode 100644 client/cloudharness_cli/docs/samples/Valid.md create mode 100644 client/cloudharness_cli/docs/samples/WorkflowsApi.md create mode 100644 client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md create mode 100644 client/cloudharness_cli/docs/workflows/Operation.md create mode 100644 client/cloudharness_cli/docs/workflows/OperationSearchResult.md create mode 100644 client/cloudharness_cli/docs/workflows/OperationStatus.md create mode 100644 client/cloudharness_cli/docs/workflows/SearchResultData.md create mode 100644 client/cloudharness_cli/requirements.txt create mode 100644 client/cloudharness_cli/setup.py create mode 100644 client/cloudharness_cli/test-requirements.txt create mode 100644 client/cloudharness_cli/test/samples/__init__.py create mode 100644 client/cloudharness_cli/test/samples/test_auth_api.py create mode 100644 client/cloudharness_cli/test/samples/test_inline_response202.py create mode 100644 client/cloudharness_cli/test/samples/test_inline_response202_task.py create mode 100644 client/cloudharness_cli/test/samples/test_valid.py create mode 100644 client/cloudharness_cli/test/samples/test_workflows_api.py create mode 100644 client/cloudharness_cli/test/workflows/__init__.py create mode 100644 client/cloudharness_cli/test/workflows/test_create_and_access_api.py create mode 100644 client/cloudharness_cli/test/workflows/test_operation.py create mode 100644 client/cloudharness_cli/test/workflows/test_operation_search_result.py create mode 100644 client/cloudharness_cli/test/workflows/test_operation_status.py create mode 100644 client/cloudharness_cli/test/workflows/test_search_result_data.py create mode 100644 deployment.yaml create mode 100644 infrastructure/README.md create mode 100644 infrastructure/base-images/README.md create mode 100644 infrastructure/base-images/cloudharness-base-debian/Dockerfile create mode 100644 infrastructure/base-images/cloudharness-base/Dockerfile create mode 100644 infrastructure/common-images/README.md create mode 100644 libraries/cloudharness-common/.coveragerc create mode 100644 libraries/cloudharness-common/.gitignore create mode 100644 
libraries/cloudharness-common/.travis.yml create mode 100644 libraries/cloudharness-common/MANIFEST.in create mode 100644 libraries/cloudharness-common/README.md create mode 100644 libraries/cloudharness-common/cloudharness/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/auth/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/auth/keycloak/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/errors.py create mode 100644 libraries/cloudharness-common/cloudharness/events/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/events/client.py create mode 100644 libraries/cloudharness-common/cloudharness/persistence/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/persistence/graph_database/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/persistence/graph_database/neo4j/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/persistence/nosql_database/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/persistence/sql_database/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/utils/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/utils/env.py create mode 100644 libraries/cloudharness-common/cloudharness/utils/resources/values.yaml create mode 100644 libraries/cloudharness-common/cloudharness/utils/settings.py create mode 100644 libraries/cloudharness-common/cloudharness/workflows/__init__.py create mode 100644 libraries/cloudharness-common/cloudharness/workflows/argo.py create mode 100644 libraries/cloudharness-common/cloudharness/workflows/operations.py create mode 100644 libraries/cloudharness-common/cloudharness/workflows/tasks.py create mode 100644 libraries/cloudharness-common/cloudharness/workflows/utils.py create mode 100644 libraries/cloudharness-common/requirements.txt create mode 100644 libraries/cloudharness-common/setup.py create mode 100644 libraries/cloudharness-common/test-requirements.txt create mode 100644 libraries/cloudharness-common/tests/__init__.py create mode 100644 libraries/cloudharness-common/tests/test_env.py create mode 100644 libraries/cloudharness-common/tests/test_integration.py create mode 100644 libraries/cloudharness-common/tests/test_workflow.py create mode 100644 libraries/cloudharness-common/tox.ini create mode 100644 libraries/package-lock.json create mode 100644 requirements.txt create mode 100644 utilities/.gitignore create mode 100644 utilities/Dockerfile create mode 100644 utilities/MANIFEST.in create mode 100644 utilities/README.md create mode 100644 utilities/cloudharness_utilities.egg-info/PKG-INFO create mode 100644 utilities/cloudharness_utilities.egg-info/SOURCES.txt create mode 100644 utilities/cloudharness_utilities.egg-info/dependency_links.txt create mode 100644 utilities/cloudharness_utilities.egg-info/requires.txt create mode 100644 utilities/cloudharness_utilities.egg-info/top_level.txt create mode 100644 utilities/cloudharness_utilities/__init__.py create mode 100644 utilities/cloudharness_utilities/application-template/api/config.json create mode 100644 utilities/cloudharness_utilities/application-template/api/openapi.yaml create mode 100644 utilities/cloudharness_utilities/application-template/deploy/values.yaml create mode 100644 utilities/cloudharness_utilities/build.py create mode 100644 utilities/cloudharness_utilities/codefresh.py create mode 100644 utilities/cloudharness_utilities/constants.py create mode 
100644 utilities/cloudharness_utilities/deployment-configuration/README.md create mode 100644 utilities/cloudharness_utilities/deployment-configuration/codefresh-build-template.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/codefresh-template.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/.helmignore create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/Chart.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/README.md create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/NOTES.txt create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/_helpers.tpl create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-deployments.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-gatekeepers.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-services.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/certs/letsencrypt.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/ingress.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/templates/secrets.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/helm/values.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/value-template.yaml create mode 100644 utilities/cloudharness_utilities/deployment-configuration/values-template.yaml create mode 100644 utilities/cloudharness_utilities/helm.py create mode 100644 utilities/cloudharness_utilities/openapi.py create mode 100644 utilities/cloudharness_utilities/utils.py create mode 100644 utilities/harness-application create mode 100644 utilities/harness-codefresh create mode 100644 utilities/harness-deployment create mode 100644 utilities/harness-generate create mode 100644 utilities/requirements.txt create mode 100644 utilities/setup.py create mode 100644 utilities/tests/resources/conf-source1/a.yaml create mode 100644 utilities/tests/resources/conf-source1/b.yaml create mode 100644 utilities/tests/resources/conf-source1/sub/a.yaml create mode 100644 utilities/tests/resources/conf-source1/sub/b.yaml create mode 100644 utilities/tests/resources/conf-source1/t.txt create mode 100644 utilities/tests/resources/conf-source2/a.yaml create mode 100644 utilities/tests/resources/conf-source2/c.yaml create mode 100644 utilities/tests/resources/conf-source2/sub/a.yaml create mode 100644 utilities/tests/resources/conf-source2/sub/c.yaml create mode 100644 utilities/tests/resources/conf-source2/t.txt create mode 100644 utilities/tests/test_utils.py diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..600e365e --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +**/node_modules \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..76933e9d --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +*.jar +.idea +node_modules +.openapi-generator +*.pyc +.vscode +.coverage +*.DS_Store +deployment diff --git a/README.md b/README.md new file mode 100644 index 00000000..e4d31503 --- /dev/null +++ b/README.md @@ -0,0 +1,247 @@ +# CloudHarness +CloudHarness is a base infrastructure facilitator for micro-service based applications deployed on 
Kubernetes.
+
+What building your cluster application with CloudHarness gives you:
+* A common framework and utilities to develop and deploy micro-service applications
+  * REST API scaffolding based on OpenAPI (Python-Flask)
+  * Automatic Helm chart generation
+  * Automatic build and push of images
+  * Continuous deployment script generation (Codefresh)
+* Prebuilt support applications and a shared library for:
+  * Login and user management - based on Keycloak
+  * Batch and asynchronous workflow submission - based on Argo
+  * Micro-service orchestration - based on Kafka
+
+## Get started
+
+### Prerequisites
+
+Python 3.7+ must be installed.
+
+It is recommended to set up a virtual environment.
+With conda:
+ ```bash
+ conda create --name ch python=3.7
+ conda activate ch
+ ```
+
+Install requirements:
+
+```bash
+pip install -r requirements.txt
+```
+
+### Generate deployment
+
+To generate a deployment, run either `harness-deployment` or `harness-codefresh`, depending on the type of deployment
+you need. See [below](#Deployment) for more details.
+
+### Create new REST application
+To create a new REST application, run `harness-application` from the root.
+
+### Generate server and client code from openapi
+To (re)generate the code for your applications, run `harness-generate` from the root.
+The script looks for all openapi applications and regenerates the Flask server code and documentation.
+Note: the script will override any manually modified files. To avoid that, define a `.openapi-generator-ignore` file, as in the sketch below.
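+
+For illustration, a minimal `.openapi-generator-ignore` could look as follows (a sketch; the patterns follow `.gitignore`-style syntax, and these entries mirror the ones suggested in `applications/README.md` to protect hand-written code from regeneration):
+
+```
+# keep manually modified controllers and the Dockerfile
+*/controllers/*
+Dockerfile
+```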
+
+## Extend CloudHarness
+CloudHarness is designed to be extended. In order to extend CloudHarness you just need to mirror its folder structure:
+* **applications**: place here your custom applications, or override default ones
+* **deployment-configuration**: override the helm chart default values and templates
+* **infrastructure**: define base images to use in your applications
+
+or simply copy the *blueprint* folder.
+
+## Deployment
+
+### Manually deploy on a kube cluster
+The Kubernetes client `kubectl` must be set up and working on the local machine,
+for instance with a Google Cloud cluster or a local Minikube.
+
+1. Locally build the images with `harness-deployment -b -l [--registry localhost:5000] [--tag 0.0.1]`
+1. Create the namespace `ch`: `kubectl create ns ch`
+1. Create the namespace `argo-workflows`: `kubectl create ns argo-workflows`
+1. (Optional) Try the helm chart with `helm install deployment/helm --name ch --namespace ch --dry-run`
+1. Install the helm chart with `helm install --name=ch deployment/helm --namespace ch` (`helm install ch deployment/helm --namespace ch` on Helm 3)
+1. Install Argo (see below)
+
+To upgrade an already existing chart, run
+`helm upgrade ch deployment/helm --namespace ch --install --force --reset-values`
+
+### Continuous deployment with Codefresh
+The codefresh pipeline setup is provided in `codefresh/codefresh.yaml`.
+The pipeline takes care of building the images from the source code and deploying the helm chart.
+Log in to codefresh and run the pipeline associated with the repository.
+To set up a new pipeline, simply indicate the remote yaml path `deployment/codefresh/codefresh.yaml`.
+
+In order to update the deployment, run
+```
+harness-codefresh .
+```
+More information about how to run the script is given below.
+
+### Relevant files and directory structure
+Deployment files are automatically generated with the script
+`harness-deployment`.
+
+The generated `deployment` folder holds all the resources intended to install and deploy the platform on Kubernetes:
+ - `codefresh`: codefresh build related files (automatically generated)
+ - `deployment-configuration`: override deployment templates
+
+The script goes through all the defined applications and uses templates to produce all the required
+definitions and variables.
+
+General templates are defined inside `deployment-configuration`.
+
+Applications can override template values by defining a file `values.yaml` in the same directory as the Dockerfile.
+
+#### Note: Docker registry
+With the `--build` flag the images are built locally. In order to make the deploy work, we need to specify a
+registry that is visible from inside the cluster. The `--registry` parameter specifies a registry to which
+images are pushed after the build.
+Any public registry will work. The suggested setup is to install a registry on localhost:5000 inside
+the kube cluster, also forwarded to localhost, and push to that registry, as sketched below.
+
+More info inside `./registry/README.md`.
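+
+One possible way to obtain such a registry with Minikube (a sketch assuming the standard `registry` addon; any registry reachable from inside the cluster works):
+
+```bash
+# enable the in-cluster registry shipped as a Minikube addon
+minikube addons enable registry
+# forward the registry service to localhost:5000 so that locally built images can be pushed to it
+kubectl port-forward --namespace kube-system service/registry 5000:80 &
+# build all images locally and push them to the forwarded registry
+harness-deployment -b -l --registry localhost:5000
+```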
+
+### Argo installation
+
+Argo is not yet part of the helm chart.
+
+In order to install it in the cluster, run
+
+```
+kubectl create ns argo
+kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo/v2.4.3/manifests/install.yaml
+kubectl apply -f argo/argo-service-account.yaml -n argo-workflows
+kubectl create rolebinding argo-workflows --clusterrole=admin --serviceaccount=argo-workflows:argo-workflows -n argo-workflows
+```
+
+See also https://argoproj.github.io/docs/argo/demo.html#2-install-the-controller-and-ui
+
+### Details about deployment generation
+
+The following deployment files are generated by `harness-deployment`:
+
+- Helm chart configuration for custom deployment: **./helm/values.yaml**
+- Codefresh build and deployment definition: **./codefresh/codefresh.yaml**
+
+The script `harness-codefresh` generates a build script to be used by codefresh.
+
+Control over the content of those files is achieved primarily by setting up a
+custom `values.yaml` and deploy/templates in the application folder.
+The files under
+`deployment-configuration` can also be modified for general overrides.
+
+Things to notice:
+
+- Each image created during the build step will have to be deployed to a k8s cluster.
+- A Helm chart is created under the `deployment/helm` path to handle deployments.
+- To populate the chart we use a `values.yaml` file.
+- Depending on whether we want to deploy to minikube or GKE, a slightly different file is required.
+- `harness-deployment` handles the creation of both files at once.
+
+How to:
+
+- Add a file named `values.yaml` to the application and put some values in it.
+- Run `harness-deployment`
+- Check `./deployment/codefresh/codefresh.yaml` and `./deployment/helm/ch/values.yaml`
+
+For example:
+
+```yaml
+# ./applications/docs/values.yaml
+harvest: false
+enabled: true
+port: 8080
+subdomain: docs
+```
+
+will generate entries in the following files:
+
+1.
+
+```yaml
+# ./deployment/helm/ch/values.yaml
+docs:
+  enabled: true
+  harvest: false
+  image:
+    name: ch-docs
+    tag: 0.0.1
+  name: docs
+  port: 8080
+  subdomain: docs
+```
+
+2.
+
+```yaml
+# ./deployment/codefresh/values.yaml
+docs:
+  enabled: true
+  harvest: false
+  image:
+    name: ch-docs
+    tag: ${{CF_SHORT_REVISION}}-${{CF_BUILD_TIMESTAMP}}
+  name: ch-docs
+  port: 8080
+  subdomain: docs
+```
+
+3. An ingress entry is generated if a subdomain is specified:
+
+```yaml
+  - host: "docs.cloudharness.metacell.us"
+    http:
+      paths:
+        - path: /
+          backend:
+            serviceName: "docs"
+            servicePort: 8080
+```
+
+### Build
+
+The script `harness-deployment` can optionally build the
+applications' Docker images. Those Docker images are needed if we plan to deploy
+outside Codefresh, for instance for local testing with Minikube.
+
+#### How to build
+
+Run `harness-deployment -b -l` (all images are built unless the `-i` option is provided).
+
+For further information, run `harness-deployment --help`
+
+#### Build conventions
+
+The build script scans `./applications` for Dockerfile definitions. It walks the `applications` folder recursively and creates a Docker image for each Dockerfile it finds.
+
+The naming convention for the images is as follows:
+
+`./applications/some-folder/anotherone/a-third-one/Dockerfile` -> `some-folder-anotherone-a-third-one`
+
+The `src` folder is removed from the final image name.
+
+## How to add a new CloudHarness custom application
+
+1. Add the application inside `applications/[APPLICATION_NAME]` with a Dockerfile in it
+1. Define *deploy/values.yaml* inside the application folder in order to specify custom values for the application
+1. (optional) define application-specific helm templates inside *deploy/templates*
+1. Run `harness-deployment`
+1. Define the helm templates for the application inside `deploy/templates`. In the helm templates, it is recommended to use the automatically generated values from `helm/ch/values.yaml`
+
+See more about the Helm chart installation in the specific [README](utilities/cloudharness-deploy/README.md).
+
+## How to add an external application
+
+A CloudHarness application can also specify a Kubernetes deployment using externally defined public images.
+Create a new CloudHarness application with the helm templates inside the *deploy* subdirectory, as sketched below.
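+
+As an illustrative sketch (the `docs` application name and the image are hypothetical; the value references follow the generated `helm/ch/values.yaml` structure shown above), such an application could ship a plain Kubernetes deployment as a helm template:
+
+```yaml
+# applications/docs/deploy/templates/deployment.yaml -- illustrative sketch
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ .Values.docs.name }}
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: {{ .Values.docs.name }}
+  template:
+    metadata:
+      labels:
+        app: {{ .Values.docs.name }}
+    spec:
+      containers:
+        - name: {{ .Values.docs.name }}
+          image: nginx:alpine  # externally defined public image
+          ports:
+            - containerPort: {{ .Values.docs.port }}
+```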
+
+
diff --git a/applications/README.md b/applications/README.md
new file mode 100644
index 00000000..77bcd284
--- /dev/null
+++ b/applications/README.md
@@ -0,0 +1,43 @@
+# CloudHarness Applications
+
+Here we put applications intended to run on the cluster.
+
+Each application is intended as an HTTP micro-service running on the cluster.
+An application installation is uniquely defined by a Dockerfile.
+
+## Define a REST application with openapi
+The preferred way to define an application is through the openapi specification. The code for the Python-Flask service
+and the Python client
+is automatically generated with the script `utilities/openapi-generate.py`.
+
+1. Add the application inside `applications/[APPLICATION_NAME]`
+1. Add the openapi yaml specification inside `applications/[APPLICATION_NAME]/api/[APPLICATION_NAME].yaml`
+1. Define the openapi configuration `applications/[APPLICATION_NAME]/api/config.json`. The name of the package (say,
+`PACKAGE_NAME`) can be configured here. By convention, the package name is `[APPLICATION_NAME]`
+1. Run `python utilities/openapi-generate.py` to generate code stubs
+
+After generating the code, change the Dockerfile in order to inherit from the main CloudHarness base image:
+
+```dockerfile
+ARG REGISTRY
+ARG TAG=latest
+FROM ${REGISTRY}cloudharness-base:${TAG}
+```
+
+The only code that should be modified goes inside `src/[PACKAGE_NAME]/controllers`.
+After modifying the controllers, add the following lines to `.openapi-generator-ignore`:
+
+```
+*/controllers/*
+Dockerfile
+```
+
+## Define an application without openapi
+1. Add the application inside `applications/[APPLICATION_NAME]` with a Dockerfile in it. The Dockerfile must inherit
+from `r.cfcr.io/tarelli/cloudharness-base` in order to get access to the cloudharness libraries.
+1. Define `values.yaml` inside the application folder in order to specify custom values for the application
+
+
+## Update deployment
+
+See the [deployment section](../deployment/README.md)
\ No newline at end of file
diff --git a/applications/accounts/Docker-compose.yaml b/applications/accounts/Docker-compose.yaml
new file mode 100644
index 00000000..13e7ac0d
--- /dev/null
+++ b/applications/accounts/Docker-compose.yaml
@@ -0,0 +1,40 @@
+version: '3.2'
+
+services:
+  postgres:
+    image: postgres
+    environment:
+      POSTGRES_DB: keycloak
+      POSTGRES_USER: keycloak
+      POSTGRES_PASSWORD: password
+  keycloak:
+    image: quay.io/keycloak/keycloak
+    environment:
+      DB_VENDOR: POSTGRES
+      DB_ADDR: postgres
+      DB_DATABASE: keycloak
+      DB_USER: keycloak
+      DB_SCHEMA: public
+      DB_PASSWORD: password
+      KEYCLOAK_USER: admin
+      KEYCLOAK_PASSWORD: Pa55w0rd
+
+    ports:
+      - 8080:8080
+    depends_on:
+      - postgres
+    volumes:
+      - type: bind
+        source: ./themes/custom
+        target: /opt/jboss/keycloak/themes/custom
+      # disable cache
+      - type: bind
+        source: ./standalone.xml
+        target: /opt/jboss/keycloak/standalone/configuration/standalone.xml
+
+      # - type: bind
+      #   source: ./keycloak
+      #   target: /opt/jboss/keycloak/themes/keycloak
+      # - type: bind
+      #   source: ./base
+      #   target: /opt/jboss/keycloak/themes/base
diff --git a/applications/accounts/Dockerfile b/applications/accounts/Dockerfile
new file mode 100644
index 00000000..ab9ddca6
--- /dev/null
+++ b/applications/accounts/Dockerfile
@@ -0,0 +1,3 @@
+FROM quay.io/keycloak/keycloak:9.0.2
+# Customize keycloak look
+COPY themes/custom /opt/jboss/keycloak/themes/custom
\ No newline at end of file
diff --git a/applications/accounts/deploy/resources/realm.json b/applications/accounts/deploy/resources/realm.json new file mode 100644 index 00000000..75532b5f --- /dev/null +++ b/applications/accounts/deploy/resources/realm.json @@ -0,0 +1,627 @@ +{ + "id": {{ .Values.namespace | quote }}, + "realm": {{ .Values.namespace | quote }}, + "enabled": true, + "sslRequired": "external", + "loginTheme": "custom", + "accountTheme": "custom", + "adminTheme": "custom", + "emailTheme": "custom", + "registrationAllowed": true, + "registrationEmailAsUsername": true, + "rememberMe": true, + "verifyEmail": true, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": true, + "editUsernameAllowed": true, + "roles": { + "realm": [ + { + "id": "70835ad6-1454-4bc5-86a4-f1597e776b75", + "name": {{ .Values.apps.accounts.admin.role | quote }}, + "composite": false, + "clientRole": false, + "containerId": {{ .Values.namespace | quote }}, + "attributes": {} + }, + { + "id": "498353dd-88eb-4a5e-99b8-d912e0f20f23", + "name":
"uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": {{ .Values.namespace | quote }}, + "attributes": {} + }, + { + "id": "f99970f1-958b-4bb8-8b39-0d7498b0ecc4", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": {{ .Values.namespace | quote }}, + "attributes": {} + } + ] + }, + "clients": [ + { + "id": "9a6a2560-c6be-4493-8bd5-3fdc4522d82b", + "clientId": {{ .Values.apps.accounts.client.id | quote }}, + "baseUrl": {{ printf "https://%s" .Values.domain | quote }}, + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": {{ .Values.apps.accounts.client.secret | quote }}, + "redirectUris": [ + "*" + ], + "webOrigins": [ + "*" + ], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "protocol": "openid-connect", + "attributes": { + "access.token.lifespan": "3600" + }, + "fullScopeAllowed": true, + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "111caf43-3d26-484d-8dc9-7fa911ac221c", + "clientId": {{ .Values.apps.accounts.webclient.id | quote }}, + "baseUrl": {{ printf "https://%s.%s" .Values.apps.events.subdomain .Values.domain | quote }}, + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": {{ .Values.apps.accounts.webclient.secret | quote }}, + "redirectUris": [ + "*" + ], + "webOrigins": [ + "*" + ], + "standardFlowEnabled": true, + "directAccessGrantsEnabled": true, + "protocol": "openid-connect", + "fullScopeAllowed": true, + "defaultClientScopes": [ + "web-origins", + "role_list", + "{{ .Values.apps.accounts.admin.role }}-scope", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + } + ], + "clientScopes": [ + { + "id": "a8cddc84-c506-4196-8f2d-1bd5e8769f3c", + "name": "{{ .Values.apps.accounts.admin.role }}-scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "45fc2547-1761-420b-b6a8-7dc882a51507", + "name": "{{ .Values.apps.accounts.admin.role }}-audience", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-mapper", + "consentRequired": false, + "config": { + "included.client.audience": {{ .Values.apps.accounts.webclient.id | quote }}, + "id.token.claim": "true", + "access.token.claim": "true" + } + } + ] + }, + { + "id": "35c37cdc-6841-41e7-b90f-2964fc563998", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "611fb1bc-56cd-49d2-a11b-ddf05bd220db", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + }, + { + "id": "63850e7d-1031-447a-a8af-3df588a39350", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", 
+ "consentRequired": false, + "config": { + "multivalued": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "dc927013-0448-4a29-ac72-7d6b019180d9", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false", + "consent.screen.text": "" + }, + "protocolMappers": [ + { + "id": "3cc4569c-83b0-4bc9-af31-186c8081f8ac", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "4bd583e6-9f6d-4846-9a94-2f02b1b4b1db", + "name": "roles", + "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "true", + "consent.screen.text": "${rolesScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "0d359e4f-3d4d-4ef3-88fd-2dd9f41da8cd", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "multivalued": "true", + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String" + } + }, + { + "id": "98ea5505-f703-49d2-b927-7715a7fc7a19", + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "multivalued": "true", + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String" + } + }, + { + "id": "28b26ce3-7edc-47c2-982f-881f1c001ef3", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "e2606962-dd91-4926-af4e-cce6a036a04a", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${phoneScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "c7e30f92-6026-4291-b526-3716662c26f1", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + }, + { + "id": "b1927570-c38d-49b8-9bbb-3cf9571f00be", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "6f532104-efc0-41d9-8fbc-9c78372d3f1b", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + 
"display.on.consent.screen": "true", + "consent.screen.text": "${addressScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "548dd8e4-1ee8-4f7d-8934-439bdd1cc0bb", + "name": "address", + "protocol": "openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + "user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + } + } + ] + }, + { + "id": "b16d9232-a4e2-47d4-a368-5279a0d84913", + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${emailScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "4bd6701a-cc02-481e-83c5-e048ea5d83a9", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + }, + { + "id": "4cf00282-d385-456a-8943-4bdde6357c16", + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "1e9fa514-8ae1-4980-9ccc-2d2d2c43c7e6", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${profileScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "2e186cd7-b7d5-4b63-b765-c77036183db6", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "String" + } + }, + { + "id": "86e94688-d91b-493b-809a-07005c7e6cab", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "8e65f9c7-a3c0-4bf6-9c4e-47be99464408", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + "id": "9eeaaeb3-93fc-439f-a8db-d6f3693a8ba1", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + 
"userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "34e60d98-fcde-49a2-b093-748464886a0d", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": "08fa0341-5dd3-42e2-babb-1151c35b72c3", + "name": "nickname", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "9d9f1655-9b23-4e15-b244-aeffcb20c5ba", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "23b19dbb-5af2-494e-b462-e8f63d9266f4", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + }, + { + "id": "b4644d65-ffbb-4e0b-8aac-238665af40dc", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + }, + { + "id": "6366756e-bf69-4844-b127-60fa514ad768", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + }, + { + "id": "3d763f84-d417-4b4e-99e4-2b0e05bf861a", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "d05efa25-5348-4a14-9550-69791df4ec5e", + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": "oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + }, + { + "id": "0d66e664-6b0c-45de-ba88-b2b86b23cacc", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + 
"access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "17d3b93d-993b-4768-892c-0b20f8462be3", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "a46716b3-8da1-4657-b703-13a5cd472c92", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "0ab50259-1e8b-40bd-9686-fb9a54dfc37d", + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "0f9bd78c-129e-4f87-9cf7-8b68b628ea1b", + "name": "offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + } + ], + "keycloakVersion": "6.0.1" + } \ No newline at end of file diff --git a/applications/accounts/deploy/templates/configmap.yaml b/applications/accounts/deploy/templates/configmap.yaml new file mode 100644 index 00000000..de79112e --- /dev/null +++ b/applications/accounts/deploy/templates/configmap.yaml @@ -0,0 +1,11 @@ +{{- /* Be careful with json typos here, mainly trailing ',' */}} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ .Values.apps.accounts.name | quote }} + labels: + app: {{ .Values.apps.accounts.name | quote }} +{{ include "deploy_utils.labels" $ | indent 4 }} +data: + realm.json: {{ tpl (.Files.Get "resources/accounts/realm.json") . 
| quote }} + diff --git a/applications/accounts/deploy/templates/keycloak-postgres.yaml b/applications/accounts/deploy/templates/keycloak-postgres.yaml new file mode 100644 index 00000000..0995b5c6 --- /dev/null +++ b/applications/accounts/deploy/templates/keycloak-postgres.yaml @@ -0,0 +1,77 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: accounts-db + labels: + app: {{ .Values.apps.accounts.db.name }} +{{ include "deploy_utils.labels" $ | indent 4 }} +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 2Gi +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.apps.accounts.db.name }} + labels: + app: {{ .Values.apps.accounts.db.name }} +{{ include "deploy_utils.labels" $ | indent 4 }} +spec: + type: ClusterIP + ports: + - port: 5432 + selector: + app: {{ .Values.apps.accounts.db.name }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Values.apps.accounts.db.name }} + labels: + app: {{ .Values.apps.accounts.db.name }} +{{ include "deploy_utils.labels" $ | indent 4 }} +spec: + replicas: 1 + selector: + matchLabels: + app: {{ .Values.apps.accounts.db.name | quote }} +{{ include "deploy_utils.labels" $ | indent 6 }} + template: + metadata: + labels: + app: {{ .Values.apps.accounts.db.name }} +{{ include "deploy_utils.labels" $ | indent 8 }} + spec: + containers: + - name: {{ .Values.apps.accounts.db.name | default "keycloak-postgress" | quote }} + image: {{ .Values.apps.accounts.db.image }} + imagePullPolicy: "IfNotPresent" + env: + - name: POSTGRES_DB + value: {{ .Values.apps.accounts.db.initialdb | quote }} + - name: POSTGRES_USER + value: {{ .Values.apps.accounts.db.user | quote }} + - name: POSTGRES_PASSWORD + value: {{ .Values.apps.accounts.db.pass | quote }} + - name: PGDATA + value: /var/lib/postgresql/data/pgdata + ports: + - containerPort: 5432 + resources: + requests: + memory: "64Mi" + cpu: "100m" + limits: + memory: "128Mi" + cpu: "200m" + volumeMounts: + - name: accounts-db + mountPath: /var/lib/postgresql/data + volumes: + - name: accounts-db + persistentVolumeClaim: + claimName: accounts-db +--- \ No newline at end of file diff --git a/applications/accounts/deploy/templates/keycloak.yaml b/applications/accounts/deploy/templates/keycloak.yaml new file mode 100644 index 00000000..29fa128e --- /dev/null +++ b/applications/accounts/deploy/templates/keycloak.yaml @@ -0,0 +1,78 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Values.apps.accounts.name | quote }} + labels: + app: {{ .Values.apps.accounts.name | quote }} +{{ include "deploy_utils.labels" $ | indent 4 }} +spec: + replicas: 1 + selector: + matchLabels: + app: {{ .Values.apps.accounts.name | quote }} +{{ include "deploy_utils.labels" $ | indent 6 }} + template: + metadata: + {{- if .Values.apps.accounts.harvest }} + annotations: + co.elastic.logs/enabled: "true" + metricbeat: "true" + {{- end }} + labels: + app: {{ .Values.apps.accounts.name | quote }} +{{ include "deploy_utils.labels" $ | indent 8 }} + spec: + {{ if .Values.codefresh }} + imagePullSecrets: + - name: {{ .Values.codefresh.secret }} + {{- end }} + containers: + - name: {{ .Values.apps.accounts.name | default "keycloak" | quote }} + image: {{ .Values.apps.accounts.image }} + imagePullPolicy: {{ include "deploy_utils.pullpolicy" . 
}} + env: + - name: KEYCLOAK_IMPORT + value: "/tmp/realm.json" + - name: KEYCLOAK_USER + value: {{ .Values.apps.accounts.admin.user | quote }} + - name: KEYCLOAK_PASSWORD + value: {{ .Values.apps.accounts.admin.pass | quote }} + - name: PROXY_ADDRESS_FORWARDING + value: "true" + - name: DB_VENDOR + value: POSTGRES + - name: DB_ADDR + value: {{ .Values.apps.accounts.db.name | quote }} + - name: DB_DATABASE + value: {{ .Values.apps.accounts.db.initialdb | quote }} + - name: DB_USER + value: {{ .Values.apps.accounts.db.user | quote }} + - name: DB_PASSWORD + value: {{ .Values.apps.accounts.db.pass | quote }} + {{- include "deploy_utils.env" . | nindent 8 }} + {{- include "deploy_utils.privenv" . | nindent 8 }} + volumeMounts: + - name: realm-config + mountPath: "/tmp/realm.json" + subPath: realm.json + ports: + - name: http + containerPort: {{ .Values.apps.accounts.port | default 8080 }} + - name: https + containerPort: 8443 + readinessProbe: + httpGet: + path: /auth/realms/master + port: {{ .Values.apps.accounts.port }} + resources: + requests: + memory: "256Mi" + cpu: "200m" + limits: + memory: "1024Mi" + cpu: "500m" + volumes: + - name: realm-config + configMap: + name: {{ .Values.apps.accounts.name | quote }} +--- \ No newline at end of file diff --git a/applications/accounts/deploy/values.yaml b/applications/accounts/deploy/values.yaml new file mode 100644 index 00000000..e4622bb3 --- /dev/null +++ b/applications/accounts/deploy/values.yaml @@ -0,0 +1,24 @@ +admin: + pass: metacell + user: admin + role: administrator +client: + id: rest-client + secret: 5678eb6e-9e2c-4ee5-bd54-34e7411339e8 +db: + image: postgres:10.4 + initialdb: auth_db + name: keycloak-postgress + pass: password + user: user +enabled: true +harvest: true +webclient: + id: web-client + secret: 452952ae-922c-4766-b912-7b106271e34b +name: accounts +port: 8080 +subdomain: accounts +# only use in minikube (with letsencrypt, we use default image) +gatekeeper: + image: accounts-keycloak-gatekeeper \ No newline at end of file diff --git a/applications/accounts/keycloak-gatekeeper/Dockerfile b/applications/accounts/keycloak-gatekeeper/Dockerfile new file mode 100644 index 00000000..50168e3a --- /dev/null +++ b/applications/accounts/keycloak-gatekeeper/Dockerfile @@ -0,0 +1,37 @@ +FROM alpine:3.8 + +ENV NAME keycloak-gatekeeper +ENV KEYCLOAK_VERSION 6.0.1 +ENV GOOS linux +ENV GOARCH amd64 +ENV DOMAIN cloudharness.local + +LABEL Name=keycloak-gatekeeper \ + Release=https://github.com/keycloak/keycloak-gatekeeper \ + Url=https://github.com/keycloak/keycloak-gatekeeper \ + Help=https://issues.jboss.org/projects/KEYCLOAK + +RUN apk add --no-cache curl tar bash +RUN apk add --update openssl && \ + rm -rf /var/cache/apk/* + +RUN openssl genrsa -des3 -passout pass:x -out server.pass.key 2048 && \ + openssl rsa -passin pass:x -in server.pass.key -out server.key && \ + rm server.pass.key && \ + openssl req -new -key server.key -out server.csr \ + -subj "/C=UK/ST=Oxford/L=Leamington/O=OrgName/OU=IT Department/CN=*.${DOMAIN}" && \ + openssl x509 -req -days 365 -in server.csr -signkey server.key -out /usr/local/share/ca-certificates/cacert.crt +RUN cat /usr/local/share/ca-certificates/cacert.crt +WORKDIR /opt +RUN echo "https://downloads.jboss.org/keycloak/$KEYCLOAK_VERSION/gatekeeper/$NAME-$GOOS-$GOARCH.tar.gz" +RUN curl -fssL "https://downloads.jboss.org/keycloak/$KEYCLOAK_VERSION/gatekeeper/$NAME-$GOOS-$GOARCH.tar.gz" | tar -xz && chmod +x /opt/$NAME + + +# Update the CA list for ubuntu +RUN update-ca-certificates --verbose +# include 
your CA in httplib2 (required for hand-shake between UI servers and keycloak) +# RUN cat /usr/local/share/ca-certificates/extra/cacert.crt >> /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +#### + +ENTRYPOINT [ "/opt/keycloak-gatekeeper" ] \ No newline at end of file diff --git a/applications/accounts/standalone.xml b/applications/accounts/standalone.xml new file mode 100644 index 00000000..b262e4a6 --- /dev/null +++ b/applications/accounts/standalone.xml @@ -0,0 +1,614 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE + h2 + + sa + sa + + + + jdbc:postgresql://${env.DB_ADDR:postgres}/${env.DB_DATABASE:keycloak}${env.JDBC_PARAMS:} + postgresql + + IdleConnections + + + ${env.DB_USER:keycloak} + ${env.DB_PASSWORD:password} + + + SELECT 1 + true + 60000 + + + + + org.h2.jdbcx.JdbcDataSource + + + org.postgresql.xa.PGXADataSource + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + auth + + + classpath:${jboss.home.dir}/providers/* + + + master + 900 + + -1 + false + false + ${env.KEYCLOAK_WELCOME_THEME:keycloak} + ${env.KEYCLOAK_DEFAULT_THEME:keycloak} + ${jboss.home.dir}/themes + + + + + + + + + + + + + jpa + + + basic + + + + + + + + + + + + + + + + + + + + default + + + + + + + + ${keycloak.jta.lookup.provider:jboss} + + + + + + + + + + + ${keycloak.x509cert.lookup.provider:default} + + + + ${keycloak.hostname.provider:default} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/applications/argo/deploy/templates/argo-sa.yaml b/applications/argo/deploy/templates/argo-sa.yaml new file mode 100644 index 00000000..91bfe691 --- /dev/null +++ b/applications/argo/deploy/templates/argo-sa.yaml @@ -0,0 +1,53 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ .Values.apps.argo.serviceaccount }} + namespace: {{ .Release.Namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: {{ .Values.apps.argo.serviceaccount }}-access-1 + namespace: {{ .Release.Namespace }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: cluster-admin +subjects: +- kind: ServiceAccount + name: {{ .Values.apps.argo.serviceaccount }} + namespace: {{ .Release.Namespace }} +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ .Values.apps.argo.serviceaccount }} + namespace: {{ .Values.apps.argo.workflows_namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: {{ .Values.apps.argo.serviceaccount }}-access-2 + namespace: {{ .Values.apps.argo.workflows_namespace }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: 
ClusterRole + name: cluster-admin +subjects: +- kind: ServiceAccount + name: {{ .Values.apps.argo.serviceaccount }} + namespace: {{ .Values.apps.argo.workflows_namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: {{ .Values.apps.argo.serviceaccount }}-access-3 + namespace: {{ .Values.apps.argo.workflows_namespace }} +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: cluster-admin +subjects: +- kind: ServiceAccount + name: cloudharness + namespace: default \ No newline at end of file diff --git a/applications/argo/deploy/values.yaml b/applications/argo/deploy/values.yaml new file mode 100644 index 00000000..b6987160 --- /dev/null +++ b/applications/argo/deploy/values.yaml @@ -0,0 +1,9 @@ +enabled: true +name: argo-server +subdomain: argo +port: 2746 +serviceaccount: argo-workflows +namespace: argo +workflows_namespace: argo-workflows +secureme: true +autoservice: false \ No newline at end of file diff --git a/applications/events/deploy/resources/broker/init.sh b/applications/events/deploy/resources/broker/init.sh new file mode 100644 index 00000000..99581a71 --- /dev/null +++ b/applications/events/deploy/resources/broker/init.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -e +set -x +cp /etc/kafka-configmap/log4j.properties /etc/kafka/ + +KAFKA_BROKER_ID=${HOSTNAME##*-} +SEDS=("s/#init#broker.id=#init#/broker.id=$KAFKA_BROKER_ID/") +LABELS="kafka-broker-id=$KAFKA_BROKER_ID" +ANNOTATIONS="" + +hash kubectl 2>/dev/null || { + SEDS+=("s/#init#broker.rack=#init#/#init#broker.rack=# kubectl not found in path/") +} && { + ZONE=$(kubectl get node "$NODE_NAME" -o=go-template='{{index .metadata.labels "failure-domain.beta.kubernetes.io/zone"}}') + if [ "x$ZONE" == "x" ]; then + SEDS+=("s/#init#broker.rack=#init#/#init#broker.rack=# zone label not found for node $NODE_NAME/") + else + SEDS+=("s/#init#broker.rack=#init#/broker.rack=$ZONE/") + LABELS="$LABELS kafka-broker-rack=$ZONE" + fi + + OUTSIDE_HOST=$(kubectl get node "$NODE_NAME" -o jsonpath='{.status.addresses[?(@.type=="InternalIP")].address}') + OUTSIDE_PORT=$((32400 + ${KAFKA_BROKER_ID})) + SEDS+=("s|#init#advertised.listeners=PLAINTEXT://#init#|advertised.listeners=PLAINTEXT://:9092,OUTSIDE://${OUTSIDE_HOST}:${OUTSIDE_PORT}|") + ANNOTATIONS="$ANNOTATIONS kafka-listener-outside-host=$OUTSIDE_HOST kafka-listener-outside-port=$OUTSIDE_PORT" + + if [ ! -z "$LABELS" ]; then + kubectl -n $POD_NAMESPACE label pod $POD_NAME $LABELS || echo "Failed to label $POD_NAMESPACE.$POD_NAME - RBAC issue?" + fi + if [ ! -z "$ANNOTATIONS" ]; then + kubectl -n $POD_NAMESPACE annotate pod $POD_NAME $ANNOTATIONS || echo "Failed to annotate $POD_NAMESPACE.$POD_NAME - RBAC issue?" + fi +} +printf '%s\n' "${SEDS[@]}" | sed -f - /etc/kafka-configmap/server.properties > /etc/kafka/server.properties.tmp +[ $? 
-eq 0 ] && mv /etc/kafka/server.properties.tmp /etc/kafka/server.properties + diff --git a/applications/events/deploy/resources/broker/log4j.properties b/applications/events/deploy/resources/broker/log4j.properties new file mode 100644 index 00000000..085a7e41 --- /dev/null +++ b/applications/events/deploy/resources/broker/log4j.properties @@ -0,0 +1,76 @@ +# Unspecified loggers and loggers with additivity=true output to server.log and stdout +# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log +log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log +log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log +log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log +log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log +log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Change the two lines below to adjust ZK client logging +log4j.logger.org.I0Itec.zkclient.ZkClient=INFO +log4j.logger.org.apache.zookeeper=INFO + +# Change the two lines below to adjust the general broker logging level (output to server.log and stdout) +log4j.logger.kafka=INFO +log4j.logger.org.apache.kafka=INFO + +# Change to DEBUG or TRACE to enable request logging +log4j.logger.kafka.request.logger=WARN, requestAppender +log4j.additivity.kafka.request.logger=false + +# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output +# related to the handling of 
requests +#log4j.logger.kafka.network.Processor=TRACE, requestAppender +#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender +#log4j.additivity.kafka.server.KafkaApis=false +log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender +log4j.additivity.kafka.network.RequestChannel$=false + +log4j.logger.kafka.controller=TRACE, controllerAppender +log4j.additivity.kafka.controller=false + +log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender +log4j.additivity.kafka.log.LogCleaner=false + +log4j.logger.state.change.logger=TRACE, stateChangeAppender +log4j.additivity.state.change.logger=false + +# Change to DEBUG to enable audit log for the authorizer +log4j.logger.kafka.authorizer.logger=WARN, authorizerAppender +log4j.additivity.kafka.authorizer.logger=false \ No newline at end of file diff --git a/applications/events/deploy/resources/broker/server.properties b/applications/events/deploy/resources/broker/server.properties new file mode 100644 index 00000000..2eb996f6 --- /dev/null +++ b/applications/events/deploy/resources/broker/server.properties @@ -0,0 +1,134 @@ +############################# Log Basics ############################## + +# A comma-separated list of directories under which to store log files +# Overrides log.dir +log.dirs=/var/lib/kafka/data/topics + +# The default number of log partitions per topic. More partitions allow greater +# parallelism for consumption, but this will also result in more files across +# the brokers. +num.partitions=12 + +default.replication.factor=3 + +min.insync.replicas=2 + +auto.create.topics.enable=false + +# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. +# This value is recommended to be increased for installations with data dirs located in RAID array. +# num.recovery.threads.per.data.dir=1 + +############################# Server Basics ############################# + +# The id of the broker. This must be set to a unique integer for each broker. +#init#broker.id=#init# + +#init#broker.rack=#init# + +############################# Socket Server Settings ############################# + +# The address the socket server listens on. It will get the value returned from +# java.net.InetAddress.getCanonicalHostName() if not configured. +# FORMAT: +# listeners = listener_name://host_name:port +# EXAMPLE: +# listeners = PLAINTEXT://your.host.name:9092 +#listeners=PLAINTEXT://:9092 +listeners=PLAINTEXT://:9092,OUTSIDE://:9094 + +# Hostname and port the broker will advertise to producers and consumers. If not set, +# it uses the value for "listeners" if configured. Otherwise, it will use the value +# returned from java.net.InetAddress.getCanonicalHostName(). +#advertised.listeners=PLAINTEXT://your.host.name:9092 +#init#advertised.listeners=PLAINTEXT://#init# + +# Maps listener names to security protocols, the default is for them to be the same.
See the config documentation for more details +#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL +listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL,OUTSIDE:PLAINTEXT +inter.broker.listener.name=PLAINTEXT + +# The number of threads that the server uses for receiving requests from the network and sending responses to the network +#num.network.threads=3 + +# The number of threads that the server uses for processing requests, which may include disk I/O +#num.io.threads=8 + +# The send buffer (SO_SNDBUF) used by the socket server +#socket.send.buffer.bytes=102400 + +# The receive buffer (SO_RCVBUF) used by the socket server +#socket.receive.buffer.bytes=102400 + +# The maximum size of a request that the socket server will accept (protection against OOM) +#socket.request.max.bytes=104857600 + +############################# Internal Topic Settings ############################# +# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" +# For anything other than development testing, a value greater than 1 is recommended to ensure availability, such as 3. +#offsets.topic.replication.factor=1 +#transaction.state.log.replication.factor=1 +#transaction.state.log.min.isr=1 + +############################# Log Flush Policy ############################# + +# Messages are immediately written to the filesystem but by default we only fsync() to sync +# the OS cache lazily. The following configurations control the flush of data to disk. +# There are a few important trade-offs here: +# 1. Durability: Unflushed data may be lost if you are not using replication. +# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. +# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. +# The settings below allow one to configure the flush policy to flush data after a period of time or +# every N messages (or both). This can be done globally and overridden on a per-topic basis. + +# The number of messages to accept before forcing a flush of data to disk +#log.flush.interval.messages=10000 + +# The maximum amount of time a message can sit in a log before we force a flush +#log.flush.interval.ms=1000 + +############################# Log Retention Policy ############################# + +# The following configurations control the disposal of log segments. The policy can +# be set to delete segments after a period of time, or after a given size has accumulated. +# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens +# from the end of the log. + +# https://cwiki.apache.org/confluence/display/KAFKA/KIP-186%3A+Increase+offsets+retention+default+to+7+days +offsets.retention.minutes=10080 + +# The minimum age of a log file to be eligible for deletion due to age +log.retention.hours=-1 + +# A size-based retention policy for logs. Segments are pruned from the log unless the remaining +# segments drop below log.retention.bytes. Functions independently of log.retention.hours. +#log.retention.bytes=1073741824 +
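A note for readers of these settings: inside the cluster, clients reach this broker through the `bootstrap` service that the events chart exposes on port 9092 (the PLAINTEXT listener above). A minimal producer sketch, assuming the third-party kafka-python package and a pre-created topic, since auto.create.topics.enable is set to false:

```python
# Sketch only (not part of this patch): publish a message from inside the cluster.
# Assumes the kafka-python package; the "events" topic name is hypothetical and
# must be created beforehand because auto.create.topics.enable is false.
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers="bootstrap:9092")  # service name and port from the events chart
producer.send("events", b"hello from cloudharness")
producer.flush()
```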
+# The maximum size of a log segment file. When this size is reached a new log segment will be created. +#log.segment.bytes=1073741824 + +# The interval at which log segments are checked to see if they can be deleted according +# to the retention policies +#log.retention.check.interval.ms=300000 + +############################# Zookeeper ############################# + +# Zookeeper connection string (see zookeeper docs for details). +# This is a comma-separated list of host:port pairs, each corresponding to a zk +# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". +# You can also append an optional chroot string to the urls to specify the +# root directory for all kafka znodes. +zookeeper.connect=zookeeper:2181 + +# Timeout in ms for connecting to zookeeper +#zookeeper.connection.timeout.ms=6000 + + +############################# Group Coordinator Settings ############################# + +# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. +# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. +# The default value for this is 3 seconds. +# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. +# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. +#group.initial.rebalance.delay.ms=0 diff --git a/applications/events/deploy/resources/zookeeper/init.sh b/applications/events/deploy/resources/zookeeper/init.sh new file mode 100644 index 00000000..eb9f567c --- /dev/null +++ b/applications/events/deploy/resources/zookeeper/init.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -e +set -x + +[ -d /var/lib/zookeeper/data ] || mkdir /var/lib/zookeeper/data +[ -z "$ID_OFFSET" ] && ID_OFFSET=1 +export ZOOKEEPER_SERVER_ID=$((${HOSTNAME##*-} + $ID_OFFSET)) +echo "${ZOOKEEPER_SERVER_ID:-1}" | tee /var/lib/zookeeper/data/myid +cp -Lur /etc/kafka-configmap/* /etc/kafka/ +[ ! -z "$PZOO_REPLICAS" ] && [ !
-z "$ZOO_REPLICAS" ] && { + sed -i "s/^server\\./#server./" /etc/kafka/zookeeper.properties + for N in $(seq $PZOO_REPLICAS); do echo "server.$N=pzoo-$(( $N - 1 )).pzoo:2888:3888:participant" >> /etc/kafka/zookeeper.properties; done + for N in $(seq $ZOO_REPLICAS); do echo "server.$(( $PZOO_REPLICAS + $N ))=zoo-$(( $N - 1 )).zoo:2888:3888:participant" >> /etc/kafka/zookeeper.properties; done +} +sed -i "s/server\.$ZOOKEEPER_SERVER_ID\=[a-z0-9.-]*/server.$ZOOKEEPER_SERVER_ID=0.0.0.0/" /etc/kafka/zookeeper.properties diff --git a/applications/events/deploy/resources/zookeeper/log4j.properties b/applications/events/deploy/resources/zookeeper/log4j.properties new file mode 100644 index 00000000..9f71fa75 --- /dev/null +++ b/applications/events/deploy/resources/zookeeper/log4j.properties @@ -0,0 +1,8 @@ +log4j.rootLogger=INFO, stdout +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Suppress connection log messages, three lines per livenessProbe execution +log4j.logger.org.apache.zookeeper.server.NIOServerCnxnFactory=WARN +log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN \ No newline at end of file diff --git a/applications/events/deploy/resources/zookeeper/zookeeper.properties b/applications/events/deploy/resources/zookeeper/zookeeper.properties new file mode 100644 index 00000000..ee890a08 --- /dev/null +++ b/applications/events/deploy/resources/zookeeper/zookeeper.properties @@ -0,0 +1,12 @@ +tickTime=2000 +dataDir=/var/lib/zookeeper/data +dataLogDir=/var/lib/zookeeper/log +clientPort=2181 +maxClientCnxns=100 +initLimit=5 +syncLimit=2 +server.1=pzoo-0.pzoo:2888:3888:participant +server.2=pzoo-1.pzoo:2888:3888:participant +server.3=pzoo-2.pzoo:2888:3888:participant +server.4=zoo-0.zoo:2888:3888:participant +server.5=zoo-1.zoo:2888:3888:participant \ No newline at end of file diff --git a/applications/events/deploy/templates/broker-config.yml b/applications/events/deploy/templates/broker-config.yml new file mode 100644 index 00000000..e49bbda2 --- /dev/null +++ b/applications/events/deploy/templates/broker-config.yml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: broker-config +data: +{{ (.Files.Glob "resources/events/broker/*").AsConfig | indent 2 }} + + + \ No newline at end of file diff --git a/applications/events/deploy/templates/deployments.yml b/applications/events/deploy/templates/deployments.yml new file mode 100644 index 00000000..ab91e7b6 --- /dev/null +++ b/applications/events/deploy/templates/deployments.yml @@ -0,0 +1,342 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: kafka +spec: + podManagementPolicy: Parallel + replicas: 1 + selector: + matchLabels: + app: kafka + serviceName: broker + template: + metadata: + annotations: null + labels: + app: kafka + spec: + serviceAccountName: kafka + containers: + - command: + - ./bin/kafka-server-start.sh + - /etc/kafka/server.properties + - --override + - default.replication.factor=1 + - --override + - min.insync.replicas=1 + - --override + - offsets.topic.replication.factor=1 + - --override + - offsets.topic.num.partitions=1 + env: + - name: CLASSPATH + value: /opt/kafka/libs/extensions/* + - name: KAFKA_LOG4J_OPTS + value: -Dlog4j.configuration=file:/etc/kafka/log4j.properties + - name: JMX_PORT + value: "5555" + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + lifecycle: + preStop: + exec: + command: + - 
sh + - -ce + - kill -s TERM 1; while $(kill -0 1 2>/dev/null); do sleep 1; done + name: broker + ports: + - containerPort: 9092 + name: inside + - containerPort: 9094 + name: outside + - containerPort: 5555 + name: jmx + readinessProbe: + tcpSocket: + port: 9092 + timeoutSeconds: 1 + resources: + limits: + cpu: 500m + memory: 600Mi + requests: + cpu: 100m + memory: 100Mi + volumeMounts: + - mountPath: /etc/kafka + name: config + - mountPath: /var/lib/kafka/data + name: data + - mountPath: /opt/kafka/libs/extensions + name: extensions + initContainers: + - command: + - /bin/bash + - /etc/kafka-configmap/init.sh + env: + - name: NODE_NAME + valueFrom: + fieldRef: + fieldPath: spec.nodeName + - name: POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: POD_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + name: init-config + volumeMounts: + - mountPath: /etc/kafka-configmap + name: configmap + - mountPath: /etc/kafka + name: config + - mountPath: /opt/kafka/libs/extensions + name: extensions + terminationGracePeriodSeconds: 30 + volumes: + - configMap: + name: broker-config + name: configmap + - emptyDir: {} + name: config + - emptyDir: {} + name: extensions + updateStrategy: + type: RollingUpdate + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 10Gi + storageClassName: standard +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: pzoo +spec: + podManagementPolicy: Parallel + replicas: 1 + selector: + matchLabels: + app: zookeeper + storage: persistent + serviceName: pzoo + template: + metadata: + annotations: null + labels: + app: zookeeper + storage: persistent + spec: + serviceAccountName: kafka + containers: + - command: + - ./bin/zookeeper-server-start.sh + - /etc/kafka/zookeeper.properties + env: + - name: KAFKA_LOG4J_OPTS + value: -Dlog4j.configuration=file:/etc/kafka/log4j.properties + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + lifecycle: + preStop: + exec: + command: + - sh + - -ce + - kill -s TERM 1; while $(kill -0 1 2>/dev/null); do sleep 1; done + name: zookeeper + ports: + - containerPort: 2181 + name: client + - containerPort: 2888 + name: peer + - containerPort: 3888 + name: leader-election + readinessProbe: + exec: + command: + - /bin/sh + - -c + - '[ "imok" = "$(echo ruok | nc -w 1 -q 1 127.0.0.1 2181)" ]' + resources: + limits: + memory: 120Mi + requests: + cpu: 10m + memory: 100Mi + volumeMounts: + - mountPath: /etc/kafka + name: config + - mountPath: /var/lib/zookeeper + name: data + initContainers: + - command: + - /bin/bash + - /etc/kafka-configmap/init.sh + env: + - name: PZOO_REPLICAS + value: "1" + - name: ZOO_REPLICAS + value: "0" + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + name: init-config + volumeMounts: + - mountPath: /etc/kafka-configmap + name: configmap + - mountPath: /etc/kafka + name: config + - mountPath: /var/lib/zookeeper + name: data + terminationGracePeriodSeconds: 10 + volumes: + - configMap: + name: zookeeper-config + name: configmap + - emptyDir: {} + name: config + updateStrategy: + type: RollingUpdate + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi + storageClassName: standard +--- +apiVersion: apps/v1 +kind: 
StatefulSet +metadata: + name: zoo +spec: + podManagementPolicy: Parallel + replicas: 0 + selector: + matchLabels: + app: zookeeper + storage: persistent-regional + serviceName: zoo + template: + metadata: + annotations: null + labels: + app: zookeeper + storage: persistent-regional + spec: + serviceAccountName: kafka + containers: + - command: + - ./bin/zookeeper-server-start.sh + - /etc/kafka/zookeeper.properties + env: + - name: KAFKA_LOG4J_OPTS + value: -Dlog4j.configuration=file:/etc/kafka/log4j.properties + image: solsson/kafka:2.3.0@sha256:b59603a8c0645f792fb54e9571500e975206352a021d6a116b110945ca6c3a1d + lifecycle: + preStop: + exec: + command: + - sh + - -ce + - kill -s TERM 1; while $(kill -0 1 2>/dev/null); do sleep 1; done + name: zookeeper + ports: + - containerPort: 2181 + name: client + - containerPort: 2888 + name: peer + - containerPort: 3888 + name: leader-election + readinessProbe: + exec: + command: + - /bin/sh + - -c + - '[ "imok" = "$(echo ruok | nc -w 1 -q 1 127.0.0.1 2181)" ]' + resources: + limits: + memory: 120Mi + requests: + cpu: 10m + memory: 100Mi + volumeMounts: + - mountPath: /etc/kafka + name: config + - mountPath: /var/lib/zookeeper + name: data + initContainers: + - command: + - /bin/bash + - /etc/kafka-configmap/init.sh + env: + - name: PZOO_REPLICAS + value: "1" + - name: ZOO_REPLICAS + value: "0" + - name: ID_OFFSET + value: "2" + image: solsson/kafka-initutils@sha256:f6d9850c6c3ad5ecc35e717308fddb47daffbde18eb93e98e031128fe8b899ef + name: init-config + volumeMounts: + - mountPath: /etc/kafka-configmap + name: configmap + - mountPath: /etc/kafka + name: config + - mountPath: /var/lib/zookeeper + name: data + terminationGracePeriodSeconds: 10 + volumes: + - configMap: + name: zookeeper-config + name: configmap + - emptyDir: {} + name: config + updateStrategy: + type: RollingUpdate + volumeClaimTemplates: + - metadata: + name: data + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi + storageClassName: standard +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Values.apps.events.name }} +spec: + replicas: 1 + selector: + matchLabels: + app: {{ .Values.apps.events.name }} + template: + metadata: + labels: + app: {{ .Values.apps.events.name }} + spec: + containers: + - name: kafka-manager + image: solsson/kafka-manager@sha256:9da595ecbb733074a1d3c6091a1e0c384da4f4e1f19f4e16276062278da8e592 + ports: + - containerPort: 80 + env: + - name: ZK_HOSTS + value: zookeeper.{{ .Values.namespace }}:2181 + command: + - ./bin/kafka-manager + - -Dhttp.port=80 \ No newline at end of file diff --git a/applications/events/deploy/templates/roles.yml b/applications/events/deploy/templates/roles.yml new file mode 100644 index 00000000..47018c32 --- /dev/null +++ b/applications/events/deploy/templates/roles.yml @@ -0,0 +1,66 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: kafka + namespace: {{ .Release.Namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + labels: + origin: github.com_Yolean_kubernetes-kafka + name: pod-labler + +rules: +- apiGroups: + - "" + resources: + - pods + verbs: + - get + - update + - patch +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: node-reader + labels: + origin: github.com_Yolean_kubernetes-kafka +rules: +- apiGroups: + - "" + resources: + - nodes + verbs: + - get +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + labels: + origin: github.com_Yolean_kubernetes-kafka + name: kafka-pod-labler 
+roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: pod-labler +subjects: +- kind: ServiceAccount + name: kafka + namespace: {{ .Release.Namespace }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + labels: + origin: github.com_Yolean_kubernetes-kafka + name: kafka-node-reader +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: node-reader +subjects: +- kind: ServiceAccount + name: kafka + namespace: {{ .Release.Namespace }} diff --git a/applications/events/deploy/templates/services.yml b/applications/events/deploy/templates/services.yml new file mode 100644 index 00000000..b192a735 --- /dev/null +++ b/applications/events/deploy/templates/services.yml @@ -0,0 +1,61 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.apps.events.kafka.name }} +spec: + ports: + - port: {{ .Values.apps.events.kafka.port }} + selector: + app: kafka +--- +apiVersion: v1 +kind: Service +metadata: + name: broker +spec: + clusterIP: None + ports: + - port: {{ .Values.apps.events.kafka.port }} + selector: + app: kafka +--- +apiVersion: v1 +kind: Service +metadata: + name: pzoo +spec: + clusterIP: None + ports: + - name: peer + port: 2888 + - name: leader-election + port: 3888 + selector: + app: zookeeper + storage: persistent +--- +apiVersion: v1 +kind: Service +metadata: + name: zookeeper +spec: + ports: + - name: client + port: 2181 + selector: + app: zookeeper +--- +apiVersion: v1 +kind: Service +metadata: + name: zoo +spec: + clusterIP: None + ports: + - name: peer + port: 2888 + - name: leader-election + port: 3888 + selector: + app: zookeeper + storage: persistent-regional \ No newline at end of file diff --git a/applications/events/deploy/templates/zoo-config.yml b/applications/events/deploy/templates/zoo-config.yml new file mode 100644 index 00000000..db948e5d --- /dev/null +++ b/applications/events/deploy/templates/zoo-config.yml @@ -0,0 +1,7 @@ +--- +apiVersion: v1 +kind: ConfigMap +metadata: + name: zookeeper-config +data: +{{ (.Files.Glob "resources/events/zookeeper/*").AsConfig | indent 2 }} \ No newline at end of file diff --git a/applications/events/deploy/values.yaml b/applications/events/deploy/values.yaml new file mode 100644 index 00000000..c23221d2 --- /dev/null +++ b/applications/events/deploy/values.yaml @@ -0,0 +1,9 @@ +name: events +subdomain: events +autoservice: true +autodeploy: false +secureme: true +port: 80 +kafka: + name: bootstrap + port: 9092 \ No newline at end of file diff --git a/applications/samples/README.md b/applications/samples/README.md new file mode 100644 index 00000000..399a8543 --- /dev/null +++ b/applications/samples/README.md @@ -0,0 +1,3 @@ +Temporary: we are using it to test the token generation. 
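Once deployed, one way to exercise the token check is to call the secured `/valid` endpoint of the API defined below with a bearer token obtained by logging in through the accounts app. A minimal sketch, assuming the requests package; the token placeholder and deployment URL are illustrative:

```python
# Sketch only: manual smoke test of the secured endpoint (placeholder values).
import requests

TOKEN = "<JWT obtained by logging in through the accounts app>"
BASE = "https://samples.cloudharness.metacell.us/api"  # server url declared in samples.yaml

resp = requests.get(f"{BASE}/valid", headers={"Authorization": f"Bearer {TOKEN}"})
print(resp.status_code, resp.text)
```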
+ +TODO move into neuroimaging \ No newline at end of file diff --git a/applications/samples/api/config.json b/applications/samples/api/config.json new file mode 100644 index 00000000..0dd0c28f --- /dev/null +++ b/applications/samples/api/config.json @@ -0,0 +1,3 @@ +{ + "packageName": "api_samples" +} \ No newline at end of file diff --git a/applications/samples/api/samples.yaml b/applications/samples/api/samples.yaml new file mode 100644 index 00000000..6cf5e820 --- /dev/null +++ b/applications/samples/api/samples.yaml @@ -0,0 +1,116 @@ +openapi: 3.0.0 +info: + description: CloudHarness Sample api + version: 0.1.0 + title: CloudHarness Sample API + contact: + email: cloudharness@metacell.us + license: + name: UNLICENSED + +tags: + - name: auth + - name: workflows + +paths: + /valid: + get: + summary: Check if the token is valid. Get a token by logging into the base url + security: + - bearerAuth: [] + tags: + - auth + operationId: valid_token + description: | + Check if the token is valid + responses: + "200": + description: Check if token is valid + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Valid" + "400": + description: bad input parameter + + /operation_sync: + get: + summary: Send a synchronous operation + operationId: submitSync + tags: + - workflows + responses: + "200": + description: Operation result + content: + application/json: + schema: + type: string + + /operation_sync_results: + get: + summary: Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud + operationId: submitSyncWithResults + tags: + - workflows + parameters: + - in: query + name: a + description: first number to sum + schema: + type: number + example: 10 + - in: query + name: b + description: second number to sum + schema: + type: number + example: 10 + responses: + "200": + description: Operation result + content: + application/json: + schema: + type: string + /operation_async: + get: + summary: Send an asynchronous operation + operationId: submitAsync + tags: + - workflows + responses: + "202": + description: Submitted operation. 
See also https://restfulapi.net/http-status-202-accepted/ + content: + application/json: + schema: + type: object + properties: + task: + type: object + properties: + href: + description: the url where to check the operation status + type: string + example: http://workflows.cloudharness.metacell.us/api/operation/my-op + name: + type: string + example: my-op +servers: + - url: https://samples.cloudharness.metacell.us/api +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + x-bearerInfoFunc: cloudharness.auth.decode_token + schemas: + Valid: + type: object + properties: + response: + type: string diff --git a/applications/samples/deploy/values.yaml b/applications/samples/deploy/values.yaml new file mode 100644 index 00000000..91949d61 --- /dev/null +++ b/applications/samples/deploy/values.yaml @@ -0,0 +1,4 @@ +port: 8080 +subdomain: samples +autodeploy: true +autoservice: true \ No newline at end of file diff --git a/applications/samples/server/.dockerignore b/applications/samples/server/.dockerignore new file mode 100644 index 00000000..f9619601 --- /dev/null +++ b/applications/samples/server/.dockerignore @@ -0,0 +1,72 @@ +.travis.yaml +.openapi-generator-ignore +README.md +tox.ini +git_push.sh +test-requirements.txt +setup.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.python-version + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/applications/samples/server/.gitignore b/applications/samples/server/.gitignore new file mode 100644 index 00000000..43995bd4 --- /dev/null +++ b/applications/samples/server/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/applications/samples/server/.openapi-generator-ignore b/applications/samples/server/.openapi-generator-ignore new file mode 100644 index 00000000..7484ee59 --- /dev/null +++ b/applications/samples/server/.openapi-generator-ignore @@ -0,0 +1,23 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). +# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/applications/samples/server/.travis.yml b/applications/samples/server/.travis.yml new file mode 100644 index 00000000..ad71ee5c --- /dev/null +++ b/applications/samples/server/.travis.yml @@ -0,0 +1,14 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.2" + - "3.3" + - "3.4" + - "3.5" + - "3.6" + - "3.7" + - "3.8" +# command to install dependencies +install: "pip install -r requirements.txt" +# command to run tests +script: nosetests diff --git a/applications/samples/server/Dockerfile b/applications/samples/server/Dockerfile new file mode 100644 index 00000000..006e6e33 --- /dev/null +++ b/applications/samples/server/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3-alpine + +RUN mkdir -p /usr/src/app +WORKDIR /usr/src/app + +COPY requirements.txt /usr/src/app/ + +RUN pip3 install --no-cache-dir -r requirements.txt + +COPY . /usr/src/app + +EXPOSE 8080 + +ENTRYPOINT ["python3"] + +CMD ["-m", "api_samples"] \ No newline at end of file diff --git a/applications/samples/server/README.md b/applications/samples/server/README.md new file mode 100644 index 00000000..b1e6a1f9 --- /dev/null +++ b/applications/samples/server/README.md @@ -0,0 +1,49 @@ +# OpenAPI generated server + +## Overview +This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the +[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This +is an example of building an OpenAPI-enabled Flask server. + +This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
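In practice Connexion resolves each `operationId` in `openapi.yaml` to a controller function and runs the configured bearer-token check before invoking it. A minimal sketch of a controller (illustrative, not generator output); if the function declares `user` or `token_info` parameters, Connexion fills them from the decoded token:

```python
# Sketch only: Connexion passes the decoded token claims when these
# optional parameters are declared on the operation function.
def valid_token(user=None, token_info=None):
    return [{"response": f"token accepted for user {user}"}]
```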
+ +## Requirements +Python 3.5.2+ + +## Usage +To run the server, please execute the following from the root directory: + +``` +pip3 install -r requirements.txt +python3 -m api_samples +``` + +and open your browser to here: + +``` +http://localhost:8080/api/ui/ +``` + +Your OpenAPI definition lives here: + +``` +http://localhost:8080/api/openapi.json +``` + +To launch the integration tests, use tox: +``` +sudo pip install tox +tox +``` + +## Running with Docker + +To run the server on a Docker container, please execute the following from the root directory: + +```bash +# building the image +docker build -t api_samples . + +# starting up a container +docker run -p 8080:8080 api_samples +``` \ No newline at end of file diff --git a/applications/samples/server/api_samples/__init__.py b/applications/samples/server/api_samples/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/samples/server/api_samples/__main__.py b/applications/samples/server/api_samples/__main__.py new file mode 100644 index 00000000..9df35748 --- /dev/null +++ b/applications/samples/server/api_samples/__main__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import connexion + +from api_samples import encoder + + +def main(): + app = connexion.App(__name__, specification_dir='./openapi/') + app.app.json_encoder = encoder.JSONEncoder + app.add_api('openapi.yaml', + arguments={'title': 'CloudHarness Sample API'}, + pythonic_params=True) + app.run(port=8080) + + +if __name__ == '__main__': + main() diff --git a/applications/samples/server/api_samples/controllers/__init__.py b/applications/samples/server/api_samples/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/samples/server/api_samples/controllers/auth_controller.py b/applications/samples/server/api_samples/controllers/auth_controller.py new file mode 100644 index 00000000..9d7e8159 --- /dev/null +++ b/applications/samples/server/api_samples/controllers/auth_controller.py @@ -0,0 +1,16 @@ +import connexion +import six + +from api_samples.models.valid import Valid # noqa: E501 +from api_samples import util + + +def valid_token(): # noqa: E501 + """Check if the token is valid. Get a token by logging into the base url + + Check if the token is valid # noqa: E501 + + + :rtype: List[Valid] + """ + return 'do some magic!' diff --git a/applications/samples/server/api_samples/controllers/security_controller_.py b/applications/samples/server/api_samples/controllers/security_controller_.py new file mode 100644 index 00000000..8dd254a2 --- /dev/null +++ b/applications/samples/server/api_samples/controllers/security_controller_.py @@ -0,0 +1,17 @@ +from typing import List + + +def info_from_bearerAuth(token): + """ + Check and retrieve authentication information from custom bearer token. + Returned value will be passed in 'token_info' parameter of your operation function, if there is one. + 'sub' or 'uid' will be set in 'user' parameter of your operation function, if there is one. 
+ + :param token Token provided by Authorization header + :type token: str + :return: Decoded token information or None if token is invalid + :rtype: dict | None + """ + return {'uid': 'user_id'} + + diff --git a/applications/samples/server/api_samples/controllers/workflows_controller.py b/applications/samples/server/api_samples/controllers/workflows_controller.py new file mode 100644 index 00000000..3f5ccfbf --- /dev/null +++ b/applications/samples/server/api_samples/controllers/workflows_controller.py @@ -0,0 +1,42 @@ +import connexion +import six + +from api_samples.models.inline_response202 import InlineResponse202 # noqa: E501 +from api_samples import util + + +def submit_async(): # noqa: E501 + """Send an asynchronous operation + + # noqa: E501 + + + :rtype: InlineResponse202 + """ + return 'do some magic!' + + +def submit_sync(): # noqa: E501 + """Send a synchronous operation + + # noqa: E501 + + + :rtype: str + """ + return 'do some magic!' + + +def submit_sync_with_results(a=None, b=None): # noqa: E501 + """Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud + + # noqa: E501 + + :param a: first number to sum + :type a: + :param b: second number to sum + :type b: + + :rtype: str + """ + return 'do some magic!' diff --git a/applications/samples/server/api_samples/encoder.py b/applications/samples/server/api_samples/encoder.py new file mode 100644 index 00000000..7a200e51 --- /dev/null +++ b/applications/samples/server/api_samples/encoder.py @@ -0,0 +1,20 @@ +from connexion.apps.flask_app import FlaskJSONEncoder +import six + +from api_samples.models.base_model_ import Model + + +class JSONEncoder(FlaskJSONEncoder): + include_nulls = False + + def default(self, o): + if isinstance(o, Model): + dikt = {} + for attr, _ in six.iteritems(o.openapi_types): + value = getattr(o, attr) + if value is None and not self.include_nulls: + continue + attr = o.attribute_map[attr] + dikt[attr] = value + return dikt + return FlaskJSONEncoder.default(self, o) diff --git a/applications/samples/server/api_samples/models/__init__.py b/applications/samples/server/api_samples/models/__init__.py new file mode 100644 index 00000000..260fdebe --- /dev/null +++ b/applications/samples/server/api_samples/models/__init__.py @@ -0,0 +1,8 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import +# import models into model package +from api_samples.models.inline_response202 import InlineResponse202 +from api_samples.models.inline_response202_task import InlineResponse202Task +from api_samples.models.valid import Valid diff --git a/applications/samples/server/api_samples/models/base_model_.py b/applications/samples/server/api_samples/models/base_model_.py new file mode 100644 index 00000000..3ace029a --- /dev/null +++ b/applications/samples/server/api_samples/models/base_model_.py @@ -0,0 +1,69 @@ +import pprint + +import six +import typing + +from api_samples import util + +T = typing.TypeVar('T') + + +class Model(object): + # openapiTypes: The key is attribute name and the + # value is attribute type. + openapi_types = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. 
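+    # (In the generated models these dicts are filled per class: e.g. Valid
+    # sets openapi_types = {'response': str} and attribute_map =
+    # {'response': 'response'}; to_dict()/from_dict() below rely on them to
+    # map Python attributes to their JSON keys.)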
+ attribute_map = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/applications/samples/server/api_samples/models/inline_response202.py b/applications/samples/server/api_samples/models/inline_response202.py new file mode 100644 index 00000000..597ed98f --- /dev/null +++ b/applications/samples/server/api_samples/models/inline_response202.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples.models.inline_response202_task import InlineResponse202Task +from api_samples import util + +from api_samples.models.inline_response202_task import InlineResponse202Task # noqa: E501 + +class InlineResponse202(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, task=None): # noqa: E501 + """InlineResponse202 - a model defined in OpenAPI + + :param task: The task of this InlineResponse202. # noqa: E501 + :type task: InlineResponse202Task + """ + self.openapi_types = { + 'task': InlineResponse202Task + } + + self.attribute_map = { + 'task': 'task' + } + + self._task = task + + @classmethod + def from_dict(cls, dikt) -> 'InlineResponse202': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The inline_response_202 of this InlineResponse202. # noqa: E501 + :rtype: InlineResponse202 + """ + return util.deserialize_model(dikt, cls) + + @property + def task(self): + """Gets the task of this InlineResponse202. + + + :return: The task of this InlineResponse202. + :rtype: InlineResponse202Task + """ + return self._task + + @task.setter + def task(self, task): + """Sets the task of this InlineResponse202. + + + :param task: The task of this InlineResponse202. 
+ :type task: InlineResponse202Task + """ + + self._task = task diff --git a/applications/samples/server/api_samples/models/inline_response202_task.py b/applications/samples/server/api_samples/models/inline_response202_task.py new file mode 100644 index 00000000..465a8824 --- /dev/null +++ b/applications/samples/server/api_samples/models/inline_response202_task.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples import util + + +class InlineResponse202Task(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, href=None, name=None): # noqa: E501 + """InlineResponse202Task - a model defined in OpenAPI + + :param href: The href of this InlineResponse202Task. # noqa: E501 + :type href: str + :param name: The name of this InlineResponse202Task. # noqa: E501 + :type name: str + """ + self.openapi_types = { + 'href': str, + 'name': str + } + + self.attribute_map = { + 'href': 'href', + 'name': 'name' + } + + self._href = href + self._name = name + + @classmethod + def from_dict(cls, dikt) -> 'InlineResponse202Task': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The inline_response_202_task of this InlineResponse202Task. # noqa: E501 + :rtype: InlineResponse202Task + """ + return util.deserialize_model(dikt, cls) + + @property + def href(self): + """Gets the href of this InlineResponse202Task. + + the url where to check the operation status # noqa: E501 + + :return: The href of this InlineResponse202Task. + :rtype: str + """ + return self._href + + @href.setter + def href(self, href): + """Sets the href of this InlineResponse202Task. + + the url where to check the operation status # noqa: E501 + + :param href: The href of this InlineResponse202Task. + :type href: str + """ + + self._href = href + + @property + def name(self): + """Gets the name of this InlineResponse202Task. + + + :return: The name of this InlineResponse202Task. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this InlineResponse202Task. + + + :param name: The name of this InlineResponse202Task. + :type name: str + """ + + self._name = name diff --git a/applications/samples/server/api_samples/models/valid.py b/applications/samples/server/api_samples/models/valid.py new file mode 100644 index 00000000..eae6c5f4 --- /dev/null +++ b/applications/samples/server/api_samples/models/valid.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples import util + + +class Valid(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, response=None): # noqa: E501 + """Valid - a model defined in OpenAPI + + :param response: The response of this Valid. # noqa: E501 + :type response: str + """ + self.openapi_types = { + 'response': str + } + + self.attribute_map = { + 'response': 'response' + } + + self._response = response + + @classmethod + def from_dict(cls, dikt) -> 'Valid': + """Returns the dict as a model + + :param dikt: A dict. 
+ :type: dict + :return: The Valid of this Valid. # noqa: E501 + :rtype: Valid + """ + return util.deserialize_model(dikt, cls) + + @property + def response(self): + """Gets the response of this Valid. + + + :return: The response of this Valid. + :rtype: str + """ + return self._response + + @response.setter + def response(self, response): + """Sets the response of this Valid. + + + :param response: The response of this Valid. + :type response: str + """ + + self._response = response diff --git a/applications/samples/server/api_samples/openapi/openapi.yaml b/applications/samples/server/api_samples/openapi/openapi.yaml new file mode 100644 index 00000000..126520be --- /dev/null +++ b/applications/samples/server/api_samples/openapi/openapi.yaml @@ -0,0 +1,134 @@ +openapi: 3.0.0 +info: + contact: + email: cloudharness@metacell.us + description: CloudHarness Sample api + license: + name: UNLICENSED + title: CloudHarness Sample API + version: 0.1.0 +servers: +- url: https://samples.cloudharness.metacell.us/api +tags: +- name: auth +- name: workflows +paths: + /operation_async: + get: + operationId: submit_async + responses: + "202": + content: + application/json: + schema: + $ref: '#/components/schemas/inline_response_202' + description: Submitted operation. See also https://restfulapi.net/http-status-202-accepted/ + summary: Send an asynchronous operation + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /operation_sync: + get: + operationId: submit_sync + responses: + "200": + content: + application/json: + schema: + type: string + description: Operation result + summary: Send a synchronous operation + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /operation_sync_results: + get: + operationId: submit_sync_with_results + parameters: + - description: first number to sum + example: 10 + explode: true + in: query + name: a + required: false + schema: + type: number + style: form + - description: second number to sum + example: 10 + explode: true + in: query + name: b + required: false + schema: + type: number + style: form + responses: + "200": + content: + application/json: + schema: + type: string + description: Operation result + summary: Send a synchronous operation and get results using the event queue. + Just a sum, but in the cloud + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /valid: + get: + description: | + Check if the token is valid + operationId: valid_token + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/Valid' + type: array + description: Check if token is valid + "400": + description: bad input parameter + security: + - bearerAuth: [] + summary: Check if the token is valid. 
Get a token by logging into the base url + tags: + - auth + x-openapi-router-controller: api_samples.controllers.auth_controller +components: + schemas: + Valid: + example: + response: response + properties: + response: + type: string + type: object + inline_response_202_task: + example: + name: my-op + href: http://workflows.cloudharness.metacell.us/api/operation/my-op + properties: + href: + description: the url where to check the operation status + example: http://workflows.cloudharness.metacell.us/api/operation/my-op + type: string + name: + example: my-op + type: string + inline_response_202: + example: + task: + name: my-op + href: http://workflows.cloudharness.metacell.us/api/operation/my-op + properties: + task: + $ref: '#/components/schemas/inline_response_202_task' + securitySchemes: + bearerAuth: + bearerFormat: JWT + scheme: bearer + type: http + x-bearerInfoFunc: cloudharness.auth.decode_token diff --git a/applications/samples/server/api_samples/test/__init__.py b/applications/samples/server/api_samples/test/__init__.py new file mode 100644 index 00000000..a2dbaf9d --- /dev/null +++ b/applications/samples/server/api_samples/test/__init__.py @@ -0,0 +1,16 @@ +import logging + +import connexion +from flask_testing import TestCase + +from api_samples.encoder import JSONEncoder + + +class BaseTestCase(TestCase): + + def create_app(self): + logging.getLogger('connexion.operation').setLevel('ERROR') + app = connexion.App(__name__, specification_dir='../openapi/') + app.app.json_encoder = JSONEncoder + app.add_api('openapi.yaml', pythonic_params=True) + return app.app diff --git a/applications/samples/server/api_samples/test/test_auth_controller.py b/applications/samples/server/api_samples/test/test_auth_controller.py new file mode 100644 index 00000000..962c63cd --- /dev/null +++ b/applications/samples/server/api_samples/test/test_auth_controller.py @@ -0,0 +1,34 @@ +# coding: utf-8 + +from __future__ import absolute_import +import unittest + +from flask import json +from six import BytesIO + +from api_samples.models.valid import Valid # noqa: E501 +from api_samples.test import BaseTestCase + + +class TestAuthController(BaseTestCase): + """AuthController integration test stubs""" + + def test_valid_token(self): + """Test case for valid_token + + Check if the token is valid. 
Get a token by logging into the base url + """ + headers = { + 'Accept': 'application/json', + 'Authorization': 'Bearer special-key', + } + response = self.client.open( + '/api/valid', + method='GET', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + +if __name__ == '__main__': + unittest.main() diff --git a/applications/samples/server/api_samples/test/test_workflows_controller.py b/applications/samples/server/api_samples/test/test_workflows_controller.py new file mode 100644 index 00000000..07760241 --- /dev/null +++ b/applications/samples/server/api_samples/test/test_workflows_controller.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +from __future__ import absolute_import +import unittest + +from flask import json +from six import BytesIO + +from api_samples.models.inline_response202 import InlineResponse202 # noqa: E501 +from api_samples.test import BaseTestCase + + +class TestWorkflowsController(BaseTestCase): + """WorkflowsController integration test stubs""" + + def test_submit_async(self): + """Test case for submit_async + + Send an asynchronous operation + """ + headers = { + 'Accept': 'application/json', + } + response = self.client.open( + '/api/operation_async', + method='GET', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + def test_submit_sync(self): + """Test case for submit_sync + + Send a synchronous operation + """ + headers = { + 'Accept': 'application/json', + } + response = self.client.open( + '/api/operation_sync', + method='GET', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + def test_submit_sync_with_results(self): + """Test case for submit_sync_with_results + + Send a synchronous operation and get results using the event queue. 
Just a sum, but in the cloud + """ + query_string = [('a', 10), + ('b', 10)] + headers = { + 'Accept': 'application/json', + } + response = self.client.open( + '/api/operation_sync_results', + method='GET', + headers=headers, + query_string=query_string) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + +if __name__ == '__main__': + unittest.main() diff --git a/applications/samples/server/api_samples/typing_utils.py b/applications/samples/server/api_samples/typing_utils.py new file mode 100644 index 00000000..0563f81f --- /dev/null +++ b/applications/samples/server/api_samples/typing_utils.py @@ -0,0 +1,32 @@ +# coding: utf-8 + +import sys + +if sys.version_info < (3, 7): + import typing + + def is_generic(klass): + """ Determine whether klass is a generic class """ + return type(klass) == typing.GenericMeta + + def is_dict(klass): + """ Determine whether klass is a Dict """ + return klass.__extra__ == dict + + def is_list(klass): + """ Determine whether klass is a List """ + return klass.__extra__ == list + +else: + + def is_generic(klass): + """ Determine whether klass is a generic class """ + return hasattr(klass, '__origin__') + + def is_dict(klass): + """ Determine whether klass is a Dict """ + return klass.__origin__ == dict + + def is_list(klass): + """ Determine whether klass is a List """ + return klass.__origin__ == list diff --git a/applications/samples/server/api_samples/util.py b/applications/samples/server/api_samples/util.py new file mode 100644 index 00000000..6e8baa5f --- /dev/null +++ b/applications/samples/server/api_samples/util.py @@ -0,0 +1,142 @@ +import datetime + +import six +import typing +from api_samples import typing_utils + + +def _deserialize(data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool, bytearray): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif typing_utils.is_generic(klass): + if typing_utils.is_list(klass): + return _deserialize_list(data, klass.__args__[0]) + if typing_utils.is_dict(klass): + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return an original value. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. 
+ :rtype: datetime + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. + """ + instance = klass() + + if not instance.openapi_types: + return data + + for attr, attr_type in six.iteritems(instance.openapi_types): + if data is not None \ + and instance.attribute_map[attr] in data \ + and isinstance(data, (list, dict)): + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. + :rtype: dict + """ + return {k: _deserialize(v, boxed_type) + for k, v in six.iteritems(data)} diff --git a/applications/samples/server/git_push.sh b/applications/samples/server/git_push.sh new file mode 100644 index 00000000..ced3be2b --- /dev/null +++ b/applications/samples/server/git_push.sh @@ -0,0 +1,58 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-pestore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = "" ]; then + git_host="github.com" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." 
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/applications/samples/server/requirements.txt b/applications/samples/server/requirements.txt new file mode 100644 index 00000000..2639eedf --- /dev/null +++ b/applications/samples/server/requirements.txt @@ -0,0 +1,7 @@ +connexion >= 2.6.0; python_version>="3.6" +connexion >= 2.3.0; python_version=="3.5" +connexion >= 2.3.0; python_version=="3.4" +connexion == 2.4.0; python_version<="2.7" +swagger-ui-bundle >= 0.0.2 +python_dateutil >= 2.6.0 +setuptools >= 21.0.0 diff --git a/applications/samples/server/setup.py b/applications/samples/server/setup.py new file mode 100644 index 00000000..e1d27372 --- /dev/null +++ b/applications/samples/server/setup.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +import sys +from setuptools import setup, find_packages + +NAME = "api_samples" +VERSION = "1.0.0" + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = [ + "connexion>=2.0.2", + "swagger-ui-bundle>=0.0.2", + "python_dateutil>=2.6.0" +] + +setup( + name=NAME, + version=VERSION, + description="CloudHarness Sample API", + author_email="cloudharness@metacell.us", + url="", + keywords=["OpenAPI", "CloudHarness Sample API"], + install_requires=REQUIRES, + packages=find_packages(), + package_data={'': ['openapi/openapi.yaml']}, + include_package_data=True, + entry_points={ + 'console_scripts': ['api_samples=api_samples.__main__:main']}, + long_description="""\ + CloudHarness Sample api + """ +) + diff --git a/applications/samples/server/test-requirements.txt b/applications/samples/server/test-requirements.txt new file mode 100644 index 00000000..a2626d87 --- /dev/null +++ b/applications/samples/server/test-requirements.txt @@ -0,0 +1,4 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 +flask_testing==0.6.1 \ No newline at end of file diff --git a/applications/samples/server/tox.ini b/applications/samples/server/tox.ini new file mode 100644 index 00000000..8f380ee0 --- /dev/null +++ b/applications/samples/server/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=api_samples \ No newline at end of file diff --git a/applications/samples/src/.dockerignore b/applications/samples/src/.dockerignore new file mode 100644 index 00000000..f9619601 --- /dev/null +++ b/applications/samples/src/.dockerignore @@ -0,0 +1,72 @@ +.travis.yaml +.openapi-generator-ignore +README.md +tox.ini +git_push.sh +test-requirements.txt +setup.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.python-version
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
diff --git a/applications/samples/src/.gitignore b/applications/samples/src/.gitignore
new file mode 100644
index 00000000..43995bd4
--- /dev/null
+++ b/applications/samples/src/.gitignore
@@ -0,0 +1,66 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+venv/
+.venv/
+.python-version
+.pytest_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# IPython Notebook
+.ipynb_checkpoints
diff --git a/applications/samples/src/.openapi-generator-ignore b/applications/samples/src/.openapi-generator-ignore
new file mode 100644
index 00000000..5cc13cc6
--- /dev/null
+++ b/applications/samples/src/.openapi-generator-ignore
@@ -0,0 +1,27 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+setup.py
+*/controllers/*
+*/models/*
+Dockerfile
\ No newline at end of file
diff --git a/applications/samples/src/.travis.yml b/applications/samples/src/.travis.yml
new file mode 100644
index 00000000..ad71ee5c
--- /dev/null
+++ b/applications/samples/src/.travis.yml
@@ -0,0 +1,14 @@
+# ref: https://docs.travis-ci.com/user/languages/python
+language: python
+python:
+  - "3.2"
+  - "3.3"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+  - "3.7"
+  - "3.8"
+# command to install dependencies
+install: "pip install -r requirements.txt"
+# command to run tests
+script: nosetests
diff --git a/applications/samples/src/Dockerfile b/applications/samples/src/Dockerfile
new file mode 100644
index 00000000..d74b98f7
--- /dev/null
+++ b/applications/samples/src/Dockerfile
@@ -0,0 +1,18 @@
+ARG REGISTRY
+ARG TAG=latest
+FROM ${REGISTRY}cloudharness-base:${TAG}
+
+RUN mkdir -p /usr/src/app
+WORKDIR /usr/src/app
+
+COPY requirements.txt /usr/src/app/
+
+RUN pip3 install --no-cache-dir -r requirements.txt
+
+COPY . /usr/src/app
+
+EXPOSE 8080
+
+ENTRYPOINT ["python3"]
+
+CMD ["-m", "api_samples"]
\ No newline at end of file
diff --git a/applications/samples/src/README.md b/applications/samples/src/README.md
new file mode 100644
index 00000000..b1e6a1f9
--- /dev/null
+++ b/applications/samples/src/README.md
@@ -0,0 +1,49 @@
+# OpenAPI generated server
+
+## Overview
+This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the
+[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This
+is an example of building an OpenAPI-enabled Flask server.
+
+This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
+
+## Requirements
+Python 3.5.2+
+
+## Usage
+To run the server, please execute the following from the root directory:
+
+```
+pip3 install -r requirements.txt
+python3 -m api_samples
+```
+
+and open your browser here:
+
+```
+http://localhost:8080/api/ui/
+```
+
+Your OpenAPI definition lives here:
+
+```
+http://localhost:8080/api/openapi.json
+```
+
+To launch the integration tests, use tox:
+```
+sudo pip install tox
+tox
+```
+
+## Running with Docker
+
+To run the server in a Docker container, please execute the following from the root directory:
+
+```bash
+# building the image
+docker build -t api_samples .
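+# note: this src image builds FROM ${REGISTRY}cloudharness-base:${TAG}
+# (see the Dockerfile above), so cloudharness-base must be available;
+# if it is not a local image, pass the build args explicitly, e.g.
+# (registry value shown only as an illustration):
+# docker build -t api_samples --build-arg REGISTRY=r.cfcr.io/tarelli/ .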
+ +# starting up a container +docker run -p 8080:8080 api_samples +``` \ No newline at end of file diff --git a/applications/samples/src/api_samples/__init__.py b/applications/samples/src/api_samples/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/samples/src/api_samples/__main__.py b/applications/samples/src/api_samples/__main__.py new file mode 100644 index 00000000..9df35748 --- /dev/null +++ b/applications/samples/src/api_samples/__main__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import connexion + +from api_samples import encoder + + +def main(): + app = connexion.App(__name__, specification_dir='./openapi/') + app.app.json_encoder = encoder.JSONEncoder + app.add_api('openapi.yaml', + arguments={'title': 'CloudHarness Sample API'}, + pythonic_params=True) + app.run(port=8080) + + +if __name__ == '__main__': + main() diff --git a/applications/samples/src/api_samples/controllers/__init__.py b/applications/samples/src/api_samples/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/samples/src/api_samples/controllers/auth_controller.py b/applications/samples/src/api_samples/controllers/auth_controller.py new file mode 100644 index 00000000..6561b554 --- /dev/null +++ b/applications/samples/src/api_samples/controllers/auth_controller.py @@ -0,0 +1,16 @@ +import connexion +import six + +from api_samples.models.valid import Valid # noqa: E501 +from api_samples import util + + +def valid_token(): # noqa: E501 + """Check if the token is valid. Get a token by logging into the dashboard + + Check if the token is valid # noqa: E501 + + + :rtype: List[Valid] + """ + return 'OK!' diff --git a/applications/samples/src/api_samples/controllers/workflows_controller.py b/applications/samples/src/api_samples/controllers/workflows_controller.py new file mode 100644 index 00000000..24744603 --- /dev/null +++ b/applications/samples/src/api_samples/controllers/workflows_controller.py @@ -0,0 +1,71 @@ +import connexion +import six + +from api_samples.models.inline_response202 import InlineResponse202 # noqa: E501 +from api_samples import util +from api_samples.models import InlineResponse202 +from api_samples.models.inline_response202_task import InlineResponse202Task +from flask.json import jsonify + +from cloudharness import log + +try: + from cloudharness.workflows import operations, tasks +except Exception as e: + log.error("Cannot start workflows module. 
Probably this is related to some problem with the kubectl configuration", e)
+
+
+def submit_async():  # noqa: E501
+    """Send an asynchronous operation
+
+     # noqa: E501
+
+
+    :rtype: InlineResponse202
+    """
+    shared_directory = '/mnt/shared'
+    task_write = tasks.CustomTask('download-file', 'workflows-extract-download', url='https://raw.githubusercontent.com/openworm/org.geppetto/master/README.md')
+    task_print = tasks.CustomTask('print-file', 'workflows-print-file', file_path=shared_directory + '/README.md')
+    op = operations.PipelineOperation('test-custom-connected-op-', (task_write, task_print), shared_directory=shared_directory)
+
+    submitted = op.execute()
+    if not op.is_error():
+        return InlineResponse202(task=InlineResponse202Task(href=op.get_operation_update_url(), name=submitted.name)), 202
+    else:
+        return 'Error submitting operation', 500
+
+
+def submit_sync():  # noqa: E501
+    """Send a synchronous operation
+
+     # noqa: E501
+
+
+    :rtype: str
+    """
+    task = tasks.CustomTask('download-file', 'workflows-extract-download', url='https://www.metacell.us')
+
+    op = operations.DistributedSyncOperation('test-sync-op-', task)
+    workflow = op.execute()
+    return workflow.raw
+
+
+def submit_sync_with_results(a=1, b=2):  # noqa: E501
+    """Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud
+
+     # noqa: E501
+
+    :param a: first number to sum
+    :type a: float
+    :param b: second number to sum
+    :type b: float
+
+    :rtype: str
+    """
+    task = tasks.CustomTask('test-sum', 'samples-sum', a=a, b=b)
+    try:
+        op = operations.DistributedSyncOperationWithResults('test-sync-op-results-', task)
+        result = op.execute()
+        return result
+    except Exception as e:
+        return jsonify(str(e)), 200
diff --git a/applications/samples/src/api_samples/encoder.py b/applications/samples/src/api_samples/encoder.py
new file mode 100644
index 00000000..7a200e51
--- /dev/null
+++ b/applications/samples/src/api_samples/encoder.py
@@ -0,0 +1,20 @@
+from connexion.apps.flask_app import FlaskJSONEncoder
+import six
+
+from api_samples.models.base_model_ import Model
+
+
+class JSONEncoder(FlaskJSONEncoder):
+    include_nulls = False
+
+    def default(self, o):
+        if isinstance(o, Model):
+            dikt = {}
+            for attr, _ in six.iteritems(o.openapi_types):
+                value = getattr(o, attr)
+                if value is None and not self.include_nulls:
+                    continue
+                attr = o.attribute_map[attr]
+                dikt[attr] = value
+            return dikt
+        return FlaskJSONEncoder.default(self, o)
diff --git a/applications/samples/src/api_samples/models/__init__.py b/applications/samples/src/api_samples/models/__init__.py
new file mode 100644
index 00000000..260fdebe
--- /dev/null
+++ b/applications/samples/src/api_samples/models/__init__.py
@@ -0,0 +1,8 @@
+# coding: utf-8
+
+# flake8: noqa
+from __future__ import absolute_import
+# import models into model package
+from api_samples.models.inline_response202 import InlineResponse202
+from api_samples.models.inline_response202_task import InlineResponse202Task
+from api_samples.models.valid import Valid
diff --git a/applications/samples/src/api_samples/models/base_model_.py b/applications/samples/src/api_samples/models/base_model_.py
new file mode 100644
index 00000000..3ace029a
--- /dev/null
+++ b/applications/samples/src/api_samples/models/base_model_.py
@@ -0,0 +1,69 @@
+import pprint
+
+import six
+import typing
+
+from api_samples import util
+
+T = typing.TypeVar('T')
+
+
+class Model(object):
+    # openapiTypes: The key is attribute name and the
+    # value is attribute type.
+ openapi_types = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. + attribute_map = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/applications/samples/src/api_samples/models/inline_response202.py b/applications/samples/src/api_samples/models/inline_response202.py new file mode 100644 index 00000000..9d389979 --- /dev/null +++ b/applications/samples/src/api_samples/models/inline_response202.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples.models.inline_response202_task import InlineResponse202Task +from api_samples import util + + + +class InlineResponse202(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, task=None): # noqa: E501 + """InlineResponse202 - a model defined in OpenAPI + + :param task: The task of this InlineResponse202. # noqa: E501 + :type task: InlineResponse202Task + """ + self.openapi_types = { + 'task': InlineResponse202Task + } + + self.attribute_map = { + 'task': 'task' + } + + self._task = task + + @classmethod + def from_dict(cls, dikt) -> 'InlineResponse202': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The inline_response_202 of this InlineResponse202. # noqa: E501 + :rtype: InlineResponse202 + """ + return util.deserialize_model(dikt, cls) + + @property + def task(self): + """Gets the task of this InlineResponse202. + + + :return: The task of this InlineResponse202. + :rtype: InlineResponse202Task + """ + return self._task + + @task.setter + def task(self, task): + """Sets the task of this InlineResponse202. + + + :param task: The task of this InlineResponse202. 
+ :type task: InlineResponse202Task + """ + + self._task = task diff --git a/applications/samples/src/api_samples/models/inline_response202_task.py b/applications/samples/src/api_samples/models/inline_response202_task.py new file mode 100644 index 00000000..465a8824 --- /dev/null +++ b/applications/samples/src/api_samples/models/inline_response202_task.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples import util + + +class InlineResponse202Task(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, href=None, name=None): # noqa: E501 + """InlineResponse202Task - a model defined in OpenAPI + + :param href: The href of this InlineResponse202Task. # noqa: E501 + :type href: str + :param name: The name of this InlineResponse202Task. # noqa: E501 + :type name: str + """ + self.openapi_types = { + 'href': str, + 'name': str + } + + self.attribute_map = { + 'href': 'href', + 'name': 'name' + } + + self._href = href + self._name = name + + @classmethod + def from_dict(cls, dikt) -> 'InlineResponse202Task': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The inline_response_202_task of this InlineResponse202Task. # noqa: E501 + :rtype: InlineResponse202Task + """ + return util.deserialize_model(dikt, cls) + + @property + def href(self): + """Gets the href of this InlineResponse202Task. + + the url where to check the operation status # noqa: E501 + + :return: The href of this InlineResponse202Task. + :rtype: str + """ + return self._href + + @href.setter + def href(self, href): + """Sets the href of this InlineResponse202Task. + + the url where to check the operation status # noqa: E501 + + :param href: The href of this InlineResponse202Task. + :type href: str + """ + + self._href = href + + @property + def name(self): + """Gets the name of this InlineResponse202Task. + + + :return: The name of this InlineResponse202Task. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this InlineResponse202Task. + + + :param name: The name of this InlineResponse202Task. + :type name: str + """ + + self._name = name diff --git a/applications/samples/src/api_samples/models/valid.py b/applications/samples/src/api_samples/models/valid.py new file mode 100644 index 00000000..eae6c5f4 --- /dev/null +++ b/applications/samples/src/api_samples/models/valid.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from api_samples.models.base_model_ import Model +from api_samples import util + + +class Valid(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, response=None): # noqa: E501 + """Valid - a model defined in OpenAPI + + :param response: The response of this Valid. # noqa: E501 + :type response: str + """ + self.openapi_types = { + 'response': str + } + + self.attribute_map = { + 'response': 'response' + } + + self._response = response + + @classmethod + def from_dict(cls, dikt) -> 'Valid': + """Returns the dict as a model + + :param dikt: A dict. 
+ :type: dict + :return: The Valid of this Valid. # noqa: E501 + :rtype: Valid + """ + return util.deserialize_model(dikt, cls) + + @property + def response(self): + """Gets the response of this Valid. + + + :return: The response of this Valid. + :rtype: str + """ + return self._response + + @response.setter + def response(self, response): + """Sets the response of this Valid. + + + :param response: The response of this Valid. + :type response: str + """ + + self._response = response diff --git a/applications/samples/src/api_samples/openapi/openapi.yaml b/applications/samples/src/api_samples/openapi/openapi.yaml new file mode 100644 index 00000000..126520be --- /dev/null +++ b/applications/samples/src/api_samples/openapi/openapi.yaml @@ -0,0 +1,134 @@ +openapi: 3.0.0 +info: + contact: + email: cloudharness@metacell.us + description: CloudHarness Sample api + license: + name: UNLICENSED + title: CloudHarness Sample API + version: 0.1.0 +servers: +- url: https://samples.cloudharness.metacell.us/api +tags: +- name: auth +- name: workflows +paths: + /operation_async: + get: + operationId: submit_async + responses: + "202": + content: + application/json: + schema: + $ref: '#/components/schemas/inline_response_202' + description: Submitted operation. See also https://restfulapi.net/http-status-202-accepted/ + summary: Send an asynchronous operation + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /operation_sync: + get: + operationId: submit_sync + responses: + "200": + content: + application/json: + schema: + type: string + description: Operation result + summary: Send a synchronous operation + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /operation_sync_results: + get: + operationId: submit_sync_with_results + parameters: + - description: first number to sum + example: 10 + explode: true + in: query + name: a + required: false + schema: + type: number + style: form + - description: second number to sum + example: 10 + explode: true + in: query + name: b + required: false + schema: + type: number + style: form + responses: + "200": + content: + application/json: + schema: + type: string + description: Operation result + summary: Send a synchronous operation and get results using the event queue. + Just a sum, but in the cloud + tags: + - workflows + x-openapi-router-controller: api_samples.controllers.workflows_controller + /valid: + get: + description: | + Check if the token is valid + operationId: valid_token + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/Valid' + type: array + description: Check if token is valid + "400": + description: bad input parameter + security: + - bearerAuth: [] + summary: Check if the token is valid. 
Get a token by logging into the base url + tags: + - auth + x-openapi-router-controller: api_samples.controllers.auth_controller +components: + schemas: + Valid: + example: + response: response + properties: + response: + type: string + type: object + inline_response_202_task: + example: + name: my-op + href: http://workflows.cloudharness.metacell.us/api/operation/my-op + properties: + href: + description: the url where to check the operation status + example: http://workflows.cloudharness.metacell.us/api/operation/my-op + type: string + name: + example: my-op + type: string + inline_response_202: + example: + task: + name: my-op + href: http://workflows.cloudharness.metacell.us/api/operation/my-op + properties: + task: + $ref: '#/components/schemas/inline_response_202_task' + securitySchemes: + bearerAuth: + bearerFormat: JWT + scheme: bearer + type: http + x-bearerInfoFunc: cloudharness.auth.decode_token diff --git a/applications/samples/src/api_samples/service/__init__.py b/applications/samples/src/api_samples/service/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/samples/src/api_samples/service/security_service.py b/applications/samples/src/api_samples/service/security_service.py new file mode 100644 index 00000000..6bfd5610 --- /dev/null +++ b/applications/samples/src/api_samples/service/security_service.py @@ -0,0 +1,16 @@ +from cloudharness.auth import decode_token + +def decode_doken(token): + """ + Check and retrieve authentication information from custom bearer token. + Returned value will be passed in 'token_info' parameter of your operation function, if there is one. + 'sub' or 'uid' will be set in 'user' parameter of your operation function, if there is one. + + :param token Token provided by Authorization header + :type token: str + :return: Decoded token information or None if token is invalid + :rtype: dict | None + """ + return decode_token(token) + + diff --git a/applications/samples/src/api_samples/test/__init__.py b/applications/samples/src/api_samples/test/__init__.py new file mode 100644 index 00000000..a2dbaf9d --- /dev/null +++ b/applications/samples/src/api_samples/test/__init__.py @@ -0,0 +1,16 @@ +import logging + +import connexion +from flask_testing import TestCase + +from api_samples.encoder import JSONEncoder + + +class BaseTestCase(TestCase): + + def create_app(self): + logging.getLogger('connexion.operation').setLevel('ERROR') + app = connexion.App(__name__, specification_dir='../openapi/') + app.app.json_encoder = JSONEncoder + app.add_api('openapi.yaml', pythonic_params=True) + return app.app diff --git a/applications/samples/src/api_samples/test/test_auth_controller.py b/applications/samples/src/api_samples/test/test_auth_controller.py new file mode 100644 index 00000000..d68f15c1 --- /dev/null +++ b/applications/samples/src/api_samples/test/test_auth_controller.py @@ -0,0 +1,29 @@ +# coding: utf-8 + +from __future__ import absolute_import + +from flask import json +from six import BytesIO + +from api_samples.models.valid import Valid # noqa: E501 +from api_samples.test import BaseTestCase + + +class TestAuthController(BaseTestCase): + """AuthController integration test stubs""" + + def test_valid_token(self): + """Test case for valid_token + + Check if the token is valid + """ + response = self.client.open( + '/0.1.0/valid', + method='GET') + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + +if __name__ == '__main__': + import unittest + unittest.main() diff --git 
a/applications/samples/src/api_samples/test/test_default_controller.py b/applications/samples/src/api_samples/test/test_default_controller.py
new file mode 100644
index 00000000..47e7f3a6
--- /dev/null
+++ b/applications/samples/src/api_samples/test/test_default_controller.py
@@ -0,0 +1,40 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+
+from flask import json
+from six import BytesIO
+
+from api_samples.models.valid import Valid  # noqa: E501
+from api_samples.test import BaseTestCase
+
+
+class TestDefaultController(BaseTestCase):
+    """DefaultController integration test stubs"""
+
+    def test_operation_sync_post(self):
+        """Test case for operation_sync_post
+
+        Send a synchronous operation
+        """
+        response = self.client.open(
+            '/0.1.0/operation-sync',
+            method='POST')
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+    def test_valid_token(self):
+        """Test case for valid_token
+
+        Check if the token is valid
+        """
+        response = self.client.open(
+            '/0.1.0/valid',
+            method='GET')
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+
+if __name__ == '__main__':
+    import unittest
+    unittest.main()
diff --git a/applications/samples/src/api_samples/test/test_workflows_controller.py b/applications/samples/src/api_samples/test/test_workflows_controller.py
new file mode 100644
index 00000000..4bb1998e
--- /dev/null
+++ b/applications/samples/src/api_samples/test/test_workflows_controller.py
@@ -0,0 +1,40 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+
+from flask import json
+from six import BytesIO
+
+from api_samples.models.inline_response202 import InlineResponse202  # noqa: E501
+from api_samples.test import BaseTestCase
+
+
+class TestWorkflowsController(BaseTestCase):
+    """WorkflowsController integration test stubs"""
+
+    def test_operation_submit_async(self):
+        """Test case for operation_submit_async
+
+        Send an asynchronous operation
+        """
+        response = self.client.open(
+            '/0.1.0/operation_async',
+            method='POST')
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+    def test_operation_submit_sync(self):
+        """Test case for operation_submit_sync
+
+        Send a synchronous operation
+        """
+        response = self.client.open(
+            '/0.1.0/operation_sync',
+            method='POST')
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+
+if __name__ == '__main__':
+    import unittest
+    unittest.main()
diff --git a/applications/samples/src/api_samples/typing_utils.py b/applications/samples/src/api_samples/typing_utils.py
new file mode 100644
index 00000000..0563f81f
--- /dev/null
+++ b/applications/samples/src/api_samples/typing_utils.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+
+import sys
+
+if sys.version_info < (3, 7):
+    import typing
+
+    def is_generic(klass):
+        """ Determine whether klass is a generic class """
+        return type(klass) == typing.GenericMeta
+
+    def is_dict(klass):
+        """ Determine whether klass is a Dict """
+        return klass.__extra__ == dict
+
+    def is_list(klass):
+        """ Determine whether klass is a List """
+        return klass.__extra__ == list
+
+else:
+
+    def is_generic(klass):
+        """ Determine whether klass is a generic class """
+        return hasattr(klass, '__origin__')
+
+    def is_dict(klass):
+        """ Determine whether klass is a Dict """
+        return klass.__origin__ == dict
+
+    def is_list(klass):
+        """ Determine whether klass is a List """
+        return klass.__origin__ == list
diff --git a/applications/samples/src/api_samples/util.py
b/applications/samples/src/api_samples/util.py new file mode 100644 index 00000000..6e8baa5f --- /dev/null +++ b/applications/samples/src/api_samples/util.py @@ -0,0 +1,142 @@ +import datetime + +import six +import typing +from api_samples import typing_utils + + +def _deserialize(data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool, bytearray): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif typing_utils.is_generic(klass): + if typing_utils.is_list(klass): + return _deserialize_list(data, klass.__args__[0]) + if typing_utils.is_dict(klass): + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return an original value. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. + :rtype: datetime + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. + """ + instance = klass() + + if not instance.openapi_types: + return data + + for attr, attr_type in six.iteritems(instance.openapi_types): + if data is not None \ + and instance.attribute_map[attr] in data \ + and isinstance(data, (list, dict)): + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. 
+    :rtype: dict
+    """
+    return {k: _deserialize(v, boxed_type)
+            for k, v in six.iteritems(data)}
diff --git a/applications/samples/src/git_push.sh b/applications/samples/src/git_push.sh
new file mode 100644
index 00000000..ced3be2b
--- /dev/null
+++ b/applications/samples/src/git_push.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
+#
+# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
+
+git_user_id=$1
+git_repo_id=$2
+release_note=$3
+git_host=$4
+
+if [ "$git_host" = "" ]; then
+    git_host="github.com"
+    echo "[INFO] No command line input provided. Set \$git_host to $git_host"
+fi
+
+if [ "$git_user_id" = "" ]; then
+    git_user_id="GIT_USER_ID"
+    echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
+fi
+
+if [ "$git_repo_id" = "" ]; then
+    git_repo_id="GIT_REPO_ID"
+    echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
+fi
+
+if [ "$release_note" = "" ]; then
+    release_note="Minor update"
+    echo "[INFO] No command line input provided. Set \$release_note to $release_note"
+fi
+
+# Initialize the local directory as a Git repository
+git init
+
+# Adds the files in the local repository and stages them for commit.
+git add .
+
+# Commits the tracked changes and prepares them to be pushed to a remote repository.
+git commit -m "$release_note"
+
+# Sets the new remote
+git_remote=`git remote`
+if [ "$git_remote" = "" ]; then # git remote not defined
+
+    if [ "$GIT_TOKEN" = "" ]; then
+        echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
+        git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
+    else
+        git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git
+    fi
+
+fi
+
+git pull origin master
+
+# Pushes (Forces) the changes in the local repository up to the remote repository
+echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
+git push origin master 2>&1 | grep -v 'To https'
+
diff --git a/applications/samples/src/requirements.txt b/applications/samples/src/requirements.txt
new file mode 100644
index 00000000..4fe6c15c
--- /dev/null
+++ b/applications/samples/src/requirements.txt
@@ -0,0 +1,8 @@
+connexion >= 2.6.0; python_version>="3.6"
+connexion >= 2.3.0; python_version=="3.5"
+connexion >= 2.3.0; python_version=="3.4"
+connexion == 2.4.0; python_version<="2.7"
+swagger-ui-bundle >= 0.0.2
+python_dateutil >= 2.6.0
+setuptools >= 21.0.0
+pyjwt>=1.7.1
\ No newline at end of file
diff --git a/applications/samples/src/setup.py b/applications/samples/src/setup.py
new file mode 100644
index 00000000..3e72420c
--- /dev/null
+++ b/applications/samples/src/setup.py
@@ -0,0 +1,41 @@
+# coding: utf-8
+
+import sys
+from setuptools import setup, find_packages
+
+NAME = "api_samples"
+VERSION = "1.0.0"
+
+# To install the library, run the following
+#
+# python setup.py install
+#
+# prerequisite: setuptools
+# http://pypi.python.org/pypi/setuptools
+
+REQUIRES = [
+    "connexion>=2.0.2",
+    "swagger-ui-bundle>=0.0.2",
+    "python_dateutil>=2.6.0",
+    "pyjwt>=1.7.1",
+    "cloudharness"
+]
+
+setup(
+    name=NAME,
+    version=VERSION,
+    description="CloudHarness Sample API",
+    author_email="cloudharness@metacell.us",
+    url="",
+    keywords=["OpenAPI", "CloudHarness Sample API"],
+    install_requires=REQUIRES,
+    packages=find_packages(),
+    package_data={'': ['openapi/openapi.yaml']},
+
include_package_data=True, + entry_points={ + 'console_scripts': ['api_samples=api_samples.__main__:main']}, + long_description="""\ + CloudHarness Sample api + """ +) + diff --git a/applications/samples/src/test-requirements.txt b/applications/samples/src/test-requirements.txt new file mode 100644 index 00000000..a2626d87 --- /dev/null +++ b/applications/samples/src/test-requirements.txt @@ -0,0 +1,4 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 +flask_testing==0.6.1 \ No newline at end of file diff --git a/applications/samples/src/tox.ini b/applications/samples/src/tox.ini new file mode 100644 index 00000000..8f380ee0 --- /dev/null +++ b/applications/samples/src/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=api_samples \ No newline at end of file diff --git a/applications/samples/src/www/index.html b/applications/samples/src/www/index.html new file mode 100644 index 00000000..75b5ddb0 --- /dev/null +++ b/applications/samples/src/www/index.html @@ -0,0 +1 @@ +Index of cloudharness sample application \ No newline at end of file diff --git a/applications/samples/tasks/sum/Dockerfile b/applications/samples/tasks/sum/Dockerfile new file mode 100644 index 00000000..2c344f77 --- /dev/null +++ b/applications/samples/tasks/sum/Dockerfile @@ -0,0 +1,10 @@ +ARG REGISTRY=r.cfcr.io/tarelli/ +ARG TAG=latest +FROM ${REGISTRY}cloudharness-base:${TAG} + +ADD . / + +ENV a 0 +ENV b 0 + +CMD python main.py $a $b \ No newline at end of file diff --git a/applications/samples/tasks/sum/main.py b/applications/samples/tasks/sum/main.py new file mode 100644 index 00000000..578c4952 --- /dev/null +++ b/applications/samples/tasks/sum/main.py @@ -0,0 +1,18 @@ +import sys +import os + +assert len(sys.argv) > 2, 'Arguments not specified. 
Cannot proceed'
+
+from cloudharness.workflows.utils import get_shared_directory
+
+a = float(sys.argv[1])
+b = float(sys.argv[2])
+
+for env in os.environ:
+    print(f"{env}:{os.environ[env]}")
+
+file_name = os.path.join(get_shared_directory(), "result")
+print("File name is", file_name)
+
+with open(file_name, "w") as f:
+    f.write(str(a+b))
diff --git a/applications/workflows/README.md b/applications/workflows/README.md
new file mode 100644
index 00000000..3c73e864
--- /dev/null
+++ b/applications/workflows/README.md
@@ -0,0 +1,3 @@
+# Workflows
+
+
diff --git a/applications/workflows/api/config.json b/applications/workflows/api/config.json
new file mode 100644
index 00000000..d22c8cd4
--- /dev/null
+++ b/applications/workflows/api/config.json
@@ -0,0 +1,3 @@
+{
+  "packageName": "workflows_api"
+}
diff --git a/applications/workflows/api/workflows.yaml b/applications/workflows/api/workflows.yaml
new file mode 100644
index 00000000..7290d295
--- /dev/null
+++ b/applications/workflows/api/workflows.yaml
@@ -0,0 +1,174 @@
+openapi: 3.0.0
+
+servers:
+  - description: Metacell host
+    url: https://workflows.cloudharness.metacell.us
+
+info:
+  description: Workflows API
+  version: "0.1.0"
+  title: Workflows API
+  contact:
+    email: cloudharness@metacell.us
+  license:
+    name: UNLICENSED
+
+tags:
+  - name: Create and Access
+    description: standard creation, listing and retrieve
+
+paths:
+
+  /operations:
+    get:
+      summary: lists operations
+      operationId: listOperations
+      tags:
+        - Create and Access
+      description: |
+        see all operations for the user
+      parameters:
+        - in: query
+          name: status
+          description: filter by status
+          schema:
+            $ref: '#/components/schemas/OperationStatus'
+          example: 'Pending'
+        - in: query
+          name: previous_search_token
+          description: continue previous search (pagination chunks)
+          schema:
+            type: string
+        - in: query
+          name: limit
+          description: maximum number of records to return per page
+          schema:
+            type: integer
+            minimum: 1
+            maximum: 50
+            default: 10
+      responses:
+        200:
+          description: search results matching criteria
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/OperationSearchResult'
+        400:
+          description: bad input parameter
+
+  '/operations/{name}':
+    get:
+      summary: get operation by name
+      operationId: getOperation
+      tags:
+        - Create and Access
+      description: |
+        retrieves an operation by its name
+      parameters:
+        - in: path
+          name: name
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: search results matching criteria
+          content:
+            application/json:
+              schema:
+                type: object
+                items:
+                  $ref: '#/components/schemas/Operation'
+        404:
+          description: not found
+    delete:
+      summary: deletes operation by name
+      operationId: deleteOperation
+      tags:
+        - Create and Access
+      description: |
+        delete operation by its name
+      parameters:
+        - in: path
+          name: name
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: delete OK
+        404:
+          description: not found
+  '/operations/{name}/logs':
+    get:
+      summary: get operation logs by name
+      operationId: logOperation
+      tags:
+        - Create and Access
+      description: |
+        retrieves an operation log by its name
+      parameters:
+        - in: path
+          name: name
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: search results matching criteria
+          content:
+            text/plain:
+              schema:
+                type: string
+                example: "Hello world"
+        404:
+          description: not found
+components:
+  schemas:
+    OperationSearchResult:
+      description: a list of operations with metadata about
the result + properties: + meta: + $ref: '#/components/schemas/SearchResultData' + items: + type: array + items: + $ref: '#/components/schemas/Operation' + SearchResultData: + description: describes a search + properties: + continueToken: + description: token to use for pagination + type: string + Operation: + description: represents the status of a distributed API call + properties: + message: + type: string + description: usually set when an error occurred + example: "" + name: + type: string + description: operation name + createTime: + type: string + format: date-time + readOnly: true + example: "2016-08-29T09:12:33.001Z" + status: + $ref: '#/components/schemas/OperationStatus' + workflow: + type: string + description: low level representation as an Argo json + + OperationStatus: + type: string + default: Pending + enum: + - Pending + - Running + - Error + - Succeeded + - Skipped + - Failed \ No newline at end of file diff --git a/applications/workflows/deploy/values.yaml b/applications/workflows/deploy/values.yaml new file mode 100644 index 00000000..232fe98f --- /dev/null +++ b/applications/workflows/deploy/values.yaml @@ -0,0 +1,6 @@ +harvest: false +port: 8080 +subdomain: workflows +autoservice: true +autodeploy: true +serviceaccount: argo-workflows \ No newline at end of file diff --git a/applications/workflows/package-lock.json b/applications/workflows/package-lock.json new file mode 100644 index 00000000..48e341a0 --- /dev/null +++ b/applications/workflows/package-lock.json @@ -0,0 +1,3 @@ +{ + "lockfileVersion": 1 +} diff --git a/applications/workflows/server/.dockerignore b/applications/workflows/server/.dockerignore new file mode 100644 index 00000000..f9619601 --- /dev/null +++ b/applications/workflows/server/.dockerignore @@ -0,0 +1,72 @@ +.travis.yaml +.openapi-generator-ignore +README.md +tox.ini +git_push.sh +test-requirements.txt +setup.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.python-version + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/applications/workflows/server/.gitignore b/applications/workflows/server/.gitignore new file mode 100644 index 00000000..43995bd4 --- /dev/null +++ b/applications/workflows/server/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+.hypothesis/
+venv/
+.venv/
+.python-version
+.pytest_cache
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+#Ipython Notebook
+.ipynb_checkpoints
diff --git a/applications/workflows/server/.openapi-generator-ignore b/applications/workflows/server/.openapi-generator-ignore
new file mode 100644
index 00000000..7484ee59
--- /dev/null
+++ b/applications/workflows/server/.openapi-generator-ignore
@@ -0,0 +1,23 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
diff --git a/applications/workflows/server/.travis.yml b/applications/workflows/server/.travis.yml
new file mode 100644
index 00000000..ad71ee5c
--- /dev/null
+++ b/applications/workflows/server/.travis.yml
@@ -0,0 +1,14 @@
+# ref: https://docs.travis-ci.com/user/languages/python
+language: python
+python:
+  - "3.2"
+  - "3.3"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+  - "3.7"
+  - "3.8"
+# command to install dependencies
+install: "pip install -r requirements.txt"
+# command to run tests
+script: nosetests
diff --git a/applications/workflows/server/Dockerfile b/applications/workflows/server/Dockerfile
new file mode 100644
index 00000000..8c6ebffd
--- /dev/null
+++ b/applications/workflows/server/Dockerfile
@@ -0,0 +1,16 @@
+FROM python:3-alpine
+
+RUN mkdir -p /usr/src/app
+WORKDIR /usr/src/app
+
+COPY requirements.txt /usr/src/app/
+
+RUN pip3 install --no-cache-dir -r requirements.txt
+
+COPY . /usr/src/app
+
+EXPOSE 8080
+
+ENTRYPOINT ["python3"]
+
+CMD ["-m", "workflows_api"]
\ No newline at end of file
diff --git a/applications/workflows/server/README.md b/applications/workflows/server/README.md
new file mode 100644
index 00000000..0f31414a
--- /dev/null
+++ b/applications/workflows/server/README.md
@@ -0,0 +1,49 @@
+# OpenAPI generated server
+
+## Overview
+This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the
+[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This
+is an example of building an OpenAPI-enabled Flask server.
+
+This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
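As a point of reference (an illustrative sketch, not part of the generated file), the Connexion wiring behind every generated server in this patch is minimal and mirrors the `__main__.py` modules added further below:

```python
# Minimal Connexion wiring, mirroring the generated workflows_api/__main__.py.
import connexion

app = connexion.App(__name__, specification_dir='./openapi/')
# pythonic_params=True maps camelCase spec parameters onto snake_case Python
# arguments; each operationId plus the x-openapi-router-controller extension
# in openapi.yaml routes a path to a controller function.
app.add_api('openapi.yaml', arguments={'title': 'Workflows API'}, pythonic_params=True)

if __name__ == '__main__':
    app.run(port=8080)
```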
+
+## Requirements
+Python 3.5.2+
+
+## Usage
+To run the server, please execute the following from the root directory:
+
+```
+pip3 install -r requirements.txt
+python3 -m workflows_api
+```
+
+and open your browser here:
+
+```
+http://localhost:8080/ui/
+```
+
+Your OpenAPI definition lives here:
+
+```
+http://localhost:8080/openapi.json
+```
+
+To launch the integration tests, use tox:
+```
+sudo pip install tox
+tox
+```
+
+## Running with Docker
+
+To run the server in a Docker container, please execute the following from the root directory:
+
+```bash
+# building the image
+docker build -t workflows_api .
+
+# starting up a container
+docker run -p 8080:8080 workflows_api
+```
\ No newline at end of file
diff --git a/applications/workflows/server/git_push.sh b/applications/workflows/server/git_push.sh
new file mode 100644
index 00000000..ced3be2b
--- /dev/null
+++ b/applications/workflows/server/git_push.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
+#
+# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
+
+git_user_id=$1
+git_repo_id=$2
+release_note=$3
+git_host=$4
+
+if [ "$git_host" = "" ]; then
+    git_host="github.com"
+    echo "[INFO] No command line input provided. Set \$git_host to $git_host"
+fi
+
+if [ "$git_user_id" = "" ]; then
+    git_user_id="GIT_USER_ID"
+    echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
+fi
+
+if [ "$git_repo_id" = "" ]; then
+    git_repo_id="GIT_REPO_ID"
+    echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
+fi
+
+if [ "$release_note" = "" ]; then
+    release_note="Minor update"
+    echo "[INFO] No command line input provided. Set \$release_note to $release_note"
+fi
+
+# Initialize the local directory as a Git repository
+git init
+
+# Adds the files in the local repository and stages them for commit.
+git add .
+
+# Commits the tracked changes and prepares them to be pushed to a remote repository.
+git commit -m "$release_note"
+
+# Sets the new remote
+git_remote=`git remote`
+if [ "$git_remote" = "" ]; then # git remote not defined
+
+    if [ "$GIT_TOKEN" = "" ]; then
+        echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/applications/workflows/server/requirements.txt b/applications/workflows/server/requirements.txt new file mode 100644 index 00000000..2639eedf --- /dev/null +++ b/applications/workflows/server/requirements.txt @@ -0,0 +1,7 @@ +connexion >= 2.6.0; python_version>="3.6" +connexion >= 2.3.0; python_version=="3.5" +connexion >= 2.3.0; python_version=="3.4" +connexion == 2.4.0; python_version<="2.7" +swagger-ui-bundle >= 0.0.2 +python_dateutil >= 2.6.0 +setuptools >= 21.0.0 diff --git a/applications/workflows/server/setup.py b/applications/workflows/server/setup.py new file mode 100644 index 00000000..9bd6a02e --- /dev/null +++ b/applications/workflows/server/setup.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +import sys +from setuptools import setup, find_packages + +NAME = "workflows_api" +VERSION = "1.0.0" + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = [ + "connexion>=2.0.2", + "swagger-ui-bundle>=0.0.2", + "python_dateutil>=2.6.0" +] + +setup( + name=NAME, + version=VERSION, + description="Workflows API", + author_email="cloudharness@metacell.us", + url="", + keywords=["OpenAPI", "Workflows API"], + install_requires=REQUIRES, + packages=find_packages(), + package_data={'': ['openapi/openapi.yaml']}, + include_package_data=True, + entry_points={ + 'console_scripts': ['workflows_api=workflows_api.__main__:main']}, + long_description="""\ + Workflows API + """ +) + diff --git a/applications/workflows/server/test-requirements.txt b/applications/workflows/server/test-requirements.txt new file mode 100644 index 00000000..a2626d87 --- /dev/null +++ b/applications/workflows/server/test-requirements.txt @@ -0,0 +1,4 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 +flask_testing==0.6.1 \ No newline at end of file diff --git a/applications/workflows/server/tox.ini b/applications/workflows/server/tox.ini new file mode 100644 index 00000000..e6dce35b --- /dev/null +++ b/applications/workflows/server/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=workflows_api \ No newline at end of file diff --git a/applications/workflows/server/workflows_api/__init__.py b/applications/workflows/server/workflows_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/workflows/server/workflows_api/__main__.py b/applications/workflows/server/workflows_api/__main__.py new file mode 100644 index 00000000..40782233 --- /dev/null +++ b/applications/workflows/server/workflows_api/__main__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import connexion + +from workflows_api import encoder + + +def main(): + app = connexion.App(__name__, specification_dir='./openapi/') + app.app.json_encoder = encoder.JSONEncoder + app.add_api('openapi.yaml', + arguments={'title': 'Workflows API'}, + pythonic_params=True) + app.run(port=8080) + + +if __name__ == '__main__': + 
main()
diff --git a/applications/workflows/server/workflows_api/controllers/__init__.py b/applications/workflows/server/workflows_api/controllers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/applications/workflows/server/workflows_api/controllers/create_and_access_controller.py b/applications/workflows/server/workflows_api/controllers/create_and_access_controller.py
new file mode 100644
index 00000000..f7226c97
--- /dev/null
+++ b/applications/workflows/server/workflows_api/controllers/create_and_access_controller.py
@@ -0,0 +1,65 @@
+import connexion
+import six
+
+from workflows_api.models.operation import Operation  # noqa: E501
+from workflows_api.models.operation_search_result import OperationSearchResult  # noqa: E501
+from workflows_api.models.operation_status import OperationStatus  # noqa: E501
+from workflows_api import util
+
+
+def delete_operation(name):  # noqa: E501
+    """deletes operation by name
+
+    delete operation by its name # noqa: E501
+
+    :param name:
+    :type name: str
+
+    :rtype: None
+    """
+    return 'do some magic!'
+
+
+def get_operation(name):  # noqa: E501
+    """get operation by name
+
+    retrieves an operation by its name # noqa: E501
+
+    :param name:
+    :type name: str
+
+    :rtype: List[Operation]
+    """
+    return 'do some magic!'
+
+
+def list_operations(status=None, previous_search_token=None, limit=None):  # noqa: E501
+    """lists operations
+
+    see all operations for the user # noqa: E501
+
+    :param status: filter by status
+    :type status: dict | bytes
+    :param previous_search_token: continue previous search (pagination chunks)
+    :type previous_search_token: str
+    :param limit: maximum number of records to return per page
+    :type limit: int
+
+    :rtype: OperationSearchResult
+    """
+    if connexion.request.is_json:
+        status = OperationStatus.from_dict(connexion.request.get_json())  # noqa: E501
+    return 'do some magic!'
+
+
+def log_operation(name):  # noqa: E501
+    """get operation logs by name
+
+    retrieves an operation log by its name # noqa: E501
+
+    :param name:
+    :type name: str
+
+    :rtype: str
+    """
+    return 'do some magic!'
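The controller stubs above only sketch the REST surface; the concrete behaviour arrives in the `src` variant further down. For orientation, this is how the endpoints are meant to be exercised once a server is running locally (an illustrative sketch, not part of this patch: it assumes the `requests` package and a server started with `python3 -m workflows_api`):

```python
# Hypothetical smoke test against a locally running workflows_api server
# (serves on port 8080, per the Dockerfile and README in this patch).
import requests  # assumed to be installed; not a dependency of this patch

BASE = "http://localhost:8080"

# list operations, filtering by a valid OperationStatus enum value
resp = requests.get(f"{BASE}/operations", params={"status": "Pending", "limit": 10})
print(resp.status_code, resp.json())  # expect an OperationSearchResult payload

# fetch the logs of a (hypothetical) operation by name
logs = requests.get(f"{BASE}/operations/my-operation/logs")
print(logs.status_code, logs.text)
```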
diff --git a/applications/workflows/server/workflows_api/controllers/security_controller_.py b/applications/workflows/server/workflows_api/controllers/security_controller_.py new file mode 100644 index 00000000..ecac4055 --- /dev/null +++ b/applications/workflows/server/workflows_api/controllers/security_controller_.py @@ -0,0 +1,3 @@ +from typing import List + + diff --git a/applications/workflows/server/workflows_api/encoder.py b/applications/workflows/server/workflows_api/encoder.py new file mode 100644 index 00000000..ffc6e492 --- /dev/null +++ b/applications/workflows/server/workflows_api/encoder.py @@ -0,0 +1,20 @@ +from connexion.apps.flask_app import FlaskJSONEncoder +import six + +from workflows_api.models.base_model_ import Model + + +class JSONEncoder(FlaskJSONEncoder): + include_nulls = False + + def default(self, o): + if isinstance(o, Model): + dikt = {} + for attr, _ in six.iteritems(o.openapi_types): + value = getattr(o, attr) + if value is None and not self.include_nulls: + continue + attr = o.attribute_map[attr] + dikt[attr] = value + return dikt + return FlaskJSONEncoder.default(self, o) diff --git a/applications/workflows/server/workflows_api/models/__init__.py b/applications/workflows/server/workflows_api/models/__init__.py new file mode 100644 index 00000000..ba414fcd --- /dev/null +++ b/applications/workflows/server/workflows_api/models/__init__.py @@ -0,0 +1,9 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import +# import models into model package +from workflows_api.models.operation import Operation +from workflows_api.models.operation_search_result import OperationSearchResult +from workflows_api.models.operation_status import OperationStatus +from workflows_api.models.search_result_data import SearchResultData diff --git a/applications/workflows/server/workflows_api/models/base_model_.py b/applications/workflows/server/workflows_api/models/base_model_.py new file mode 100644 index 00000000..d532ae7b --- /dev/null +++ b/applications/workflows/server/workflows_api/models/base_model_.py @@ -0,0 +1,69 @@ +import pprint + +import six +import typing + +from workflows_api import util + +T = typing.TypeVar('T') + + +class Model(object): + # openapiTypes: The key is attribute name and the + # value is attribute type. + openapi_types = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. 
+ attribute_map = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/applications/workflows/server/workflows_api/models/operation.py b/applications/workflows/server/workflows_api/models/operation.py new file mode 100644 index 00000000..c4d7ab75 --- /dev/null +++ b/applications/workflows/server/workflows_api/models/operation.py @@ -0,0 +1,176 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api.models.operation_status import OperationStatus +from workflows_api import util + +from workflows_api.models.operation_status import OperationStatus # noqa: E501 + +class Operation(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, message=None, name=None, create_time=None, status=None, workflow=None): # noqa: E501 + """Operation - a model defined in OpenAPI + + :param message: The message of this Operation. # noqa: E501 + :type message: str + :param name: The name of this Operation. # noqa: E501 + :type name: str + :param create_time: The create_time of this Operation. # noqa: E501 + :type create_time: datetime + :param status: The status of this Operation. # noqa: E501 + :type status: OperationStatus + :param workflow: The workflow of this Operation. # noqa: E501 + :type workflow: str + """ + self.openapi_types = { + 'message': str, + 'name': str, + 'create_time': datetime, + 'status': OperationStatus, + 'workflow': str + } + + self.attribute_map = { + 'message': 'message', + 'name': 'name', + 'create_time': 'createTime', + 'status': 'status', + 'workflow': 'workflow' + } + + self._message = message + self._name = name + self._create_time = create_time + self._status = status + self._workflow = workflow + + @classmethod + def from_dict(cls, dikt) -> 'Operation': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The Operation of this Operation. # noqa: E501 + :rtype: Operation + """ + return util.deserialize_model(dikt, cls) + + @property + def message(self): + """Gets the message of this Operation. + + usually set when an error occurred # noqa: E501 + + :return: The message of this Operation. 
+ :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this Operation. + + usually set when an error occurred # noqa: E501 + + :param message: The message of this Operation. + :type message: str + """ + + self._message = message + + @property + def name(self): + """Gets the name of this Operation. + + operation name # noqa: E501 + + :return: The name of this Operation. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this Operation. + + operation name # noqa: E501 + + :param name: The name of this Operation. + :type name: str + """ + + self._name = name + + @property + def create_time(self): + """Gets the create_time of this Operation. + + + :return: The create_time of this Operation. + :rtype: datetime + """ + return self._create_time + + @create_time.setter + def create_time(self, create_time): + """Sets the create_time of this Operation. + + + :param create_time: The create_time of this Operation. + :type create_time: datetime + """ + + self._create_time = create_time + + @property + def status(self): + """Gets the status of this Operation. + + + :return: The status of this Operation. + :rtype: OperationStatus + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this Operation. + + + :param status: The status of this Operation. + :type status: OperationStatus + """ + + self._status = status + + @property + def workflow(self): + """Gets the workflow of this Operation. + + low level representation as an Argo json # noqa: E501 + + :return: The workflow of this Operation. + :rtype: str + """ + return self._workflow + + @workflow.setter + def workflow(self, workflow): + """Sets the workflow of this Operation. + + low level representation as an Argo json # noqa: E501 + + :param workflow: The workflow of this Operation. + :type workflow: str + """ + + self._workflow = workflow diff --git a/applications/workflows/server/workflows_api/models/operation_search_result.py b/applications/workflows/server/workflows_api/models/operation_search_result.py new file mode 100644 index 00000000..d51b1b9b --- /dev/null +++ b/applications/workflows/server/workflows_api/models/operation_search_result.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api.models.operation import Operation +from workflows_api.models.search_result_data import SearchResultData +from workflows_api import util + +from workflows_api.models.operation import Operation # noqa: E501 +from workflows_api.models.search_result_data import SearchResultData # noqa: E501 + +class OperationSearchResult(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, meta=None, items=None): # noqa: E501 + """OperationSearchResult - a model defined in OpenAPI + + :param meta: The meta of this OperationSearchResult. # noqa: E501 + :type meta: SearchResultData + :param items: The items of this OperationSearchResult. 
# noqa: E501 + :type items: List[Operation] + """ + self.openapi_types = { + 'meta': SearchResultData, + 'items': List[Operation] + } + + self.attribute_map = { + 'meta': 'meta', + 'items': 'items' + } + + self._meta = meta + self._items = items + + @classmethod + def from_dict(cls, dikt) -> 'OperationSearchResult': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The OperationSearchResult of this OperationSearchResult. # noqa: E501 + :rtype: OperationSearchResult + """ + return util.deserialize_model(dikt, cls) + + @property + def meta(self): + """Gets the meta of this OperationSearchResult. + + + :return: The meta of this OperationSearchResult. + :rtype: SearchResultData + """ + return self._meta + + @meta.setter + def meta(self, meta): + """Sets the meta of this OperationSearchResult. + + + :param meta: The meta of this OperationSearchResult. + :type meta: SearchResultData + """ + + self._meta = meta + + @property + def items(self): + """Gets the items of this OperationSearchResult. + + + :return: The items of this OperationSearchResult. + :rtype: List[Operation] + """ + return self._items + + @items.setter + def items(self, items): + """Sets the items of this OperationSearchResult. + + + :param items: The items of this OperationSearchResult. + :type items: List[Operation] + """ + + self._items = items diff --git a/applications/workflows/server/workflows_api/models/operation_status.py b/applications/workflows/server/workflows_api/models/operation_status.py new file mode 100644 index 00000000..e4492c7d --- /dev/null +++ b/applications/workflows/server/workflows_api/models/operation_status.py @@ -0,0 +1,46 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util + + +class OperationStatus(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + PENDING = "Pending" + RUNNING = "Running" + ERROR = "Error" + SUCCEEDED = "Succeeded" + SKIPPED = "Skipped" + FAILED = "Failed" + def __init__(self): # noqa: E501 + """OperationStatus - a model defined in OpenAPI + + """ + self.openapi_types = { + } + + self.attribute_map = { + } + + @classmethod + def from_dict(cls, dikt) -> 'OperationStatus': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The OperationStatus of this OperationStatus. # noqa: E501 + :rtype: OperationStatus + """ + return util.deserialize_model(dikt, cls) diff --git a/applications/workflows/server/workflows_api/models/search_result_data.py b/applications/workflows/server/workflows_api/models/search_result_data.py new file mode 100644 index 00000000..e50fafec --- /dev/null +++ b/applications/workflows/server/workflows_api/models/search_result_data.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util + + +class SearchResultData(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. 
+ """ + + def __init__(self, continue_token=None): # noqa: E501 + """SearchResultData - a model defined in OpenAPI + + :param continue_token: The continue_token of this SearchResultData. # noqa: E501 + :type continue_token: str + """ + self.openapi_types = { + 'continue_token': str + } + + self.attribute_map = { + 'continue_token': 'continueToken' + } + + self._continue_token = continue_token + + @classmethod + def from_dict(cls, dikt) -> 'SearchResultData': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The SearchResultData of this SearchResultData. # noqa: E501 + :rtype: SearchResultData + """ + return util.deserialize_model(dikt, cls) + + @property + def continue_token(self): + """Gets the continue_token of this SearchResultData. + + token to use for pagination # noqa: E501 + + :return: The continue_token of this SearchResultData. + :rtype: str + """ + return self._continue_token + + @continue_token.setter + def continue_token(self, continue_token): + """Sets the continue_token of this SearchResultData. + + token to use for pagination # noqa: E501 + + :param continue_token: The continue_token of this SearchResultData. + :type continue_token: str + """ + + self._continue_token = continue_token diff --git a/applications/workflows/server/workflows_api/openapi/openapi.yaml b/applications/workflows/server/workflows_api/openapi/openapi.yaml new file mode 100644 index 00000000..05556983 --- /dev/null +++ b/applications/workflows/server/workflows_api/openapi/openapi.yaml @@ -0,0 +1,204 @@ +openapi: 3.0.0 +info: + contact: + email: cloudharness@metacell.us + description: Workflows API + license: + name: UNLICENSED + title: Workflows API + version: 0.1.0 +servers: +- description: Metacell host + url: https://workflows.cloudharness.metacell.us +tags: +- description: standard creation, listing and retrieve + name: Create and Access +paths: + /operations: + get: + description: | + see all operations for the user + operationId: list_operations + parameters: + - description: filter by status + example: QUEUED + explode: true + in: query + name: status + required: false + schema: + $ref: '#/components/schemas/OperationStatus' + style: form + - description: continue previous search (pagination chunks) + explode: true + in: query + name: previous_search_token + required: false + schema: + type: string + style: form + - description: maximum number of records to return per page + explode: true + in: query + name: limit + required: false + schema: + default: 10 + maximum: 50 + minimum: 1 + type: integer + style: form + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/OperationSearchResult' + description: search results matching criteria + "400": + description: bad input parameter + summary: lists operations + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller + /operations/{name}: + delete: + description: | + delete operation by its name + operationId: delete_operation + parameters: + - explode: false + in: path + name: name + required: true + schema: + type: string + style: simple + responses: + "200": + description: delete OK + "404": + description: not found + summary: deletes operation by name + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller + get: + description: | + retrieves an operation by its name + operationId: get_operation + parameters: + - explode: false + in: path + name: name + required: 
true
+        schema:
+          type: string
+        style: simple
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                items:
+                  $ref: '#/components/schemas/Operation'
+                type: array
+          description: search results matching criteria
+        "404":
+          description: not found
+      summary: get operation by name
+      tags:
+      - Create and Access
+      x-openapi-router-controller: workflows_api.controllers.create_and_access_controller
+  /operations/{name}/logs:
+    get:
+      description: |
+        retrieves an operation log by its name
+      operationId: log_operation
+      parameters:
+      - explode: false
+        in: path
+        name: name
+        required: true
+        schema:
+          type: string
+        style: simple
+      responses:
+        "200":
+          content:
+            text/plain:
+              schema:
+                example: Hello world
+                type: string
+          description: search results matching criteria
+        "404":
+          description: not found
+      summary: get operation logs by name
+      tags:
+      - Create and Access
+      x-openapi-router-controller: workflows_api.controllers.create_and_access_controller
+components:
+  schemas:
+    OperationSearchResult:
+      description: a list of operations with metadata about the result
+      example:
+        meta:
+          continueToken: continueToken
+        items:
+        - workflow: workflow
+          createTime: 2016-08-29T09:12:33.001Z
+          name: name
+          message: message
+        - workflow: workflow
+          createTime: 2016-08-29T09:12:33.001Z
+          name: name
+          message: message
+      properties:
+        meta:
+          $ref: '#/components/schemas/SearchResultData'
+        items:
+          items:
+            $ref: '#/components/schemas/Operation'
+          type: array
+    SearchResultData:
+      description: describes a search
+      example:
+        continueToken: continueToken
+      properties:
+        continueToken:
+          description: token to use for pagination
+          type: string
+    Operation:
+      description: represents the status of a distributed API call
+      example:
+        workflow: workflow
+        createTime: 2016-08-29T09:12:33.001Z
+        name: name
+        message: message
+      properties:
+        message:
+          description: usually set when an error occurred
+          type: string
+        name:
+          description: operation name
+          type: string
+        createTime:
+          example: 2016-08-29T09:12:33.001Z
+          format: date-time
+          readOnly: true
+          type: string
+        status:
+          $ref: '#/components/schemas/OperationStatus'
+        workflow:
+          description: low level representation as an Argo json
+          type: string
+    OperationStatus:
+      default: Pending
+      enum:
+      - Pending
+      - Running
+      - Error
+      - Succeeded
+      - Skipped
+      - Failed
+      type: string
diff --git a/applications/workflows/server/workflows_api/test/__init__.py b/applications/workflows/server/workflows_api/test/__init__.py
new file mode 100644
index 00000000..1f7852ce
--- /dev/null
+++ b/applications/workflows/server/workflows_api/test/__init__.py
@@ -0,0 +1,16 @@
+import logging
+
+import connexion
+from flask_testing import TestCase
+
+from workflows_api.encoder import JSONEncoder
+
+
+class BaseTestCase(TestCase):
+
+    def create_app(self):
+        logging.getLogger('connexion.operation').setLevel('ERROR')
+        app = connexion.App(__name__, specification_dir='../openapi/')
+        app.app.json_encoder = JSONEncoder
+        app.add_api('openapi.yaml', pythonic_params=True)
+        return app.app
diff --git a/applications/workflows/server/workflows_api/test/test_create_and_access_controller.py b/applications/workflows/server/workflows_api/test/test_create_and_access_controller.py
new file mode 100644
index 00000000..04b7ca3b
--- /dev/null
+++ b/applications/workflows/server/workflows_api/test/test_create_and_access_controller.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+import unittest
+
+from flask import json
+from six import BytesIO
+
+from
workflows_api.models.operation import Operation  # noqa: E501
+from workflows_api.models.operation_search_result import OperationSearchResult  # noqa: E501
+from workflows_api.models.operation_status import OperationStatus  # noqa: E501
+from workflows_api.test import BaseTestCase
+
+
+class TestCreateAndAccessController(BaseTestCase):
+    """CreateAndAccessController integration test stubs"""
+
+    def test_delete_operation(self):
+        """Test case for delete_operation
+
+        deletes operation by name
+        """
+        headers = {
+        }
+        response = self.client.open(
+            '/operations/{name}'.format(name='name_example'),
+            method='DELETE',
+            headers=headers)
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+    def test_get_operation(self):
+        """Test case for get_operation
+
+        get operation by name
+        """
+        headers = {
+            'Accept': 'application/json',
+        }
+        response = self.client.open(
+            '/operations/{name}'.format(name='name_example'),
+            method='GET',
+            headers=headers)
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+    def test_list_operations(self):
+        """Test case for list_operations
+
+        lists operations
+        """
+        query_string = [('status', 'Pending'),
+                        ('previous_search_token', 'previous_search_token_example'),
+                        ('limit', 10)]
+        headers = {
+            'Accept': 'application/json',
+        }
+        response = self.client.open(
+            '/operations',
+            method='GET',
+            headers=headers,
+            query_string=query_string)
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+    def test_log_operation(self):
+        """Test case for log_operation
+
+        get operation logs by name
+        """
+        headers = {
+            'Accept': 'text/plain',
+        }
+        response = self.client.open(
+            '/operations/{name}/logs'.format(name='name_example'),
+            method='GET',
+            headers=headers)
+        self.assert200(response,
+                       'Response body is : ' + response.data.decode('utf-8'))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/applications/workflows/server/workflows_api/typing_utils.py b/applications/workflows/server/workflows_api/typing_utils.py
new file mode 100644
index 00000000..0563f81f
--- /dev/null
+++ b/applications/workflows/server/workflows_api/typing_utils.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+
+import sys
+
+if sys.version_info < (3, 7):
+    import typing
+
+    def is_generic(klass):
+        """ Determine whether klass is a generic class """
+        return type(klass) == typing.GenericMeta
+
+    def is_dict(klass):
+        """ Determine whether klass is a Dict """
+        return klass.__extra__ == dict
+
+    def is_list(klass):
+        """ Determine whether klass is a List """
+        return klass.__extra__ == list
+
+else:
+
+    def is_generic(klass):
+        """ Determine whether klass is a generic class """
+        return hasattr(klass, '__origin__')
+
+    def is_dict(klass):
+        """ Determine whether klass is a Dict """
+        return klass.__origin__ == dict
+
+    def is_list(klass):
+        """ Determine whether klass is a List """
+        return klass.__origin__ == list
diff --git a/applications/workflows/server/workflows_api/util.py b/applications/workflows/server/workflows_api/util.py
new file mode 100644
index 00000000..4c21578d
--- /dev/null
+++ b/applications/workflows/server/workflows_api/util.py
@@ -0,0 +1,142 @@
+import datetime
+
+import six
+import typing
+from workflows_api import typing_utils
+
+
+def _deserialize(data, klass):
+    """Deserializes dict, list, str into an object.
+
+    :param data: dict, list or str.
+    :param klass: class literal, or string of class name.
+
+    :return: object.
+ """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool, bytearray): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif typing_utils.is_generic(klass): + if typing_utils.is_list(klass): + return _deserialize_list(data, klass.__args__[0]) + if typing_utils.is_dict(klass): + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return an original value. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. + :rtype: datetime + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. + """ + instance = klass() + + if not instance.openapi_types: + return data + + for attr, attr_type in six.iteritems(instance.openapi_types): + if data is not None \ + and instance.attribute_map[attr] in data \ + and isinstance(data, (list, dict)): + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. 
+ :rtype: dict + """ + return {k: _deserialize(v, boxed_type) + for k, v in six.iteritems(data)} diff --git a/applications/workflows/src/.dockerignore b/applications/workflows/src/.dockerignore new file mode 100644 index 00000000..f9619601 --- /dev/null +++ b/applications/workflows/src/.dockerignore @@ -0,0 +1,72 @@ +.travis.yaml +.openapi-generator-ignore +README.md +tox.ini +git_push.sh +test-requirements.txt +setup.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.python-version + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/applications/workflows/src/.gitignore b/applications/workflows/src/.gitignore new file mode 100644 index 00000000..43995bd4 --- /dev/null +++ b/applications/workflows/src/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/applications/workflows/src/.openapi-generator-ignore b/applications/workflows/src/.openapi-generator-ignore new file mode 100644 index 00000000..b09fd633 --- /dev/null +++ b/applications/workflows/src/.openapi-generator-ignore @@ -0,0 +1,27 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). 
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
+
+Dockerfile
+*/controllers/*
+*/models/*
\ No newline at end of file
diff --git a/applications/workflows/src/.travis.yml b/applications/workflows/src/.travis.yml
new file mode 100644
index 00000000..ad71ee5c
--- /dev/null
+++ b/applications/workflows/src/.travis.yml
@@ -0,0 +1,14 @@
+# ref: https://docs.travis-ci.com/user/languages/python
+language: python
+python:
+  - "3.2"
+  - "3.3"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+  - "3.7"
+  - "3.8"
+# command to install dependencies
+install: "pip install -r requirements.txt"
+# command to run tests
+script: nosetests
diff --git a/applications/workflows/src/Dockerfile b/applications/workflows/src/Dockerfile
new file mode 100644
index 00000000..862083cd
--- /dev/null
+++ b/applications/workflows/src/Dockerfile
@@ -0,0 +1,18 @@
+ARG REGISTRY=r.cfcr.io/tarelli/
+ARG TAG=latest
+FROM ${REGISTRY}cloudharness-base:${TAG}
+
+RUN mkdir -p /usr/src/app
+WORKDIR /usr/src/app
+
+COPY requirements.txt /usr/src/app/
+
+RUN pip3 install --no-cache-dir -r requirements.txt
+
+COPY . /usr/src/app
+
+EXPOSE 8080
+
+ENTRYPOINT ["python3"]
+
+CMD ["-m", "workflows_api"]
\ No newline at end of file
diff --git a/applications/workflows/src/README.md b/applications/workflows/src/README.md
new file mode 100644
index 00000000..0f31414a
--- /dev/null
+++ b/applications/workflows/src/README.md
@@ -0,0 +1,49 @@
+# OpenAPI generated server
+
+## Overview
+This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the
+[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This
+is an example of building an OpenAPI-enabled Flask server.
+
+This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask.
+
+## Requirements
+Python 3.5.2+
+
+## Usage
+To run the server, please execute the following from the root directory:
+
+```
+pip3 install -r requirements.txt
+python3 -m workflows_api
+```
+
+and open your browser here:
+
+```
+http://localhost:8080/ui/
+```
+
+Your OpenAPI definition lives here:
+
+```
+http://localhost:8080/openapi.json
+```
+
+To launch the integration tests, use tox:
+```
+sudo pip install tox
+tox
+```
+
+## Running with Docker
+
+To run the server in a Docker container, please execute the following from the root directory:
+
+```bash
+# building the image
+docker build -t workflows_api .
+
+# starting up a container
+docker run -p 8080:8080 workflows_api
+```
\ No newline at end of file
diff --git a/applications/workflows/src/__init__.py b/applications/workflows/src/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/applications/workflows/src/git_push.sh b/applications/workflows/src/git_push.sh
new file mode 100644
index 00000000..ced3be2b
--- /dev/null
+++ b/applications/workflows/src/git_push.sh
@@ -0,0 +1,58 @@
+#!/bin/sh
+# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
+#
+# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
+
+git_user_id=$1
+git_repo_id=$2
+release_note=$3
+git_host=$4
+
+if [ "$git_host" = "" ]; then
+    git_host="github.com"
+    echo "[INFO] No command line input provided. Set \$git_host to $git_host"
+fi
+
+if [ "$git_user_id" = "" ]; then
+    git_user_id="GIT_USER_ID"
+    echo "[INFO] No command line input provided.
Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." + git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/applications/workflows/src/requirements.txt b/applications/workflows/src/requirements.txt new file mode 100644 index 00000000..2639eedf --- /dev/null +++ b/applications/workflows/src/requirements.txt @@ -0,0 +1,7 @@ +connexion >= 2.6.0; python_version>="3.6" +connexion >= 2.3.0; python_version=="3.5" +connexion >= 2.3.0; python_version=="3.4" +connexion == 2.4.0; python_version<="2.7" +swagger-ui-bundle >= 0.0.2 +python_dateutil >= 2.6.0 +setuptools >= 21.0.0 diff --git a/applications/workflows/src/setup.py b/applications/workflows/src/setup.py new file mode 100644 index 00000000..9bd6a02e --- /dev/null +++ b/applications/workflows/src/setup.py @@ -0,0 +1,39 @@ +# coding: utf-8 + +import sys +from setuptools import setup, find_packages + +NAME = "workflows_api" +VERSION = "1.0.0" + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = [ + "connexion>=2.0.2", + "swagger-ui-bundle>=0.0.2", + "python_dateutil>=2.6.0" +] + +setup( + name=NAME, + version=VERSION, + description="Workflows API", + author_email="cloudharness@metacell.us", + url="", + keywords=["OpenAPI", "Workflows API"], + install_requires=REQUIRES, + packages=find_packages(), + package_data={'': ['openapi/openapi.yaml']}, + include_package_data=True, + entry_points={ + 'console_scripts': ['workflows_api=workflows_api.__main__:main']}, + long_description="""\ + Workflows API + """ +) + diff --git a/applications/workflows/src/test-requirements.txt b/applications/workflows/src/test-requirements.txt new file mode 100644 index 00000000..a2626d87 --- /dev/null +++ b/applications/workflows/src/test-requirements.txt @@ -0,0 +1,4 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 +flask_testing==0.6.1 \ No newline at end of file diff --git a/applications/workflows/src/tox.ini b/applications/workflows/src/tox.ini new file mode 100644 index 00000000..e6dce35b --- /dev/null +++ b/applications/workflows/src/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest 
--cov=workflows_api \ No newline at end of file diff --git a/applications/workflows/src/workflows_api/__init__.py b/applications/workflows/src/workflows_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/workflows/src/workflows_api/__main__.py b/applications/workflows/src/workflows_api/__main__.py new file mode 100644 index 00000000..40782233 --- /dev/null +++ b/applications/workflows/src/workflows_api/__main__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import connexion + +from workflows_api import encoder + + +def main(): + app = connexion.App(__name__, specification_dir='./openapi/') + app.app.json_encoder = encoder.JSONEncoder + app.add_api('openapi.yaml', + arguments={'title': 'Workflows API'}, + pythonic_params=True) + app.run(port=8080) + + +if __name__ == '__main__': + main() diff --git a/applications/workflows/src/workflows_api/controllers/__init__.py b/applications/workflows/src/workflows_api/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/workflows/src/workflows_api/controllers/create_and_access_controller.py b/applications/workflows/src/workflows_api/controllers/create_and_access_controller.py new file mode 100644 index 00000000..c2f1f5d5 --- /dev/null +++ b/applications/workflows/src/workflows_api/controllers/create_and_access_controller.py @@ -0,0 +1,90 @@ +import connexion +import six + +from workflows_api.models.operation import Operation # noqa: E501 +from workflows_api.models.operation_search_result import OperationSearchResult # noqa: E501 +from workflows_api.models.operation_status import OperationStatus # noqa: E501 +from workflows_api.models.search_result_data import SearchResultData # noqa: E501 +from workflows_api import util + +from workflows_api.service import workflow_service +from workflows_api.service.workflow_service import OperationNotFound, OperationException, BadParam + +from cloudharness import log + + +def delete_operation(name): # noqa: E501 + """deletes operation by name + + delete operation by its name # noqa: E501 + + :param name: + :type name: str + + :rtype: None + """ + try: + workflow_service.delete_operation(name) + except OperationNotFound as e: + return (f'{name} not found', 404) + except OperationException as e: + log.error(f'Unhandled remote exception while deleting workflow {name}', exc_info=e) + return f'Unexpected error', e.status + + +def get_operation(name): # noqa: E501 + """get operation by name + + retrieves an operation by its name # noqa: E501 + + :param name: + :type name: str + + :rtype: List[Operation] + """ + try: + return workflow_service.get_operation(name) + except OperationNotFound as e: + return (f'{name} not found', 404) + except OperationException as e: + log.error(f'Unhandled remote exception while retrieving workflow {name}', exc_info=e) + return f'Unexpected error', e.status + + +def list_operations(status=None, previous_search_token=None, limit=None): # noqa: E501 + """lists operations + + see all operations for the user # noqa: E501 + + :param status: filter by status + :type status: dict | bytes + :param previous_search: continue previous search (pagination chunks) + :type previous_search: dict | bytes + :param limit: maximum number of records to return per page + :type limit: int + + :rtype: OperationSearchResult + """ + try: + return workflow_service.list_operations(status, continue_token=previous_search_token, limit=limit) + except BadParam as e: + return (f'Bad parameter: {e.param}, {e}', e.status) + except OperationException 
as e: + log.error(f'Unhandled remote exception while retrieving workflows', exc_info=e) + return '', e.status + +def log_operation(name): # noqa: E501 + """get operation by name + + retrieves an operation log by its name # noqa: E501 + + :param name: + :type name: str + + :rtype: str + """ + if not name or name == '': + return '' + + return workflow_service.log_operation(name) + diff --git a/applications/workflows/src/workflows_api/encoder.py b/applications/workflows/src/workflows_api/encoder.py new file mode 100644 index 00000000..ffc6e492 --- /dev/null +++ b/applications/workflows/src/workflows_api/encoder.py @@ -0,0 +1,20 @@ +from connexion.apps.flask_app import FlaskJSONEncoder +import six + +from workflows_api.models.base_model_ import Model + + +class JSONEncoder(FlaskJSONEncoder): + include_nulls = False + + def default(self, o): + if isinstance(o, Model): + dikt = {} + for attr, _ in six.iteritems(o.openapi_types): + value = getattr(o, attr) + if value is None and not self.include_nulls: + continue + attr = o.attribute_map[attr] + dikt[attr] = value + return dikt + return FlaskJSONEncoder.default(self, o) diff --git a/applications/workflows/src/workflows_api/models/__init__.py b/applications/workflows/src/workflows_api/models/__init__.py new file mode 100644 index 00000000..ba414fcd --- /dev/null +++ b/applications/workflows/src/workflows_api/models/__init__.py @@ -0,0 +1,9 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import +# import models into model package +from workflows_api.models.operation import Operation +from workflows_api.models.operation_search_result import OperationSearchResult +from workflows_api.models.operation_status import OperationStatus +from workflows_api.models.search_result_data import SearchResultData diff --git a/applications/workflows/src/workflows_api/models/base_model_.py b/applications/workflows/src/workflows_api/models/base_model_.py new file mode 100644 index 00000000..d532ae7b --- /dev/null +++ b/applications/workflows/src/workflows_api/models/base_model_.py @@ -0,0 +1,69 @@ +import pprint + +import six +import typing + +from workflows_api import util + +T = typing.TypeVar('T') + + +class Model(object): + # openapiTypes: The key is attribute name and the + # value is attribute type. + openapi_types = {} + + # attributeMap: The key is attribute name and the + # value is json key in definition. 
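+    # A concrete example from this patch's generated Operation model:
+    #     openapi_types = {'create_time': datetime, ...}
+    #     attribute_map = {'create_time': 'createTime', ...}
+    # i.e. snake_case Python attributes map to the camelCase keys used in
+    # the OpenAPI document when (de)serializing.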
+ attribute_map = {} + + @classmethod + def from_dict(cls: typing.Type[T], dikt) -> T: + """Returns the dict as a model""" + return util.deserialize_model(dikt, cls) + + def to_dict(self): + """Returns the model properties as a dict + + :rtype: dict + """ + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model + + :rtype: str + """ + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/applications/workflows/src/workflows_api/models/operation.py b/applications/workflows/src/workflows_api/models/operation.py new file mode 100644 index 00000000..66110437 --- /dev/null +++ b/applications/workflows/src/workflows_api/models/operation.py @@ -0,0 +1,174 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util +from .operation_status import OperationStatus + +class Operation(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, message=None, name=None, create_time=None, status=None, workflow=None): # noqa: E501 + """Operation - a model defined in OpenAPI + + :param message: The message of this Operation. # noqa: E501 + :type message: str + :param name: The name of this Operation. # noqa: E501 + :type name: str + :param create_time: The create_time of this Operation. # noqa: E501 + :type create_time: datetime + :param status: The status of this Operation. # noqa: E501 + :type status: OperationStatus + :param workflow: The workflow of this Operation. # noqa: E501 + :type workflow: str + """ + self.openapi_types = { + 'message': str, + 'name': str, + 'create_time': datetime, + 'status': OperationStatus, + 'workflow': str + } + + self.attribute_map = { + 'message': 'message', + 'name': 'name', + 'create_time': 'createTime', + 'status': 'status', + 'workflow': 'workflow' + } + + self._message = message + self._name = name + self._create_time = create_time + self._status = status + self._workflow = workflow + + @classmethod + def from_dict(cls, dikt) -> 'Operation': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The Operation of this Operation. # noqa: E501 + :rtype: Operation + """ + return util.deserialize_model(dikt, cls) + + @property + def message(self): + """Gets the message of this Operation. + + usually set when an error occurred # noqa: E501 + + :return: The message of this Operation. + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this Operation. 
+ + usually set when an error occurred # noqa: E501 + + :param message: The message of this Operation. + :type message: str + """ + + self._message = message + + @property + def name(self): + """Gets the name of this Operation. + + operation name # noqa: E501 + + :return: The name of this Operation. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this Operation. + + operation name # noqa: E501 + + :param name: The name of this Operation. + :type name: str + """ + + self._name = name + + @property + def create_time(self): + """Gets the create_time of this Operation. + + + :return: The create_time of this Operation. + :rtype: datetime + """ + return self._create_time + + @create_time.setter + def create_time(self, create_time): + """Sets the create_time of this Operation. + + + :param create_time: The create_time of this Operation. + :type create_time: datetime + """ + + self._create_time = create_time + + @property + def status(self): + """Gets the status of this Operation. + + + :return: The status of this Operation. + :rtype: OperationStatus + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this Operation. + + + :param status: The status of this Operation. + :type status: OperationStatus + """ + + self._status = status + + @property + def workflow(self): + """Gets the workflow of this Operation. + + low level representation as an Argo json # noqa: E501 + + :return: The workflow of this Operation. + :rtype: str + """ + return self._workflow + + @workflow.setter + def workflow(self, workflow): + """Sets the workflow of this Operation. + + low level representation as an Argo json # noqa: E501 + + :param workflow: The workflow of this Operation. + :type workflow: str + """ + + self._workflow = workflow diff --git a/applications/workflows/src/workflows_api/models/operation_search_result.py b/applications/workflows/src/workflows_api/models/operation_search_result.py new file mode 100644 index 00000000..7c5f9770 --- /dev/null +++ b/applications/workflows/src/workflows_api/models/operation_search_result.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util +from workflows_api.models.search_result_data import SearchResultData +from workflows_api.models.operation import Operation + +class OperationSearchResult(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, meta=None, items=None): # noqa: E501 + """OperationSearchResult - a model defined in OpenAPI + + :param meta: The meta of this OperationSearchResult. # noqa: E501 + :type meta: SearchResultData + :param items: The items of this OperationSearchResult. # noqa: E501 + :type items: List[Operation] + """ + self.openapi_types = { + 'meta': SearchResultData, + 'items': List[Operation] + } + + self.attribute_map = { + 'meta': 'meta', + 'items': 'items' + } + + self._meta = meta + self._items = items + + @classmethod + def from_dict(cls, dikt) -> 'OperationSearchResult': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The OperationSearchResult of this OperationSearchResult. 
# noqa: E501 + :rtype: OperationSearchResult + """ + return util.deserialize_model(dikt, cls) + + @property + def meta(self): + """Gets the meta of this OperationSearchResult. + + + :return: The meta of this OperationSearchResult. + :rtype: SearchResultData + """ + return self._meta + + @meta.setter + def meta(self, meta): + """Sets the meta of this OperationSearchResult. + + + :param meta: The meta of this OperationSearchResult. + :type meta: SearchResultData + """ + + self._meta = meta + + @property + def items(self): + """Gets the items of this OperationSearchResult. + + + :return: The items of this OperationSearchResult. + :rtype: List[Operation] + """ + return self._items + + @items.setter + def items(self, items): + """Sets the items of this OperationSearchResult. + + + :param items: The items of this OperationSearchResult. + :type items: List[Operation] + """ + + self._items = items diff --git a/applications/workflows/src/workflows_api/models/operation_status.py b/applications/workflows/src/workflows_api/models/operation_status.py new file mode 100644 index 00000000..1b2ba6c7 --- /dev/null +++ b/applications/workflows/src/workflows_api/models/operation_status.py @@ -0,0 +1,47 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util + + +class OperationStatus(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + PENDING = "Pending" + RUNNING = "Running" + ERROR = "Error" + SUCCEEDED = "Succeeded" + SKIPPED = "Skipped" + FAILED = "Failed" + + def __init__(self): # noqa: E501 + """OperationStatus - a model defined in OpenAPI + + """ + self.openapi_types = { + } + + self.attribute_map = { + } + + @classmethod + def from_dict(cls, dikt) -> 'OperationStatus': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The OperationStatus of this OperationStatus. # noqa: E501 + :rtype: OperationStatus + """ + return util.deserialize_model(dikt, cls) diff --git a/applications/workflows/src/workflows_api/models/operation_type.py b/applications/workflows/src/workflows_api/models/operation_type.py new file mode 100644 index 00000000..f9f93197 --- /dev/null +++ b/applications/workflows/src/workflows_api/models/operation_type.py @@ -0,0 +1,44 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util + + +class OperationType(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + COMPOSITEOPERATION = "CompositeOperation" + MULTIPLECOMPOSITEOPERATION = "MultipleCompositeOperation" + SPARKOPERATION = "SparkOperation" + + def __init__(self): # noqa: E501 + """OperationType - a model defined in OpenAPI + + """ + self.openapi_types = { + } + + self.attribute_map = { + } + + @classmethod + def from_dict(cls, dikt) -> 'OperationType': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The OperationType of this OperationType. 
# noqa: E501 + :rtype: OperationType + """ + return util.deserialize_model(dikt, cls) diff --git a/applications/workflows/src/workflows_api/models/search_result_data.py b/applications/workflows/src/workflows_api/models/search_result_data.py new file mode 100644 index 00000000..e50fafec --- /dev/null +++ b/applications/workflows/src/workflows_api/models/search_result_data.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workflows_api.models.base_model_ import Model +from workflows_api import util + + +class SearchResultData(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, continue_token=None): # noqa: E501 + """SearchResultData - a model defined in OpenAPI + + :param continue_token: The continue_token of this SearchResultData. # noqa: E501 + :type continue_token: str + """ + self.openapi_types = { + 'continue_token': str + } + + self.attribute_map = { + 'continue_token': 'continueToken' + } + + self._continue_token = continue_token + + @classmethod + def from_dict(cls, dikt) -> 'SearchResultData': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The SearchResultData of this SearchResultData. # noqa: E501 + :rtype: SearchResultData + """ + return util.deserialize_model(dikt, cls) + + @property + def continue_token(self): + """Gets the continue_token of this SearchResultData. + + token to use for pagination # noqa: E501 + + :return: The continue_token of this SearchResultData. + :rtype: str + """ + return self._continue_token + + @continue_token.setter + def continue_token(self, continue_token): + """Sets the continue_token of this SearchResultData. + + token to use for pagination # noqa: E501 + + :param continue_token: The continue_token of this SearchResultData. 
+ :type continue_token: str + """ + + self._continue_token = continue_token diff --git a/applications/workflows/src/workflows_api/openapi/openapi.yaml b/applications/workflows/src/workflows_api/openapi/openapi.yaml new file mode 100644 index 00000000..05556983 --- /dev/null +++ b/applications/workflows/src/workflows_api/openapi/openapi.yaml @@ -0,0 +1,204 @@ +openapi: 3.0.0 +info: + contact: + email: cloudharness@metacell.us + description: Workflows API + license: + name: UNLICENSED + title: Workflows API + version: 0.1.0 +servers: +- description: Metacell host + url: https://workflows.cloudharness.metacell.us +tags: +- description: standard creation, listing and retrieve + name: Create and Access +paths: + /operations: + get: + description: | + see all operations for the user + operationId: list_operations + parameters: + - description: filter by status + example: QUEUED + explode: true + in: query + name: status + required: false + schema: + $ref: '#/components/schemas/OperationStatus' + style: form + - description: continue previous search (pagination chunks) + explode: true + in: query + name: previous_search_token + required: false + schema: + type: string + style: form + - description: maximum number of records to return per page + explode: true + in: query + name: limit + required: false + schema: + default: 10 + maximum: 50 + minimum: 1 + type: integer + style: form + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/OperationSearchResult' + description: search results matching criteria + "400": + description: bad input parameter + summary: lists operations + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller + /operations/{name}: + delete: + description: | + delete operation by its name + operationId: delete_operation + parameters: + - explode: false + in: path + name: name + required: true + schema: + type: string + style: simple + responses: + "200": + description: delete OK + "404": + description: not found + summary: deletes operation by name + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller + get: + description: | + retrieves an operation by its name + operationId: get_operation + parameters: + - explode: false + in: path + name: name + required: true + schema: + type: string + style: simple + responses: + "200": + content: + application/json: + schema: + items: + $ref: '#/components/schemas/Operation' + type: array + description: search results matching criteria + "404": + description: not found + summary: get operation by name + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller + /operations/{name}/logs: + get: + description: | + retrieves an operation log by its name + operationId: log_operation + parameters: + - explode: false + in: path + name: name + required: true + schema: + type: string + style: simple + responses: + "200": + content: + text/plain: + schema: + example: Hello world + type: string + description: search results matching criteria + "404": + description: not found + summary: get operation by name + tags: + - Create and Access + x-openapi-router-controller: workflows_api.controllers.create_and_access_controller +components: + schemas: + OperationSearchResult: + description: a list of operations with meta data about the result + example: + meta: + continueToken: continueToken + items: + - workflow: workflow + createTime: 
2016-08-29T09:12:33.001Z + name: name + message: message + - workflow: workflow + createTime: 2016-08-29T09:12:33.001Z + name: name + message: message + properties: + meta: + $ref: '#/components/schemas/SearchResultData' + items: + items: + $ref: '#/components/schemas/Operation' + type: array + SearchResultData: + description: describes a search + example: + continueToken: continueToken + properties: + continueToken: + description: token to use for pagination + type: string + Operation: + description: represents the status of a distributed API call + example: + workflow: workflow + createTime: 2016-08-29T09:12:33.001Z + name: name + message: message + properties: + message: + description: usually set when an error occurred + type: string + name: + description: operation name + type: string + createTime: + example: 2016-08-29T09:12:33.001Z + format: date-time + readOnly: true + type: string + status: + $ref: '#/components/schemas/OperationStatus' + workflow: + description: low level representation as an Argo json + type: string + OperationStatus: + default: Pending + enum: + - Pending + - Running + - Error + - Succeeded + - Skipped + - Failed + type: string diff --git a/applications/workflows/src/workflows_api/service/__init__.py b/applications/workflows/src/workflows_api/service/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/applications/workflows/src/workflows_api/service/workflow_service.py b/applications/workflows/src/workflows_api/service/workflow_service.py new file mode 100644 index 00000000..c0850410 --- /dev/null +++ b/applications/workflows/src/workflows_api/service/workflow_service.py @@ -0,0 +1,52 @@ +from cloudharness.workflows import argo +from workflows_api.models import OperationSearchResult, Operation, SearchResultData + +OperationNotFound = argo.WorkflowNotFound +OperationException = argo.WorkflowException +BadParam = argo.BadParam + + +def argo_workflow_to_operation(workflow: argo.Workflow): + return Operation(name=workflow.name, status=workflow.status, create_time=workflow.create_time, + workflow=workflow.raw) + + +def delete_operation(name): + """deletes operation by id""" + argo.delete_workflow(name) + + +def get_operation(name): + """get operation by id""" + return argo_workflow_to_operation(argo.get_workflow(name)) + + +def list_operations(status=None, continue_token=None, limit=None) -> OperationSearchResult: + """lists operations + + see all operations for the user + + :param status: filter by status + :type status: dict | bytes + :param previous_search: continue previous search (pagination chunks) + :type previous_search: dict | bytes + :param limit: maximum number of records to return per page + :type limit: int + + :rtype: OperationSearchResult + """ + + argo_raw_result = argo.get_workflows(status, limit=limit, continue_token=continue_token) + result = OperationSearchResult() + result.items = tuple(argo_workflow_to_operation(item) for item in argo_raw_result.items) + result.meta = SearchResultData(continue_token=argo_raw_result.continue_token) + return result + + +def log_operation(name:str) -> str: + """get operation logs + :param name: workflow name + :rtype: str + """ + + return argo.get_workflow_logs(name) \ No newline at end of file diff --git a/applications/workflows/src/workflows_api/test/__init__.py b/applications/workflows/src/workflows_api/test/__init__.py new file mode 100644 index 00000000..1f7852ce --- /dev/null +++ b/applications/workflows/src/workflows_api/test/__init__.py @@ -0,0 +1,16 @@ +import logging + +import 
connexion +from flask_testing import TestCase + +from workflows_api.encoder import JSONEncoder + + +class BaseTestCase(TestCase): + + def create_app(self): + logging.getLogger('connexion.operation').setLevel('ERROR') + app = connexion.App(__name__, specification_dir='../openapi/') + app.app.json_encoder = JSONEncoder + app.add_api('openapi.yaml', pythonic_params=True) + return app.app diff --git a/applications/workflows/src/workflows_api/test/test_create_and_access_controller.py b/applications/workflows/src/workflows_api/test/test_create_and_access_controller.py new file mode 100644 index 00000000..04b7ca3b --- /dev/null +++ b/applications/workflows/src/workflows_api/test/test_create_and_access_controller.py @@ -0,0 +1,83 @@ +# coding: utf-8 + +from __future__ import absolute_import +import unittest + +from flask import json +from six import BytesIO + +from workflows_api.models.operation import Operation # noqa: E501 +from workflows_api.models.operation_search_result import OperationSearchResult # noqa: E501 +from workflows_api.models.operation_status import OperationStatus # noqa: E501 +from workflows_api.test import BaseTestCase + + +class TestCreateAndAccessController(BaseTestCase): + """CreateAndAccessController integration test stubs""" + + def test_delete_operation(self): + """Test case for delete_operation + + deletes operation by name + """ + headers = { + } + response = self.client.open( + '/operations/{name}'.format(name='name_example'), + method='DELETE', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + def test_get_operation(self): + """Test case for get_operation + + get operation by name + """ + headers = { + 'Accept': 'application/json', + } + response = self.client.open( + '/operations/{name}'.format(name='name_example'), + method='GET', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + def test_list_operations(self): + """Test case for list_operations + + lists operations + """ + query_string = [('status', QUEUED), + ('previous_search_token', 'previous_search_token_example'), + ('limit', 10)] + headers = { + 'Accept': 'application/json', + } + response = self.client.open( + '/operations', + method='GET', + headers=headers, + query_string=query_string) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + def test_log_operation(self): + """Test case for log_operation + + get operation by name + """ + headers = { + 'Accept': 'text/plain', + } + response = self.client.open( + '/operations/{name}/logs'.format(name='name_example'), + method='GET', + headers=headers) + self.assert200(response, + 'Response body is : ' + response.data.decode('utf-8')) + + +if __name__ == '__main__': + unittest.main() diff --git a/applications/workflows/src/workflows_api/typing_utils.py b/applications/workflows/src/workflows_api/typing_utils.py new file mode 100644 index 00000000..0563f81f --- /dev/null +++ b/applications/workflows/src/workflows_api/typing_utils.py @@ -0,0 +1,32 @@ +# coding: utf-8 + +import sys + +if sys.version_info < (3, 7): + import typing + + def is_generic(klass): + """ Determine whether klass is a generic class """ + return type(klass) == typing.GenericMeta + + def is_dict(klass): + """ Determine whether klass is a Dict """ + return klass.__extra__ == dict + + def is_list(klass): + """ Determine whether klass is a List """ + return klass.__extra__ == list + +else: + + def is_generic(klass): + """ Determine whether klass is a 
generic class """ + return hasattr(klass, '__origin__') + + def is_dict(klass): + """ Determine whether klass is a Dict """ + return klass.__origin__ == dict + + def is_list(klass): + """ Determine whether klass is a List """ + return klass.__origin__ == list diff --git a/applications/workflows/src/workflows_api/util.py b/applications/workflows/src/workflows_api/util.py new file mode 100644 index 00000000..4c21578d --- /dev/null +++ b/applications/workflows/src/workflows_api/util.py @@ -0,0 +1,142 @@ +import datetime + +import six +import typing +from workflows_api import typing_utils + + +def _deserialize(data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool, bytearray): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif typing_utils.is_generic(klass): + if typing_utils.is_list(klass): + return _deserialize_list(data, klass.__args__[0]) + if typing_utils.is_dict(klass): + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return an original value. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. + :rtype: datetime + """ + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. + """ + instance = klass() + + if not instance.openapi_types: + return data + + for attr, attr_type in six.iteritems(instance.openapi_types): + if data is not None \ + and instance.attribute_map[attr] in data \ + and isinstance(data, (list, dict)): + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. 
+    :rtype: dict
+    """
+    return {k: _deserialize(v, boxed_type)
+            for k, v in six.iteritems(data)}
diff --git a/applications/workflows/tasks/extract-download/Dockerfile b/applications/workflows/tasks/extract-download/Dockerfile
new file mode 100644
index 00000000..a22a9339
--- /dev/null
+++ b/applications/workflows/tasks/extract-download/Dockerfile
@@ -0,0 +1,8 @@
+FROM python:3.7-alpine
+
+ADD . /
+
+ENV url 'https://www.google.com'
+ENV shared_directory /
+
+CMD python main.py $url $shared_directory
\ No newline at end of file
diff --git a/applications/workflows/tasks/extract-download/main.py b/applications/workflows/tasks/extract-download/main.py
new file mode 100644
index 00000000..efec501a
--- /dev/null
+++ b/applications/workflows/tasks/extract-download/main.py
@@ -0,0 +1,16 @@
+import urllib.request
+import sys
+import logging
+import os
+
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+
+assert len(sys.argv) > 2, 'Arguments not specified. Cannot download'
+
+url = sys.argv[1]
+download_path = sys.argv[2]
+
+dest = os.path.join(download_path, url.split('/')[-1])
+logging.info("Downloading {} to {}".format(url, dest))
+
+urllib.request.urlretrieve(url, dest)
diff --git a/applications/workflows/tasks/extract-s3/Dockerfile b/applications/workflows/tasks/extract-s3/Dockerfile
new file mode 100644
index 00000000..bfb00bf1
--- /dev/null
+++ b/applications/workflows/tasks/extract-s3/Dockerfile
@@ -0,0 +1 @@
+FROM alpine:3.7
\ No newline at end of file
diff --git a/applications/workflows/tasks/print-file/Dockerfile b/applications/workflows/tasks/print-file/Dockerfile
new file mode 100644
index 00000000..e25bd90f
--- /dev/null
+++ b/applications/workflows/tasks/print-file/Dockerfile
@@ -0,0 +1,7 @@
+FROM python:3.7-alpine
+
+ADD . /
+
+ENV file_path /
+
+CMD python ./main.py $file_path
\ No newline at end of file
diff --git a/applications/workflows/tasks/print-file/main.py b/applications/workflows/tasks/print-file/main.py
new file mode 100644
index 00000000..d93a984c
--- /dev/null
+++ b/applications/workflows/tasks/print-file/main.py
@@ -0,0 +1,15 @@
+import urllib.request
+import sys
+import logging
+import os
+
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+
+assert len(sys.argv) > 1, 'File path not specified'
+
+file_path = sys.argv[1]
+
+logging.info("Displaying content for file {}".format(file_path))
+assert os.path.exists(file_path), file_path + " does not exist."
+with open(file_path) as f:
+    print(f.read())
diff --git a/applications/workflows/tasks/send-result-event/Dockerfile b/applications/workflows/tasks/send-result-event/Dockerfile
new file mode 100644
index 00000000..19bb7d61
--- /dev/null
+++ b/applications/workflows/tasks/send-result-event/Dockerfile
@@ -0,0 +1,9 @@
+ARG REGISTRY=r.cfcr.io/tarelli/
+ARG TAG=latest
+FROM ${REGISTRY}cloudharness-base:${TAG}
+
+ADD . 
/ + +ENV shared_directory /mnt/shared + +CMD python main.py $shared_directory \ No newline at end of file diff --git a/applications/workflows/tasks/send-result-event/main.py b/applications/workflows/tasks/send-result-event/main.py new file mode 100644 index 00000000..21134fa7 --- /dev/null +++ b/applications/workflows/tasks/send-result-event/main.py @@ -0,0 +1,44 @@ +import sys +import os +print("Starting send-result-event") +import glob +from cloudharness import log, set_debug +MAX_FILE_SIZE = 2 ** 20 # 1MB + +from cloudharness.events.client import EventClient +from cloudharness.workflows.utils import get_workflow_name +set_debug() + + +topic_name = get_workflow_name() # Coming from the workflow name + +log.info("Topic name is: " + topic_name) + +assert len(sys.argv) > 1, 'Specify read path' + + +shared_directory = sys.argv[1] + +log.info("Sending content of directory `{}` to event queue topic `{}`".format(shared_directory, topic_name)) + +client = EventClient(topic_name) + +assert os.path.exists(shared_directory), shared_directory + " does not exist." + +for file_path in glob.glob(f"{shared_directory}/*"): + log.info("File `{}`".format(file_path)) + size = os.path.getsize(file_path) + if size > MAX_FILE_SIZE: + log.warning(f"{file_path} size is {size}, which is greater than the maximum of {MAX_FILE_SIZE}." + "The content will not be sent to the queue") + client.produce({file_path: "Error: size exceeded"}) + + log.info("Sending content for file `{}`".format(file_path)) + try: + with open(file_path) as f: + content = f.read() + except Exception as e: + log.error("Error reading file " + file_path + " " + str(e)) + continue + + client.produce({os.path.basename(file_path): content}) diff --git a/applications/workflows/tasks/send-result-event/requirements.txt b/applications/workflows/tasks/send-result-event/requirements.txt new file mode 100644 index 00000000..239cfb9d --- /dev/null +++ b/applications/workflows/tasks/send-result-event/requirements.txt @@ -0,0 +1 @@ +cloudharness_core \ No newline at end of file diff --git a/blueprint/applications/README.md b/blueprint/applications/README.md new file mode 100644 index 00000000..675143b3 --- /dev/null +++ b/blueprint/applications/README.md @@ -0,0 +1,4 @@ +# Custom Applications + +Put here applications intended to run on the cluster. +See [here](../../applications/README.md) to learn about how to define an application. diff --git a/client/cloudharness_cli/README.md b/client/cloudharness_cli/README.md new file mode 100644 index 00000000..d3290b84 --- /dev/null +++ b/client/cloudharness_cli/README.md @@ -0,0 +1,207 @@ +# cloudharness-cli.workflows +Workflows API + +This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: 0.1.0 +- Package version: 1.0.0 +- Build package: org.openapitools.codegen.languages.PythonClientCodegen + +## Requirements. + +Python 2.7 and 3.4+ + +## Installation & Usage +### pip install + +If the python package is hosted on a repository, you can install directly using: + +```sh +pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) + +Then import the package: +```python +import cloudharness_cli.workflows +``` + +### Setuptools + +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). 
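+
+Either route installs the same `cloudharness_cli` package. As a quick, hypothetical smoke test (not part of the generated docs), you can page through operations using the continue token returned in `meta`:
+
+```python
+import cloudharness_cli.workflows as workflows
+
+configuration = workflows.Configuration()
+configuration.host = "https://workflows.cloudharness.metacell.us"
+
+with workflows.ApiClient(configuration) as api_client:
+    api = workflows.CreateAndAccessApi(api_client)
+    token = None
+    while True:
+        # previous_search_token resumes a paged search; limit caps the page size
+        page = api.list_operations(previous_search_token=token, limit=10)
+        for op in page.items or []:
+            print(op.name, op.status)
+        token = page.meta.continue_token if page.meta else None
+        if not token:
+            break
+```
+
+The Setuptools command is: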
+ +```sh +python setup.py install --user +``` +(or `sudo python setup.py install` to install the package for all users) + +Then import the package: +```python +import cloudharness_cli.workflows +``` + +## Getting Started + +Please follow the [installation procedure](#installation--usage) and then run the following: + +```python +from __future__ import print_function +import time +import cloudharness_cli.workflows +from cloudharness_cli.workflows.rest import ApiException +from pprint import pprint + + +# Defining host is optional and default to https://workflows.cloudharness.metacell.us +configuration.host = "https://workflows.cloudharness.metacell.us" +# Enter a context with an instance of the API client +with cloudharness_cli.workflows.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client) + name = 'name_example' # str | + + try: + # deletes operation by name + api_instance.delete_operation(name) + except ApiException as e: + print("Exception when calling CreateAndAccessApi->delete_operation: %s\n" % e) + +``` + +## Documentation for API Endpoints + +All URIs are relative to *https://workflows.cloudharness.metacell.us* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*CreateAndAccessApi* | [**delete_operation**](docs/workflows/CreateAndAccessApi.md#delete_operation) | **DELETE** /operations/{name} | deletes operation by name +*CreateAndAccessApi* | [**get_operation**](docs/workflows/CreateAndAccessApi.md#get_operation) | **GET** /operations/{name} | get operation by name +*CreateAndAccessApi* | [**list_operations**](docs/workflows/CreateAndAccessApi.md#list_operations) | **GET** /operations | lists operations +*CreateAndAccessApi* | [**log_operation**](docs/workflows/CreateAndAccessApi.md#log_operation) | **GET** /operations/{name}/logs | get operation by name + + +## Documentation For Models + + - [Operation](docs/workflows/Operation.md) + - [OperationSearchResult](docs/workflows/OperationSearchResult.md) + - [OperationStatus](docs/workflows/OperationStatus.md) + - [SearchResultData](docs/workflows/SearchResultData.md) + + +## Documentation For Authorization + + All endpoints do not require authorization. + +## Author + +cloudharness@metacell.us + + +# cloudharness-cli.samples +CloudHarness Sample api + +This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: 0.1.0 +- Package version: 1.0.0 +- Build package: org.openapitools.codegen.languages.PythonClientCodegen + +## Requirements. + +Python 2.7 and 3.4+ + +## Installation & Usage +### pip install + +If the python package is hosted on a repository, you can install directly using: + +```sh +pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) + +Then import the package: +```python +import cloudharness_cli.samples +``` + +### Setuptools + +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). 
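+
+Once installed, the sample endpoints can be driven directly. A minimal sketch of the cloud sum (`/operation_sync_results` takes two numbers and returns the result as a string; no bearer token is required for it):
+
+```python
+import cloudharness_cli.samples as samples
+
+configuration = samples.Configuration()
+configuration.host = "https://samples.cloudharness.metacell.us/api"
+
+with samples.ApiClient(configuration) as api_client:
+    api = samples.WorkflowsApi(api_client)
+    # a and b are the two addends; the sum is computed by a workflow in the cluster
+    print(api.submit_sync_with_results(a=1, b=2))
+```
+
+The Setuptools install command follows: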
+ +```sh +python setup.py install --user +``` +(or `sudo python setup.py install` to install the package for all users) + +Then import the package: +```python +import cloudharness_cli.samples +``` + +## Getting Started + +Please follow the [installation procedure](#installation--usage) and then run the following: + +```python +from __future__ import print_function +import time +import cloudharness_cli.samples +from cloudharness_cli.samples.rest import ApiException +from pprint import pprint + +configuration = cloudharness_cli.samples.Configuration() +# Configure Bearer authorization (JWT): bearerAuth +configuration.access_token = 'YOUR_BEARER_TOKEN' + +# Defining host is optional and default to https://samples.cloudharness.metacell.us/api +configuration.host = "https://samples.cloudharness.metacell.us/api" + +# Defining host is optional and default to https://samples.cloudharness.metacell.us/api +configuration.host = "https://samples.cloudharness.metacell.us/api" +# Enter a context with an instance of the API client +with cloudharness_cli.samples.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.samples.AuthApi(api_client) + + try: + # Check if the token is valid. Get a token by logging into the base url + api_response = api_instance.valid_token() + pprint(api_response) + except ApiException as e: + print("Exception when calling AuthApi->valid_token: %s\n" % e) + +``` + +## Documentation for API Endpoints + +All URIs are relative to *https://samples.cloudharness.metacell.us/api* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*AuthApi* | [**valid_token**](docs/samples/AuthApi.md#valid_token) | **GET** /valid | Check if the token is valid. Get a token by logging into the base url +*WorkflowsApi* | [**submit_async**](docs/samples/WorkflowsApi.md#submit_async) | **GET** /operation_async | Send an asynchronous operation +*WorkflowsApi* | [**submit_sync**](docs/samples/WorkflowsApi.md#submit_sync) | **GET** /operation_sync | Send a synchronous operation +*WorkflowsApi* | [**submit_sync_with_results**](docs/samples/WorkflowsApi.md#submit_sync_with_results) | **GET** /operation_sync_results | Send a synchronous operation and get results using the event queue. 
Just a sum, but in the cloud + + +## Documentation For Models + + - [InlineResponse202](docs/samples/InlineResponse202.md) + - [InlineResponse202Task](docs/samples/InlineResponse202Task.md) + - [Valid](docs/samples/Valid.md) + + +## Documentation For Authorization + + +## bearerAuth + +- **Type**: Bearer authentication (JWT) + + +## Author + +cloudharness@metacell.us + + diff --git a/client/cloudharness_cli/cloudharness_cli.egg-info/PKG-INFO b/client/cloudharness_cli/cloudharness_cli.egg-info/PKG-INFO new file mode 100644 index 00000000..7a3788ad --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli.egg-info/PKG-INFO @@ -0,0 +1,12 @@ +Metadata-Version: 1.0 +Name: cloudharness-cli +Version: 0.1.0 +Summary: CloudHarness Python API Client +Home-page: UNKNOWN +Author: OpenAPI Generator community +Author-email: cloudharness@metacell.us +License: UNLICENSED +Description: CloudHarness Python API Client # noqa: E501 + +Keywords: OpenAPI,CloudHarness Sample API +Platform: UNKNOWN diff --git a/client/cloudharness_cli/cloudharness_cli.egg-info/SOURCES.txt b/client/cloudharness_cli/cloudharness_cli.egg-info/SOURCES.txt new file mode 100644 index 00000000..8f65176a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli.egg-info/SOURCES.txt @@ -0,0 +1,32 @@ +README.md +setup.py +cloudharness_cli/__init__.py +cloudharness_cli.egg-info/PKG-INFO +cloudharness_cli.egg-info/SOURCES.txt +cloudharness_cli.egg-info/dependency_links.txt +cloudharness_cli.egg-info/requires.txt +cloudharness_cli.egg-info/top_level.txt +cloudharness_cli/samples/__init__.py +cloudharness_cli/samples/api_client.py +cloudharness_cli/samples/configuration.py +cloudharness_cli/samples/exceptions.py +cloudharness_cli/samples/rest.py +cloudharness_cli/samples/api/__init__.py +cloudharness_cli/samples/api/auth_api.py +cloudharness_cli/samples/api/workflows_api.py +cloudharness_cli/samples/models/__init__.py +cloudharness_cli/samples/models/inline_response202.py +cloudharness_cli/samples/models/inline_response202_task.py +cloudharness_cli/samples/models/valid.py +cloudharness_cli/workflows/__init__.py +cloudharness_cli/workflows/api_client.py +cloudharness_cli/workflows/configuration.py +cloudharness_cli/workflows/exceptions.py +cloudharness_cli/workflows/rest.py +cloudharness_cli/workflows/api/__init__.py +cloudharness_cli/workflows/api/create_and_access_api.py +cloudharness_cli/workflows/models/__init__.py +cloudharness_cli/workflows/models/operation.py +cloudharness_cli/workflows/models/operation_search_result.py +cloudharness_cli/workflows/models/operation_status.py +cloudharness_cli/workflows/models/search_result_data.py \ No newline at end of file diff --git a/client/cloudharness_cli/cloudharness_cli.egg-info/dependency_links.txt b/client/cloudharness_cli/cloudharness_cli.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/client/cloudharness_cli/cloudharness_cli.egg-info/requires.txt b/client/cloudharness_cli/cloudharness_cli.egg-info/requires.txt new file mode 100644 index 00000000..17318e9a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli.egg-info/requires.txt @@ -0,0 +1,4 @@ +urllib3>=1.15 +six>=1.10 +certifi +python-dateutil diff --git a/client/cloudharness_cli/cloudharness_cli.egg-info/top_level.txt b/client/cloudharness_cli/cloudharness_cli.egg-info/top_level.txt new file mode 100644 index 00000000..beb24983 --- /dev/null +++ 
b/client/cloudharness_cli/cloudharness_cli.egg-info/top_level.txt @@ -0,0 +1 @@ +cloudharness_cli diff --git a/client/cloudharness_cli/cloudharness_cli/__init__.py b/client/cloudharness_cli/cloudharness_cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/client/cloudharness_cli/cloudharness_cli/samples/__init__.py b/client/cloudharness_cli/cloudharness_cli/samples/__init__.py new file mode 100644 index 00000000..cd1da8f3 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/__init__.py @@ -0,0 +1,36 @@ +# coding: utf-8 + +# flake8: noqa + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +__version__ = "1.0.0" + +# import apis into sdk package +from cloudharness_cli.samples.api.auth_api import AuthApi +from cloudharness_cli.samples.api.workflows_api import WorkflowsApi + +# import ApiClient +from cloudharness_cli.samples.api_client import ApiClient +from cloudharness_cli.samples.configuration import Configuration +from cloudharness_cli.samples.exceptions import OpenApiException +from cloudharness_cli.samples.exceptions import ApiTypeError +from cloudharness_cli.samples.exceptions import ApiValueError +from cloudharness_cli.samples.exceptions import ApiKeyError +from cloudharness_cli.samples.exceptions import ApiException +# import models into sdk package +from cloudharness_cli.samples.models.inline_response202 import InlineResponse202 +from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task +from cloudharness_cli.samples.models.valid import Valid + diff --git a/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py b/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py new file mode 100644 index 00000000..6875c462 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/api/__init__.py @@ -0,0 +1,7 @@ +from __future__ import absolute_import + +# flake8: noqa + +# import apis into api package +from cloudharness_cli.samples.api.auth_api import AuthApi +from cloudharness_cli.samples.api.workflows_api import WorkflowsApi diff --git a/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py b/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py new file mode 100644 index 00000000..be15843a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/api/auth_api.py @@ -0,0 +1,143 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from cloudharness_cli.samples.api_client import ApiClient +from cloudharness_cli.samples.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class AuthApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def valid_token(self, **kwargs): # noqa: E501 + """Check if the token is valid. 
Get a token by logging into the base url # noqa: E501 + + Check if the token is valid # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.valid_token(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: list[Valid] + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + return self.valid_token_with_http_info(**kwargs) # noqa: E501 + + def valid_token_with_http_info(self, **kwargs): # noqa: E501 + """Check if the token is valid. Get a token by logging into the base url # noqa: E501 + + Check if the token is valid # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.valid_token_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(list[Valid], status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. 
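+
+        A sketch of the plain synchronous form (``api`` is a configured
+        ``AuthApi`` instance, as in the package README):
+
+        >>> data, status, headers = api.valid_token_with_http_info()
+        >>> status  # 200 when the bearer token is accepted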
+ """ + + local_var_params = locals() + + all_params = [ + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method valid_token" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['bearerAuth'] # noqa: E501 + + return self.api_client.call_api( + '/valid', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='list[Valid]', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py b/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py new file mode 100644 index 00000000..cedd3219 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/api/workflows_api.py @@ -0,0 +1,357 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from cloudharness_cli.samples.api_client import ApiClient +from cloudharness_cli.samples.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class WorkflowsApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def submit_async(self, **kwargs): # noqa: E501 + """Send an asynchronous operation # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_async(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: InlineResponse202 + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + return self.submit_async_with_http_info(**kwargs) # noqa: E501 + + def submit_async_with_http_info(self, **kwargs): # noqa: E501 + """Send an asynchronous operation # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_async_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(InlineResponse202, status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method submit_async" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operation_async', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='InlineResponse202', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def submit_sync(self, **kwargs): # noqa: E501 + """Send a synchronous operation # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_sync(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: str + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + return self.submit_sync_with_http_info(**kwargs) # noqa: E501 + + def submit_sync_with_http_info(self, **kwargs): # noqa: E501 + """Send a synchronous operation # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_sync_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(str, status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method submit_sync" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operation_sync', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='str', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def submit_sync_with_results(self, **kwargs): # noqa: E501 + """Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_sync_with_results(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param float a: first number to sum + :param float b: second number to sum + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: str + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + return self.submit_sync_with_results_with_http_info(**kwargs) # noqa: E501 + + def submit_sync_with_results_with_http_info(self, **kwargs): # noqa: E501 + """Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.submit_sync_with_results_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param float a: first number to sum + :param float b: second number to sum + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(str, status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + 'a', + 'b' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method submit_sync_with_results" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'a' in local_var_params and local_var_params['a'] is not None: # noqa: E501 + query_params.append(('a', local_var_params['a'])) # noqa: E501 + if 'b' in local_var_params and local_var_params['b'] is not None: # noqa: E501 + query_params.append(('b', local_var_params['b'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operation_sync_results', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='str', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/client/cloudharness_cli/cloudharness_cli/samples/api_client.py b/client/cloudharness_cli/cloudharness_cli/samples/api_client.py new file mode 100644 index 00000000..beb1ab2c --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/api_client.py @@ -0,0 +1,648 @@ +# coding: utf-8 +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import atexit +import datetime +from dateutil.parser import parse +import json +import mimetypes +from multiprocessing.pool import ThreadPool +import os +import re +import tempfile + +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import quote + +from cloudharness_cli.samples.configuration import Configuration +import cloudharness_cli.samples.models +from cloudharness_cli.samples import rest +from 
cloudharness_cli.samples.exceptions import ApiValueError + + +class ApiClient(object): + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int if six.PY3 else long, # noqa: F821 + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=1): + if configuration is None: + configuration = Configuration.get_default_copy() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if hasattr(atexit, 'unregister'): + atexit.unregister(self.close) + + @property + def pool(self): + """Create thread pool on first request + avoids instantiating unused threadpool for blocking clients. 
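+
+        :return: The shared ``ThreadPool``, created lazily with
+            ``pool_threads`` workers and closed again through ``atexit``.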
+ """ + if self._pool is None: + atexit.register(self.close) + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + def __call_api( + self, resource_path, method, path_params=None, + query_params=None, header_params=None, body=None, post_params=None, + files=None, response_type=None, auth_settings=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth(header_params, query_params, auth_settings) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + # perform request and return response + response_data = self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + + self.last_response = response_data + + return_data = response_data + if _preload_content: + # deserialize response data + if response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None + + if _return_http_data_only: + return (return_data) + else: + return (return_data, response_data.status, + response_data.getheaders()) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. 
+ """ + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) + for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) + for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + + if isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in six.iteritems(obj.openapi_types) + if getattr(obj, attr) is not None} + + return {key: self.sanitize_for_serialization(val) + for key, val in six.iteritems(obj_dict)} + + def deserialize(self, response, response_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + + :return: deserialized object. + """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == "file": + return self.__deserialize_file(response) + + # fetch data from response object + try: + data = json.loads(response.data) + except ValueError: + data = response.data + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if type(klass) == str: + if klass.startswith('list['): + sub_kls = re.match(r'list\[(.*)\]', klass).group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('dict('): + sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in six.iteritems(data)} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(cloudharness_cli.samples.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + else: + return self.__deserialize_model(data, klass) + + def call_api(self, resource_path, method, + path_params=None, query_params=None, header_params=None, + body=None, post_params=None, files=None, + response_type=None, auth_settings=None, async_req=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + """Makes the HTTP request (synchronous) and returns deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. + :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. 
+ :param auth_settings list: Auth Settings names for the request. + :param response: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. + """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, _host) + + return self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, + query_params, + header_params, body, + post_params, files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host)) + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Makes the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + else: + raise ApiValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. 
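+
+        For example, the ``csv`` collection format joins values with
+        commas, while ``multi`` emits one tuple per value (``client``
+        below stands for any ApiClient instance):
+
+        >>> client.parameters_to_tuples({'ids': [1, 2, 3]}, {'ids': 'csv'})
+        [('ids', '1,2,3')]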
+ + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def files_parameters(self, files=None): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + + if files: + for k, v in six.iteritems(files): + if not v: + continue + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + tuple([k, tuple([filename, filedata, mimetype])])) + + return params + + def select_header_accept(self, accepts): + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, querys, auth_settings): + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + :param response: RESTResponse. + :return: file path. 
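+
+        Note that the temporary file is not removed automatically;
+        callers should delete it once the download has been consumed.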
+ """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return six.text_type(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + + if not klass.openapi_types and not hasattr(klass, + 'get_real_child_model'): + return data + + kwargs = {} + if (data is not None and + klass.openapi_types is not None and + isinstance(data, (list, dict))): + for attr, attr_type in six.iteritems(klass.openapi_types): + if klass.attribute_map[attr] in data: + value = data[klass.attribute_map[attr]] + kwargs[attr] = self.__deserialize(value, attr_type) + + instance = klass(**kwargs) + + if hasattr(instance, 'get_real_child_model'): + klass_name = instance.get_real_child_model(data) + if klass_name: + instance = self.__deserialize(data, klass_name) + return instance diff --git a/client/cloudharness_cli/cloudharness_cli/samples/configuration.py b/client/cloudharness_cli/cloudharness_cli/samples/configuration.py new file mode 100644 index 00000000..fd989905 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/configuration.py @@ -0,0 +1,386 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import copy +import logging +import multiprocessing +import sys +import urllib3 + +import six +from six.moves import http_client as httplib + + +class Configuration(object): + """NOTE: This class is auto generated by OpenAPI Generator + + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. 
Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + + :Example: + """ + + _default = None + + def __init__(self, host="https://samples.cloudharness.metacell.us/api", + api_key=None, api_key_prefix=None, + username=None, password=None, + discard_unknown_keys=False, + ): + """Constructor + """ + self.host = host + """Default Base url + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.discard_unknown_keys = discard_unknown_keys + self.access_token = None + """access token for OAuth/Bearer + """ + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("cloudharness_cli.samples") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = None + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. 
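+
+        To use a different size, assign to this attribute before the
+        ApiClient (and with it the urllib3 pool manager) is created,
+        e.g. ``configuration.connection_pool_maxsize = 20`` (an
+        illustrative value).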
+ """ + + self.proxy = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Disable client side validation + self.client_side_validation = True + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. 
+ :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + if self.access_token is not None: + auth['bearerAuth'] = { + 'type': 'bearer', + 'in': 'header', + 'format': 'JWT', + 'key': 'Authorization', + 'value': 'Bearer ' + self.access_token + } + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 0.1.0\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "https://samples.cloudharness.metacell.us/api", + 'description': "No description provided", + } + ] + + def get_host_from_settings(self, index, variables=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :return: URL based on host settings + """ + variables = {} if variables is None else variables + servers = self.get_host_settings() + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server['variables'].items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. 
Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url diff --git a/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py b/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py new file mode 100644 index 00000000..12dfeafb --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/exceptions.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import six + + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None): + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. 
None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, six.integer_types): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py b/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py new file mode 100644 index 00000000..90101012 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/models/__init__.py @@ -0,0 +1,20 @@ +# coding: utf-8 + +# flake8: noqa +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import models into model package +from cloudharness_cli.samples.models.inline_response202 import InlineResponse202 +from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task +from cloudharness_cli.samples.models.valid import Valid diff --git a/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py b/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py new file mode 100644 index 00000000..0f444a2f --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.samples.configuration import Configuration + + +class InlineResponse202(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
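+
+    A construction sketch with illustrative values:
+
+    >>> InlineResponse202(task=InlineResponse202Task(href='/operations/op-1',
+    ...                                              name='op-1'))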
+ """ + openapi_types = { + 'task': 'InlineResponse202Task' + } + + attribute_map = { + 'task': 'task' + } + + def __init__(self, task=None, local_vars_configuration=None): # noqa: E501 + """InlineResponse202 - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._task = None + self.discriminator = None + + if task is not None: + self.task = task + + @property + def task(self): + """Gets the task of this InlineResponse202. # noqa: E501 + + + :return: The task of this InlineResponse202. # noqa: E501 + :rtype: InlineResponse202Task + """ + return self._task + + @task.setter + def task(self, task): + """Sets the task of this InlineResponse202. + + + :param task: The task of this InlineResponse202. # noqa: E501 + :type: InlineResponse202Task + """ + + self._task = task + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, InlineResponse202): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, InlineResponse202): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py b/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py new file mode 100644 index 00000000..2c24fb44 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/models/inline_response202_task.py @@ -0,0 +1,149 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.samples.configuration import Configuration + + +class InlineResponse202Task(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + 'href': 'str', + 'name': 'str' + } + + attribute_map = { + 'href': 'href', + 'name': 'name' + } + + def __init__(self, href=None, name=None, local_vars_configuration=None): # noqa: E501 + """InlineResponse202Task - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._href = None + self._name = None + self.discriminator = None + + if href is not None: + self.href = href + if name is not None: + self.name = name + + @property + def href(self): + """Gets the href of this InlineResponse202Task. # noqa: E501 + + the url where to check the operation status # noqa: E501 + + :return: The href of this InlineResponse202Task. # noqa: E501 + :rtype: str + """ + return self._href + + @href.setter + def href(self, href): + """Sets the href of this InlineResponse202Task. + + the url where to check the operation status # noqa: E501 + + :param href: The href of this InlineResponse202Task. # noqa: E501 + :type: str + """ + + self._href = href + + @property + def name(self): + """Gets the name of this InlineResponse202Task. # noqa: E501 + + + :return: The name of this InlineResponse202Task. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this InlineResponse202Task. + + + :param name: The name of this InlineResponse202Task. # noqa: E501 + :type: str + """ + + self._name = name + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, InlineResponse202Task): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, InlineResponse202Task): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/samples/models/valid.py b/client/cloudharness_cli/cloudharness_cli/samples/models/valid.py new file mode 100644 index 00000000..38eb1591 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/models/valid.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.samples.configuration import Configuration + + +class Valid(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'response': 'str' + } + + attribute_map = { + 'response': 'response' + } + + def __init__(self, response=None, local_vars_configuration=None): # noqa: E501 + """Valid - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._response = None + self.discriminator = None + + if response is not None: + self.response = response + + @property + def response(self): + """Gets the response of this Valid. # noqa: E501 + + + :return: The response of this Valid. # noqa: E501 + :rtype: str + """ + return self._response + + @response.setter + def response(self, response): + """Sets the response of this Valid. + + + :param response: The response of this Valid. # noqa: E501 + :type: str + """ + + self._response = response + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, Valid): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, Valid): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/samples/rest.py b/client/cloudharness_cli/cloudharness_cli/samples/rest.py new file mode 100644 index 00000000..7eae042a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/samples/rest.py @@ -0,0 +1,297 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import io +import json +import logging +import re +import ssl + +import certifi +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import urlencode +import urllib3 + +from cloudharness_cli.samples.exceptions import ApiException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTResponse(io.IOBase): + + def __init__(self, resp): + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.urllib3_response.getheaders() + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.urllib3_response.getheader(name, default) + + +class 
RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + # ca_certs + if configuration.ssl_ca_cert: + ca_certs = configuration.ssl_ca_cert + else: + # if not set certificate file, use Mozilla's root certificates. + ca_certs = certifi.where() + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + + if configuration.retries is not None: + addition_pool_args['retries'] = configuration.retries + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy: + self.pool_manager = urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + **addition_pool_args + ) + + def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None): + """Perform requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." 
+ ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 + timeout = urllib3.Timeout(total=_request_timeout) + elif (isinstance(_request_timeout, tuple) and + len(_request_timeout) == 2): + timeout = urllib3.Timeout( + connect=_request_timeout[0], read=_request_timeout[1]) + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if query_params: + url += '?' + urlencode(query_params) + if re.search('json', headers['Content-Type'], re.IGNORECASE): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=False, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=True, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if _preload_content: + r = RESTResponse(r) + + # In the python 3, the response.data is bytes. + # we need to decode it to string. 
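+                # (This only applies when the body was preloaded; with
+                # _preload_content=False the raw urllib3 response is
+                # returned as-is for streaming or binary use.)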
+ if six.PY3: + r.data = r.data.decode('utf8') + + # log response body + logger.debug("response body: %s", r.data) + + if not 200 <= r.status <= 299: + raise ApiException(http_resp=r) + + return r + + def GET(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("HEAD", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def OPTIONS(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None): + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def POST(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PATCH(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py b/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py new file mode 100644 index 00000000..087f2cae --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/__init__.py @@ -0,0 +1,36 @@ +# coding: utf-8 + +# flake8: noqa + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +__version__ = "1.0.0" + +# import apis into sdk package +from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi + +# import ApiClient +from cloudharness_cli.workflows.api_client import ApiClient +from cloudharness_cli.workflows.configuration import Configuration +from cloudharness_cli.workflows.exceptions import OpenApiException +from cloudharness_cli.workflows.exceptions import ApiTypeError +from cloudharness_cli.workflows.exceptions import ApiValueError +from cloudharness_cli.workflows.exceptions import ApiKeyError +from cloudharness_cli.workflows.exceptions import ApiException +# import models into sdk package 
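+# A minimal synchronous usage sketch (the host is the generated default;
+# the call and its parameters are illustrative):
+#
+#     from cloudharness_cli.workflows import ApiClient, Configuration
+#     from cloudharness_cli.workflows.api import CreateAndAccessApi
+#
+#     config = Configuration(host="https://workflows.cloudharness.metacell.us")
+#     api = CreateAndAccessApi(ApiClient(config))
+#     print(api.list_operations(limit=10))
+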
+from cloudharness_cli.workflows.models.operation import Operation +from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult +from cloudharness_cli.workflows.models.operation_status import OperationStatus +from cloudharness_cli.workflows.models.search_result_data import SearchResultData + diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py b/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py new file mode 100644 index 00000000..1fca77c4 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/api/__init__.py @@ -0,0 +1,6 @@ +from __future__ import absolute_import + +# flake8: noqa + +# import apis into api package +from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py b/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py new file mode 100644 index 00000000..895914e7 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/api/create_and_access_api.py @@ -0,0 +1,500 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import re # noqa: F401 + +# python 2 and python 3 compatibility library +import six + +from cloudharness_cli.workflows.api_client import ApiClient +from cloudharness_cli.workflows.exceptions import ( # noqa: F401 + ApiTypeError, + ApiValueError +) + + +class CreateAndAccessApi(object): + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None): + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def delete_operation(self, name, **kwargs): # noqa: E501 + """deletes operation by name # noqa: E501 + + delete operation by its name # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_operation(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: None + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + return self.delete_operation_with_http_info(name, **kwargs) # noqa: E501 + + def delete_operation_with_http_info(self, name, **kwargs): # noqa: E501 + """deletes operation by name # noqa: E501 + + delete operation by its name # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.delete_operation_with_http_info(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: None + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + 'name' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_operation" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'name' is set + if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 + local_var_params['name'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `name` when calling `delete_operation`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'name' in local_var_params: + path_params['name'] = local_var_params['name'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operations/{name}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type=None, # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_operation(self, name, **kwargs): # noqa: E501 + """get operation by name # noqa: E501 + + retrieves an operation by its name # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_operation(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: list[Operation] + If the method is called asynchronously, + returns the request thread. 
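+
+        A synchronous call (the default) returns the deserialized result
+        directly; the operation name below is illustrative:
+        >>> operations = api.get_operation('my-operation-name')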
+ """ + kwargs['_return_http_data_only'] = True + return self.get_operation_with_http_info(name, **kwargs) # noqa: E501 + + def get_operation_with_http_info(self, name, **kwargs): # noqa: E501 + """get operation by name # noqa: E501 + + retrieves an operation by its name # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_operation_with_http_info(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(list[Operation], status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + 'name' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method get_operation" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'name' is set + if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 + local_var_params['name'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `name` when calling `get_operation`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'name' in local_var_params: + path_params['name'] = local_var_params['name'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operations/{name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='list[Operation]', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_operations(self, **kwargs): # noqa: E501 + """lists operations # noqa: E501 + + see all operations for the user # noqa: E501 + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_operations(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param OperationStatus status: filter by status + :param str previous_search_token: continue previous search (pagination chunks) + :param int limit: maximum number of records to return per page + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: OperationSearchResult + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + return self.list_operations_with_http_info(**kwargs) # noqa: E501 + + def list_operations_with_http_info(self, **kwargs): # noqa: E501 + """lists operations # noqa: E501 + + see all operations for the user # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_operations_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param OperationStatus status: filter by status + :param str previous_search_token: continue previous search (pagination chunks) + :param int limit: maximum number of records to return per page + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(OperationSearchResult, status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. 
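+
+        A pagination sketch (values illustrative; the continuation token is
+        taken from the previous response's metadata):
+        >>> result, status, headers = api.list_operations_with_http_info(limit=10)
+        >>> more = api.list_operations(limit=10, previous_search_token=token)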
+ """ + + local_var_params = locals() + + all_params = [ + 'status', + 'previous_search_token', + 'limit' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method list_operations" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + + if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 50: # noqa: E501 + raise ApiValueError("Invalid value for parameter `limit` when calling `list_operations`, must be a value less than or equal to `50`") # noqa: E501 + if self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1: # noqa: E501 + raise ApiValueError("Invalid value for parameter `limit` when calling `list_operations`, must be a value greater than or equal to `1`") # noqa: E501 + collection_formats = {} + + path_params = {} + + query_params = [] + if 'status' in local_var_params and local_var_params['status'] is not None: # noqa: E501 + query_params.append(('status', local_var_params['status'])) # noqa: E501 + if 'previous_search_token' in local_var_params and local_var_params['previous_search_token'] is not None: # noqa: E501 + query_params.append(('previous_search_token', local_var_params['previous_search_token'])) # noqa: E501 + if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501 + query_params.append(('limit', local_var_params['limit'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operations', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='OperationSearchResult', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) + + def log_operation(self, name, **kwargs): # noqa: E501 + """get operation by name # noqa: E501 + + retrieves an operation log by its name # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.log_operation(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: str + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + return self.log_operation_with_http_info(name, **kwargs) # noqa: E501 + + def log_operation_with_http_info(self, name, **kwargs): # noqa: E501 + """get operation by name # noqa: E501 + + retrieves an operation log by its name # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.log_operation_with_http_info(name, async_req=True) + >>> result = thread.get() + + :param async_req bool: execute request asynchronously + :param str name: (required) + :param _return_http_data_only: response data without head status code + and headers + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: tuple(str, status_code(int), headers(HTTPHeaderDict)) + If the method is called asynchronously, + returns the request thread. + """ + + local_var_params = locals() + + all_params = [ + 'name' + ] + all_params.extend( + [ + 'async_req', + '_return_http_data_only', + '_preload_content', + '_request_timeout' + ] + ) + + for key, val in six.iteritems(local_var_params['kwargs']): + if key not in all_params: + raise ApiTypeError( + "Got an unexpected keyword argument '%s'" + " to method log_operation" % key + ) + local_var_params[key] = val + del local_var_params['kwargs'] + # verify the required parameter 'name' is set + if self.api_client.client_side_validation and ('name' not in local_var_params or # noqa: E501 + local_var_params['name'] is None): # noqa: E501 + raise ApiValueError("Missing the required parameter `name` when calling `log_operation`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'name' in local_var_params: + path_params['name'] = local_var_params['name'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['text/plain']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/operations/{name}/logs', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='str', # noqa: E501 + auth_settings=auth_settings, + async_req=local_var_params.get('async_req'), + _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501 + _preload_content=local_var_params.get('_preload_content', True), + _request_timeout=local_var_params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py b/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py new file mode 100644 index 00000000..2ed5022a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/api_client.py @@ -0,0 +1,648 @@ +# coding: utf-8 +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + +from __future__ import absolute_import + +import atexit +import datetime +from dateutil.parser import parse +import json +import mimetypes +from 
multiprocessing.pool import ThreadPool +import os +import re +import tempfile + +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import quote + +from cloudharness_cli.workflows.configuration import Configuration +import cloudharness_cli.workflows.models +from cloudharness_cli.workflows import rest +from cloudharness_cli.workflows.exceptions import ApiValueError + + +class ApiClient(object): + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int if six.PY3 else long, # noqa: F821 + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'object': object, + } + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=1): + if configuration is None: + configuration = Configuration.get_default_copy() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if hasattr(atexit, 'unregister'): + atexit.unregister(self.close) + + @property + def pool(self): + """Create thread pool on first request + avoids instantiating unused threadpool for blocking clients. 
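+
+        The pool size follows the pool_threads constructor argument, so
+        e.g. ApiClient(pool_threads=4) (illustrative) allows four
+        concurrent async_req calls.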
+ """ + if self._pool is None: + atexit.register(self.close) + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + def __call_api( + self, resource_path, method, path_params=None, + query_params=None, header_params=None, body=None, post_params=None, + files=None, response_type=None, auth_settings=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth(header_params, query_params, auth_settings) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + # perform request and return response + response_data = self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + + self.last_response = response_data + + return_data = response_data + if _preload_content: + # deserialize response data + if response_type: + return_data = self.deserialize(response_data, response_type) + else: + return_data = None + + if _return_http_data_only: + return (return_data) + else: + return (return_data, response_data.status, + response_data.getheaders()) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. 
+ """ + if obj is None: + return None + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [self.sanitize_for_serialization(sub_obj) + for sub_obj in obj] + elif isinstance(obj, tuple): + return tuple(self.sanitize_for_serialization(sub_obj) + for sub_obj in obj) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + + if isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in six.iteritems(obj.openapi_types) + if getattr(obj, attr) is not None} + + return {key: self.sanitize_for_serialization(val) + for key, val in six.iteritems(obj_dict)} + + def deserialize(self, response, response_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + + :return: deserialized object. + """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == "file": + return self.__deserialize_file(response) + + # fetch data from response object + try: + data = json.loads(response.data) + except ValueError: + data = response.data + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if type(klass) == str: + if klass.startswith('list['): + sub_kls = re.match(r'list\[(.*)\]', klass).group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('dict('): + sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in six.iteritems(data)} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(cloudharness_cli.workflows.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + else: + return self.__deserialize_model(data, klass) + + def call_api(self, resource_path, method, + path_params=None, query_params=None, header_params=None, + body=None, post_params=None, files=None, + response_type=None, auth_settings=None, async_req=None, + _return_http_data_only=None, collection_formats=None, + _preload_content=True, _request_timeout=None, _host=None): + """Makes the HTTP request (synchronous) and returns deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. + :param method: Method to call. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. 
+ :param auth_settings list: Auth Settings names for the request. + :param response: Response data type. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param async_req bool: execute request asynchronously + :param _return_http_data_only: response data without head status code + and headers + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. + """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, _host) + + return self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, + query_params, + header_params, body, + post_params, files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host)) + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Makes the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + else: + raise ApiValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. 
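+        For instance, ('id', [1, 2, 3]) with collection format 'csv' (the
+        default) becomes ('id', '1,2,3'), while 'multi' expands it into
+        three separate ('id', value) tuples.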
+ + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def files_parameters(self, files=None): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + + if files: + for k, v in six.iteritems(files): + if not v: + continue + file_names = v if type(v) is list else [v] + for n in file_names: + with open(n, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + tuple([k, tuple([filename, filedata, mimetype])])) + + return params + + def select_header_accept(self, accepts): + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). + """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, querys, auth_settings): + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param querys: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + querys.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + :param response: RESTResponse. + :return: file path. 
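+
+        The temporary file is not removed automatically; callers are
+        expected to delete it when finished.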
+ """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition).group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. + """ + try: + return klass(data) + except UnicodeEncodeError: + return six.text_type(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + + if not klass.openapi_types and not hasattr(klass, + 'get_real_child_model'): + return data + + kwargs = {} + if (data is not None and + klass.openapi_types is not None and + isinstance(data, (list, dict))): + for attr, attr_type in six.iteritems(klass.openapi_types): + if klass.attribute_map[attr] in data: + value = data[klass.attribute_map[attr]] + kwargs[attr] = self.__deserialize(value, attr_type) + + instance = klass(**kwargs) + + if hasattr(instance, 'get_real_child_model'): + klass_name = instance.get_real_child_model(data) + if klass_name: + instance = self.__deserialize(data, klass_name) + return instance diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py b/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py new file mode 100644 index 00000000..cbe05514 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/configuration.py @@ -0,0 +1,374 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import copy +import logging +import multiprocessing +import sys +import urllib3 + +import six +from six.moves import http_client as httplib + + +class Configuration(object): + """NOTE: This class is auto generated by OpenAPI Generator + + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. 
Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + + """ + + _default = None + + def __init__(self, host="https://workflows.cloudharness.metacell.us", + api_key=None, api_key_prefix=None, + username=None, password=None, + discard_unknown_keys=False, + ): + """Constructor + """ + self.host = host + """Default Base url + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.discard_unknown_keys = discard_unknown_keys + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("cloudharness_cli.workflows") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = None + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. 
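+        It can be overridden before the ApiClient is created, e.g.
+        configuration.connection_pool_maxsize = 16 (value illustrative).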
+ """ + + self.proxy = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Disable client side validation + self.client_side_validation = True + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. 
+ :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 0.1.0\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "https://workflows.cloudharness.metacell.us", + 'description': "Metacell host", + } + ] + + def get_host_from_settings(self, index, variables=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :return: URL based on host settings + """ + variables = {} if variables is None else variables + servers = self.get_host_settings() + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server['variables'].items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. 
Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py b/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py new file mode 100644 index 00000000..8687d9d4 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/exceptions.py @@ -0,0 +1,121 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import six + + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None): + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. 
None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, six.integer_types): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py b/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py new file mode 100644 index 00000000..a30fa7d6 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/models/__init__.py @@ -0,0 +1,21 @@ +# coding: utf-8 + +# flake8: noqa +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +# import models into model package +from cloudharness_cli.workflows.models.operation import Operation +from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult +from cloudharness_cli.workflows.models.operation_status import OperationStatus +from cloudharness_cli.workflows.models.search_result_data import SearchResultData diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py new file mode 100644 index 00000000..2afe71d7 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation.py @@ -0,0 +1,231 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.workflows.configuration import Configuration + + +class Operation(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
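+
+    A construction sketch (field values are illustrative); attributes left
+    unset stay None and are still emitted by to_dict():
+        op = Operation(name='my-op')
+        op.to_dict()['name']  # -> 'my-op'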
+ """ + openapi_types = { + 'message': 'str', + 'name': 'str', + 'create_time': 'datetime', + 'status': 'OperationStatus', + 'workflow': 'str' + } + + attribute_map = { + 'message': 'message', + 'name': 'name', + 'create_time': 'createTime', + 'status': 'status', + 'workflow': 'workflow' + } + + def __init__(self, message=None, name=None, create_time=None, status=None, workflow=None, local_vars_configuration=None): # noqa: E501 + """Operation - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._message = None + self._name = None + self._create_time = None + self._status = None + self._workflow = None + self.discriminator = None + + if message is not None: + self.message = message + if name is not None: + self.name = name + if create_time is not None: + self.create_time = create_time + if status is not None: + self.status = status + if workflow is not None: + self.workflow = workflow + + @property + def message(self): + """Gets the message of this Operation. # noqa: E501 + + usually set when an error occurred # noqa: E501 + + :return: The message of this Operation. # noqa: E501 + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this Operation. + + usually set when an error occurred # noqa: E501 + + :param message: The message of this Operation. # noqa: E501 + :type: str + """ + + self._message = message + + @property + def name(self): + """Gets the name of this Operation. # noqa: E501 + + operation name # noqa: E501 + + :return: The name of this Operation. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this Operation. + + operation name # noqa: E501 + + :param name: The name of this Operation. # noqa: E501 + :type: str + """ + + self._name = name + + @property + def create_time(self): + """Gets the create_time of this Operation. # noqa: E501 + + + :return: The create_time of this Operation. # noqa: E501 + :rtype: datetime + """ + return self._create_time + + @create_time.setter + def create_time(self, create_time): + """Sets the create_time of this Operation. + + + :param create_time: The create_time of this Operation. # noqa: E501 + :type: datetime + """ + + self._create_time = create_time + + @property + def status(self): + """Gets the status of this Operation. # noqa: E501 + + + :return: The status of this Operation. # noqa: E501 + :rtype: OperationStatus + """ + return self._status + + @status.setter + def status(self, status): + """Sets the status of this Operation. + + + :param status: The status of this Operation. # noqa: E501 + :type: OperationStatus + """ + + self._status = status + + @property + def workflow(self): + """Gets the workflow of this Operation. # noqa: E501 + + low level representation as an Argo json # noqa: E501 + + :return: The workflow of this Operation. # noqa: E501 + :rtype: str + """ + return self._workflow + + @workflow.setter + def workflow(self, workflow): + """Sets the workflow of this Operation. + + low level representation as an Argo json # noqa: E501 + + :param workflow: The workflow of this Operation. 
# noqa: E501 + :type: str + """ + + self._workflow = workflow + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, Operation): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, Operation): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py new file mode 100644 index 00000000..50455c02 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_search_result.py @@ -0,0 +1,147 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.workflows.configuration import Configuration + + +class OperationSearchResult(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'meta': 'SearchResultData', + 'items': 'list[Operation]' + } + + attribute_map = { + 'meta': 'meta', + 'items': 'items' + } + + def __init__(self, meta=None, items=None, local_vars_configuration=None): # noqa: E501 + """OperationSearchResult - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._meta = None + self._items = None + self.discriminator = None + + if meta is not None: + self.meta = meta + if items is not None: + self.items = items + + @property + def meta(self): + """Gets the meta of this OperationSearchResult. # noqa: E501 + + + :return: The meta of this OperationSearchResult. # noqa: E501 + :rtype: SearchResultData + """ + return self._meta + + @meta.setter + def meta(self, meta): + """Sets the meta of this OperationSearchResult. + + + :param meta: The meta of this OperationSearchResult. # noqa: E501 + :type: SearchResultData + """ + + self._meta = meta + + @property + def items(self): + """Gets the items of this OperationSearchResult. # noqa: E501 + + + :return: The items of this OperationSearchResult. 
# noqa: E501 + :rtype: list[Operation] + """ + return self._items + + @items.setter + def items(self, items): + """Sets the items of this OperationSearchResult. + + + :param items: The items of this OperationSearchResult. # noqa: E501 + :type: list[Operation] + """ + + self._items = items + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, OperationSearchResult): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, OperationSearchResult): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py new file mode 100644 index 00000000..c777091a --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/models/operation_status.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.workflows.configuration import Configuration + + +class OperationStatus(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + allowed enum values + """ + PENDING = "Pending" + RUNNING = "Running" + ERROR = "Error" + SUCCEEDED = "Succeeded" + SKIPPED = "Skipped" + FAILED = "Failed" + + allowable_values = [PENDING, RUNNING, ERROR, SUCCEEDED, SKIPPED, FAILED] # noqa: E501 + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + openapi_types = { + } + + attribute_map = { + } + + def __init__(self, local_vars_configuration=None): # noqa: E501 + """OperationStatus - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + self.discriminator = None + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, OperationStatus): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, OperationStatus): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py b/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py new file mode 100644 index 00000000..d422eb95 --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/models/search_result_data.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +import pprint +import re # noqa: F401 + +import six + +from cloudharness_cli.workflows.configuration import Configuration + + +class SearchResultData(object): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + """ + Attributes: + openapi_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + openapi_types = { + 'continue_token': 'str' + } + + attribute_map = { + 'continue_token': 'continueToken' + } + + def __init__(self, continue_token=None, local_vars_configuration=None): # noqa: E501 + """SearchResultData - a model defined in OpenAPI""" # noqa: E501 + if local_vars_configuration is None: + local_vars_configuration = Configuration() + self.local_vars_configuration = local_vars_configuration + + self._continue_token = None + self.discriminator = None + + if continue_token is not None: + self.continue_token = continue_token + + @property + def continue_token(self): + """Gets the continue_token of this SearchResultData. # noqa: E501 + + token to use for pagination # noqa: E501 + + :return: The continue_token of this SearchResultData. # noqa: E501 + :rtype: str + """ + return self._continue_token + + @continue_token.setter + def continue_token(self, continue_token): + """Sets the continue_token of this SearchResultData. 
+ + token to use for pagination # noqa: E501 + + :param continue_token: The continue_token of this SearchResultData. # noqa: E501 + :type: str + """ + + self._continue_token = continue_token + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.openapi_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, SearchResultData): + return False + + return self.to_dict() == other.to_dict() + + def __ne__(self, other): + """Returns true if both objects are not equal""" + if not isinstance(other, SearchResultData): + return True + + return self.to_dict() != other.to_dict() diff --git a/client/cloudharness_cli/cloudharness_cli/workflows/rest.py b/client/cloudharness_cli/cloudharness_cli/workflows/rest.py new file mode 100644 index 00000000..ab9f0b4c --- /dev/null +++ b/client/cloudharness_cli/cloudharness_cli/workflows/rest.py @@ -0,0 +1,297 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import io +import json +import logging +import re +import ssl + +import certifi +# python 2 and python 3 compatibility library +import six +from six.moves.urllib.parse import urlencode +import urllib3 + +from cloudharness_cli.workflows.exceptions import ApiException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTResponse(io.IOBase): + + def __init__(self, resp): + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.urllib3_response.getheaders() + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.urllib3_response.getheader(name, default) + + +class RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + # ca_certs + if configuration.ssl_ca_cert: + ca_certs = configuration.ssl_ca_cert + else: + # if not set certificate file, use Mozilla's root certificates. 
+ ca_certs = certifi.where() + + addition_pool_args = {} + if configuration.assert_hostname is not None: + addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + + if configuration.retries is not None: + addition_pool_args['retries'] = configuration.retries + + if maxsize is None: + if configuration.connection_pool_maxsize is not None: + maxsize = configuration.connection_pool_maxsize + else: + maxsize = 4 + + # https pool manager + if configuration.proxy: + self.pool_manager = urllib3.ProxyManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + proxy_url=configuration.proxy, + proxy_headers=configuration.proxy_headers, + **addition_pool_args + ) + else: + self.pool_manager = urllib3.PoolManager( + num_pools=pools_size, + maxsize=maxsize, + cert_reqs=cert_reqs, + ca_certs=ca_certs, + cert_file=configuration.cert_file, + key_file=configuration.key_file, + **addition_pool_args + ) + + def request(self, method, url, query_params=None, headers=None, + body=None, post_params=None, _preload_content=True, + _request_timeout=None): + """Perform requests. + + :param method: http request method + :param url: http request url + :param query_params: query parameters in the url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + """ + method = method.upper() + assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', + 'PATCH', 'OPTIONS'] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 + timeout = urllib3.Timeout(total=_request_timeout) + elif (isinstance(_request_timeout, tuple) and + len(_request_timeout) == 2): + timeout = urllib3.Timeout( + connect=_request_timeout[0], read=_request_timeout[1]) + + if 'Content-Type' not in headers: + headers['Content-Type'] = 'application/json' + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if query_params: + url += '?' + urlencode(query_params) + if re.search('json', headers['Content-Type'], re.IGNORECASE): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=False, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + elif headers['Content-Type'] == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. 
+ del headers['Content-Type'] + r = self.pool_manager.request( + method, url, + fields=post_params, + encode_multipart=True, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + # Pass a `string` parameter directly in the body to support + # other content types than Json when `body` argument is + # provided in serialized form + elif isinstance(body, str) or isinstance(body, bytes): + request_body = body + r = self.pool_manager.request( + method, url, + body=request_body, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request(method, url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers) + except urllib3.exceptions.SSLError as e: + msg = "{0}\n{1}".format(type(e).__name__, str(e)) + raise ApiException(status=0, reason=msg) + + if _preload_content: + r = RESTResponse(r) + + # In the python 3, the response.data is bytes. + # we need to decode it to string. + if six.PY3: + r.data = r.data.decode('utf8') + + # log response body + logger.debug("response body: %s", r.data) + + if not 200 <= r.status <= 299: + raise ApiException(http_resp=r) + + return r + + def GET(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("GET", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, + _request_timeout=None): + return self.request("HEAD", url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params) + + def OPTIONS(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("OPTIONS", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def DELETE(self, url, headers=None, query_params=None, body=None, + _preload_content=True, _request_timeout=None): + return self.request("DELETE", url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def POST(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("POST", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PUT(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PUT", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + + def PATCH(self, url, headers=None, query_params=None, post_params=None, + body=None, _preload_content=True, _request_timeout=None): + return self.request("PATCH", url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, 
+                            _request_timeout=_request_timeout,
+                            body=body)
diff --git a/client/cloudharness_cli/docs/samples/AuthApi.md b/client/cloudharness_cli/docs/samples/AuthApi.md
new file mode 100644
index 00000000..059b4a6e
--- /dev/null
+++ b/client/cloudharness_cli/docs/samples/AuthApi.md
@@ -0,0 +1,69 @@
+# cloudharness_cli.samples.AuthApi
+
+All URIs are relative to *https://samples.cloudharness.metacell.us/api*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**valid_token**](AuthApi.md#valid_token) | **GET** /valid | Check if the token is valid. Get a token by logging into the base URL
+
+
+# **valid_token**
+> list[Valid] valid_token()
+
+Check if the token is valid. Get a token by logging into the base URL
+
+Check if the token is valid
+
+### Example
+
+* Bearer (JWT) Authentication (bearerAuth):
+```python
+from __future__ import print_function
+import time
+import cloudharness_cli.samples
+from cloudharness_cli.samples.rest import ApiException
+from pprint import pprint
+configuration = cloudharness_cli.samples.Configuration()
+# Configure Bearer authorization (JWT): bearerAuth
+configuration.access_token = 'YOUR_BEARER_TOKEN'
+
+# Defining the host is optional; it defaults to https://samples.cloudharness.metacell.us/api
+configuration.host = "https://samples.cloudharness.metacell.us/api"
+
+# Enter a context with an instance of the API client
+with cloudharness_cli.samples.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = cloudharness_cli.samples.AuthApi(api_client)
+
+    try:
+        # Check if the token is valid. Get a token by logging into the base URL
+        api_response = api_instance.valid_token()
+        pprint(api_response)
+    except ApiException as e:
+        print("Exception when calling AuthApi->valid_token: %s\n" % e)
+```
+
+### Parameters
+This endpoint does not need any parameter.
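+
+A note on credentials: the example above hard-codes the bearer token. In practice you would typically load it from the environment instead; the following is a minimal sketch, where the `CH_ACCESS_TOKEN` variable name is an illustrative assumption, not something the generated client defines:
+
+```python
+import os
+
+import cloudharness_cli.samples
+
+configuration = cloudharness_cli.samples.Configuration()
+# Read the JWT from an environment variable rather than hard-coding it.
+# "CH_ACCESS_TOKEN" is an illustrative name, not defined by the client.
+configuration.access_token = os.environ["CH_ACCESS_TOKEN"]
+```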
+ +### Return type + +[**list[Valid]**](Valid.md) + +### Authorization + +[bearerAuth](../README.md#bearerAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Check if token is valid | - | +**400** | bad input parameter | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/client/cloudharness_cli/docs/samples/InlineResponse202.md b/client/cloudharness_cli/docs/samples/InlineResponse202.md new file mode 100644 index 00000000..88560593 --- /dev/null +++ b/client/cloudharness_cli/docs/samples/InlineResponse202.md @@ -0,0 +1,10 @@ +# InlineResponse202 + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**task** | [**InlineResponse202Task**](InlineResponse202Task.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/samples/InlineResponse202Task.md b/client/cloudharness_cli/docs/samples/InlineResponse202Task.md new file mode 100644 index 00000000..b41e90e5 --- /dev/null +++ b/client/cloudharness_cli/docs/samples/InlineResponse202Task.md @@ -0,0 +1,11 @@ +# InlineResponse202Task + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**href** | **str** | the url where to check the operation status | [optional] +**name** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/samples/Valid.md b/client/cloudharness_cli/docs/samples/Valid.md new file mode 100644 index 00000000..321af53c --- /dev/null +++ b/client/cloudharness_cli/docs/samples/Valid.md @@ -0,0 +1,10 @@ +# Valid + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**response** | **str** | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/samples/WorkflowsApi.md b/client/cloudharness_cli/docs/samples/WorkflowsApi.md new file mode 100644 index 00000000..45b2dcc3 --- /dev/null +++ b/client/cloudharness_cli/docs/samples/WorkflowsApi.md @@ -0,0 +1,167 @@ +# cloudharness_cli.samples.WorkflowsApi + +All URIs are relative to *https://samples.cloudharness.metacell.us/api* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**submit_async**](WorkflowsApi.md#submit_async) | **GET** /operation_async | Send an asynchronous operation +[**submit_sync**](WorkflowsApi.md#submit_sync) | **GET** /operation_sync | Send a synchronous operation +[**submit_sync_with_results**](WorkflowsApi.md#submit_sync_with_results) | **GET** /operation_sync_results | Send a synchronous operation and get results using the event queue. 
Just a sum, but in the cloud + + +# **submit_async** +> InlineResponse202 submit_async() + +Send an asynchronous operation + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.samples +from cloudharness_cli.samples.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.samples.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.samples.WorkflowsApi(api_client) + + try: + # Send an asynchronous operation + api_response = api_instance.submit_async() + pprint(api_response) + except ApiException as e: + print("Exception when calling WorkflowsApi->submit_async: %s\n" % e) +``` + +### Parameters +This endpoint does not need any parameter. + +### Return type + +[**InlineResponse202**](InlineResponse202.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**202** | Submitted operation. See also https://restfulapi.net/http-status-202-accepted/ | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **submit_sync** +> str submit_sync() + +Send a synchronous operation + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.samples +from cloudharness_cli.samples.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.samples.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.samples.WorkflowsApi(api_client) + + try: + # Send a synchronous operation + api_response = api_instance.submit_sync() + pprint(api_response) + except ApiException as e: + print("Exception when calling WorkflowsApi->submit_sync: %s\n" % e) +``` + +### Parameters +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Operation result | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **submit_sync_with_results** +> str submit_sync_with_results(a=a, b=b) + +Send a synchronous operation and get results using the event queue. Just a sum, but in the cloud + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.samples +from cloudharness_cli.samples.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.samples.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.samples.WorkflowsApi(api_client) + a = 10 # float | first number to sum (optional) +b = 10 # float | second number to sum (optional) + + try: + # Send a synchronous operation and get results using the event queue. 
Just a sum, but in the cloud + api_response = api_instance.submit_sync_with_results(a=a, b=b) + pprint(api_response) + except ApiException as e: + print("Exception when calling WorkflowsApi->submit_sync_with_results: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **a** | **float**| first number to sum | [optional] + **b** | **float**| second number to sum | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | Operation result | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md b/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md new file mode 100644 index 00000000..7ee177da --- /dev/null +++ b/client/cloudharness_cli/docs/workflows/CreateAndAccessApi.md @@ -0,0 +1,243 @@ +# cloudharness_cli.workflows.CreateAndAccessApi + +All URIs are relative to *https://workflows.cloudharness.metacell.us* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**delete_operation**](CreateAndAccessApi.md#delete_operation) | **DELETE** /operations/{name} | deletes operation by name +[**get_operation**](CreateAndAccessApi.md#get_operation) | **GET** /operations/{name} | get operation by name +[**list_operations**](CreateAndAccessApi.md#list_operations) | **GET** /operations | lists operations +[**log_operation**](CreateAndAccessApi.md#log_operation) | **GET** /operations/{name}/logs | get operation by name + + +# **delete_operation** +> delete_operation(name) + +deletes operation by name + +delete operation by its name + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.workflows +from cloudharness_cli.workflows.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.workflows.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client) + name = 'name_example' # str | + + try: + # deletes operation by name + api_instance.delete_operation(name) + except ApiException as e: + print("Exception when calling CreateAndAccessApi->delete_operation: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | delete OK | - | +**404** | not found | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_operation** +> list[Operation] get_operation(name) + +get operation by name + +retrieves an operation by its name + +### Example + +```python 
+from __future__ import print_function +import time +import cloudharness_cli.workflows +from cloudharness_cli.workflows.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.workflows.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client) + name = 'name_example' # str | + + try: + # get operation by name + api_response = api_instance.get_operation(name) + pprint(api_response) + except ApiException as e: + print("Exception when calling CreateAndAccessApi->get_operation: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| | + +### Return type + +[**list[Operation]**](Operation.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | search results matching criteria | - | +**404** | not found | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **list_operations** +> OperationSearchResult list_operations(status=status, previous_search_token=previous_search_token, limit=limit) + +lists operations + +see all operations for the user + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.workflows +from cloudharness_cli.workflows.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.workflows.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client) + status = cloudharness_cli.workflows.OperationStatus() # OperationStatus | filter by status (optional) +previous_search_token = 'previous_search_token_example' # str | continue previous search (pagination chunks) (optional) +limit = 10 # int | maximum number of records to return per page (optional) (default to 10) + + try: + # lists operations + api_response = api_instance.list_operations(status=status, previous_search_token=previous_search_token, limit=limit) + pprint(api_response) + except ApiException as e: + print("Exception when calling CreateAndAccessApi->list_operations: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **status** | [**OperationStatus**](.md)| filter by status | [optional] + **previous_search_token** | **str**| continue previous search (pagination chunks) | [optional] + **limit** | **int**| maximum number of records to return per page | [optional] [default to 10] + +### Return type + +[**OperationSearchResult**](OperationSearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | search results matching criteria | - | +**400** | bad input parameter | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model 
list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **log_operation** +> str log_operation(name) + +get operation by name + +retrieves an operation log by its name + +### Example + +```python +from __future__ import print_function +import time +import cloudharness_cli.workflows +from cloudharness_cli.workflows.rest import ApiException +from pprint import pprint + +# Enter a context with an instance of the API client +with cloudharness_cli.workflows.ApiClient() as api_client: + # Create an instance of the API class + api_instance = cloudharness_cli.workflows.CreateAndAccessApi(api_client) + name = 'name_example' # str | + + try: + # get operation by name + api_response = api_instance.log_operation(name) + pprint(api_response) + except ApiException as e: + print("Exception when calling CreateAndAccessApi->log_operation: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/plain + +### HTTP response details +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | search results matching criteria | - | +**404** | not found | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/client/cloudharness_cli/docs/workflows/Operation.md b/client/cloudharness_cli/docs/workflows/Operation.md new file mode 100644 index 00000000..a821a115 --- /dev/null +++ b/client/cloudharness_cli/docs/workflows/Operation.md @@ -0,0 +1,15 @@ +# Operation + +represents the status of a distributed API call +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**message** | **str** | usually set when an error occurred | [optional] +**name** | **str** | operation name | [optional] +**create_time** | **datetime** | | [optional] [readonly] +**status** | [**OperationStatus**](OperationStatus.md) | | [optional] +**workflow** | **str** | low level representation as an Argo json | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/workflows/OperationSearchResult.md b/client/cloudharness_cli/docs/workflows/OperationSearchResult.md new file mode 100644 index 00000000..105beb74 --- /dev/null +++ b/client/cloudharness_cli/docs/workflows/OperationSearchResult.md @@ -0,0 +1,12 @@ +# OperationSearchResult + +a list of operations with meta data about the result +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**meta** | [**SearchResultData**](SearchResultData.md) | | [optional] +**items** | [**list[Operation]**](Operation.md) | | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/workflows/OperationStatus.md b/client/cloudharness_cli/docs/workflows/OperationStatus.md new file mode 100644 index 00000000..21ef9a0c --- /dev/null +++ 
b/client/cloudharness_cli/docs/workflows/OperationStatus.md @@ -0,0 +1,9 @@ +# OperationStatus + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/docs/workflows/SearchResultData.md b/client/cloudharness_cli/docs/workflows/SearchResultData.md new file mode 100644 index 00000000..141f5aa1 --- /dev/null +++ b/client/cloudharness_cli/docs/workflows/SearchResultData.md @@ -0,0 +1,11 @@ +# SearchResultData + +describes a search +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**continue_token** | **str** | token to use for pagination | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/client/cloudharness_cli/requirements.txt b/client/cloudharness_cli/requirements.txt new file mode 100644 index 00000000..eb358efd --- /dev/null +++ b/client/cloudharness_cli/requirements.txt @@ -0,0 +1,6 @@ +certifi >= 14.05.14 +future; python_version<="2.7" +six >= 1.10 +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.15.1 diff --git a/client/cloudharness_cli/setup.py b/client/cloudharness_cli/setup.py new file mode 100644 index 00000000..96353a80 --- /dev/null +++ b/client/cloudharness_cli/setup.py @@ -0,0 +1,42 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from setuptools import setup, find_packages # noqa: H301 + +NAME = "cloudharness-cli" +VERSION = "0.1.0" +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"] + +setup( + name=NAME, + version=VERSION, + description="CloudHarness Python API Client", + author="OpenAPI Generator community", + author_email="cloudharness@metacell.us", + url="", + keywords=["OpenAPI", "CloudHarness Sample API"], + install_requires=REQUIRES, + packages=find_packages(exclude=["test", "tests"]), + include_package_data=True, + license="UNLICENSED", + long_description="""\ + CloudHarness Python API Client # noqa: E501 + """ +) diff --git a/client/cloudharness_cli/test-requirements.txt b/client/cloudharness_cli/test-requirements.txt new file mode 100644 index 00000000..4ed3991c --- /dev/null +++ b/client/cloudharness_cli/test-requirements.txt @@ -0,0 +1,3 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 diff --git a/client/cloudharness_cli/test/samples/__init__.py b/client/cloudharness_cli/test/samples/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/client/cloudharness_cli/test/samples/test_auth_api.py b/client/cloudharness_cli/test/samples/test_auth_api.py new file mode 100644 index 00000000..a7dccd01 --- /dev/null +++ b/client/cloudharness_cli/test/samples/test_auth_api.py @@ -0,0 +1,41 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + 
Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import cloudharness_cli.samples +from cloudharness_cli.samples.api.auth_api import AuthApi # noqa: E501 +from cloudharness_cli.samples.rest import ApiException + + +class TestAuthApi(unittest.TestCase): + """AuthApi unit test stubs""" + + def setUp(self): + self.api = cloudharness_cli.samples.api.auth_api.AuthApi() # noqa: E501 + + def tearDown(self): + pass + + def test_valid_token(self): + """Test case for valid_token + + Check if the token is valid. Get a token by logging into the base url # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/samples/test_inline_response202.py b/client/cloudharness_cli/test/samples/test_inline_response202.py new file mode 100644 index 00000000..09e667f5 --- /dev/null +++ b/client/cloudharness_cli/test/samples/test_inline_response202.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.samples +from cloudharness_cli.samples.models.inline_response202 import InlineResponse202 # noqa: E501 +from cloudharness_cli.samples.rest import ApiException + +class TestInlineResponse202(unittest.TestCase): + """InlineResponse202 unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test InlineResponse202 + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.samples.models.inline_response202.InlineResponse202() # noqa: E501 + if include_optional : + return InlineResponse202( + task = cloudharness_cli.samples.models.inline_response_202_task.inline_response_202_task( + href = 'http://workflows.cloudharness.metacell.us/api/operation/my-op', + name = 'my-op', ) + ) + else : + return InlineResponse202( + ) + + def testInlineResponse202(self): + """Test InlineResponse202""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/samples/test_inline_response202_task.py b/client/cloudharness_cli/test/samples/test_inline_response202_task.py new file mode 100644 index 00000000..d2ddf489 --- /dev/null +++ b/client/cloudharness_cli/test/samples/test_inline_response202_task.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.samples +from cloudharness_cli.samples.models.inline_response202_task import InlineResponse202Task # noqa: E501 +from cloudharness_cli.samples.rest import ApiException + +class TestInlineResponse202Task(unittest.TestCase): + """InlineResponse202Task unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test InlineResponse202Task + include_option is a boolean, when False 
only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.samples.models.inline_response202_task.InlineResponse202Task() # noqa: E501 + if include_optional : + return InlineResponse202Task( + href = 'http://workflows.cloudharness.metacell.us/api/operation/my-op', + name = 'my-op' + ) + else : + return InlineResponse202Task( + ) + + def testInlineResponse202Task(self): + """Test InlineResponse202Task""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/samples/test_valid.py b/client/cloudharness_cli/test/samples/test_valid.py new file mode 100644 index 00000000..8ce6e217 --- /dev/null +++ b/client/cloudharness_cli/test/samples/test_valid.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.samples +from cloudharness_cli.samples.models.valid import Valid # noqa: E501 +from cloudharness_cli.samples.rest import ApiException + +class TestValid(unittest.TestCase): + """Valid unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test Valid + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.samples.models.valid.Valid() # noqa: E501 + if include_optional : + return Valid( + response = '0' + ) + else : + return Valid( + ) + + def testValid(self): + """Test Valid""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/samples/test_workflows_api.py b/client/cloudharness_cli/test/samples/test_workflows_api.py new file mode 100644 index 00000000..944a51ad --- /dev/null +++ b/client/cloudharness_cli/test/samples/test_workflows_api.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + CloudHarness Sample API + + CloudHarness Sample api # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import cloudharness_cli.samples +from cloudharness_cli.samples.api.workflows_api import WorkflowsApi # noqa: E501 +from cloudharness_cli.samples.rest import ApiException + + +class TestWorkflowsApi(unittest.TestCase): + """WorkflowsApi unit test stubs""" + + def setUp(self): + self.api = cloudharness_cli.samples.api.workflows_api.WorkflowsApi() # noqa: E501 + + def tearDown(self): + pass + + def test_submit_async(self): + """Test case for submit_async + + Send an asynchronous operation # noqa: E501 + """ + pass + + def test_submit_sync(self): + """Test case for submit_sync + + Send a synchronous operation # noqa: E501 + """ + pass + + def test_submit_sync_with_results(self): + """Test case for submit_sync_with_results + + Send a synchronous operation and get results using the event queue. 
Just a sum, but in the cloud # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/workflows/__init__.py b/client/cloudharness_cli/test/workflows/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/client/cloudharness_cli/test/workflows/test_create_and_access_api.py b/client/cloudharness_cli/test/workflows/test_create_and_access_api.py new file mode 100644 index 00000000..f270c116 --- /dev/null +++ b/client/cloudharness_cli/test/workflows/test_create_and_access_api.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest + +import cloudharness_cli.workflows +from cloudharness_cli.workflows.api.create_and_access_api import CreateAndAccessApi # noqa: E501 +from cloudharness_cli.workflows.rest import ApiException + + +class TestCreateAndAccessApi(unittest.TestCase): + """CreateAndAccessApi unit test stubs""" + + def setUp(self): + self.api = cloudharness_cli.workflows.api.create_and_access_api.CreateAndAccessApi() # noqa: E501 + + def tearDown(self): + pass + + def test_delete_operation(self): + """Test case for delete_operation + + deletes operation by name # noqa: E501 + """ + pass + + def test_get_operation(self): + """Test case for get_operation + + get operation by name # noqa: E501 + """ + pass + + def test_list_operations(self): + """Test case for list_operations + + lists operations # noqa: E501 + """ + pass + + def test_log_operation(self): + """Test case for log_operation + + get operation by name # noqa: E501 + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/workflows/test_operation.py b/client/cloudharness_cli/test/workflows/test_operation.py new file mode 100644 index 00000000..4f0389e1 --- /dev/null +++ b/client/cloudharness_cli/test/workflows/test_operation.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.workflows +from cloudharness_cli.workflows.models.operation import Operation # noqa: E501 +from cloudharness_cli.workflows.rest import ApiException + +class TestOperation(unittest.TestCase): + """Operation unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test Operation + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.workflows.models.operation.Operation() # noqa: E501 + if include_optional : + return Operation( + message = '0', + name = '0', + create_time = '2016-08-29T09:12:33.001Z', + status = 'Pending', + workflow = '0' + ) + else : + return Operation( + ) + + def testOperation(self): + """Test Operation""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/workflows/test_operation_search_result.py 
b/client/cloudharness_cli/test/workflows/test_operation_search_result.py new file mode 100644 index 00000000..594060a9 --- /dev/null +++ b/client/cloudharness_cli/test/workflows/test_operation_search_result.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.workflows +from cloudharness_cli.workflows.models.operation_search_result import OperationSearchResult # noqa: E501 +from cloudharness_cli.workflows.rest import ApiException + +class TestOperationSearchResult(unittest.TestCase): + """OperationSearchResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test OperationSearchResult + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.workflows.models.operation_search_result.OperationSearchResult() # noqa: E501 + if include_optional : + return OperationSearchResult( + meta = cloudharness_cli.workflows.models.search_result_data.SearchResultData( + continue_token = '0', ), + items = [ + cloudharness_cli.workflows.models.operation.Operation( + message = '0', + name = '0', + create_time = '2016-08-29T09:12:33.001Z', + status = 'Pending', + workflow = '0', ) + ] + ) + else : + return OperationSearchResult( + ) + + def testOperationSearchResult(self): + """Test OperationSearchResult""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/workflows/test_operation_status.py b/client/cloudharness_cli/test/workflows/test_operation_status.py new file mode 100644 index 00000000..af426a85 --- /dev/null +++ b/client/cloudharness_cli/test/workflows/test_operation_status.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.workflows +from cloudharness_cli.workflows.models.operation_status import OperationStatus # noqa: E501 +from cloudharness_cli.workflows.rest import ApiException + +class TestOperationStatus(unittest.TestCase): + """OperationStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test OperationStatus + include_option is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.workflows.models.operation_status.OperationStatus() # noqa: E501 + if include_optional : + return OperationStatus( + ) + else : + return OperationStatus( + ) + + def testOperationStatus(self): + """Test OperationStatus""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/client/cloudharness_cli/test/workflows/test_search_result_data.py 
b/client/cloudharness_cli/test/workflows/test_search_result_data.py new file mode 100644 index 00000000..843d4a92 --- /dev/null +++ b/client/cloudharness_cli/test/workflows/test_search_result_data.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + Workflows API + + Workflows API # noqa: E501 + + The version of the OpenAPI document: 0.1.0 + Contact: cloudharness@metacell.us + Generated by: https://openapi-generator.tech +""" + + +from __future__ import absolute_import + +import unittest +import datetime + +import cloudharness_cli.workflows +from cloudharness_cli.workflows.models.search_result_data import SearchResultData # noqa: E501 +from cloudharness_cli.workflows.rest import ApiException + +class TestSearchResultData(unittest.TestCase): + """SearchResultData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional): + """Test SearchResultData + include_optional is a boolean; when False only required + params are included, when True both required and + optional params are included """ + # model = cloudharness_cli.workflows.models.search_result_data.SearchResultData() # noqa: E501 + if include_optional : + return SearchResultData( + continue_token = '0' + ) + else : + return SearchResultData( + ) + + def testSearchResultData(self): + """Test SearchResultData""" + inst_req_only = self.make_instance(include_optional=False) + inst_req_and_optional = self.make_instance(include_optional=True) + + +if __name__ == '__main__': + unittest.main() diff --git a/deployment.yaml b/deployment.yaml new file mode 100644 index 00000000..e69de29b diff --git a/infrastructure/README.md b/infrastructure/README.md new file mode 100644 index 00000000..940966a9 --- /dev/null +++ b/infrastructure/README.md @@ -0,0 +1,18 @@ +# Infrastructure + +Here we put all the resources intended to install and deploy the platform on Kubernetes. + +## Relevant files and directory structure + - `base-images`: base Docker images. Those images can be used as base images in CloudHarness apps and tasks. + - `common-images`: Static images. Those images can derive from base images and can also be used as base images in CloudHarness apps and tasks. + +## Base images and common images + +The main difference between the base images and common images is that base images are built in the root context, while +common images are built in a local context. +So, base images are general purpose and are mainly used to provide access to custom libraries, while common images can have +a specific purpose (e.g. enable widely used libraries for tasks). + + + + diff --git a/infrastructure/base-images/README.md b/infrastructure/base-images/README.md new file mode 100644 index 00000000..052b5c48 --- /dev/null +++ b/infrastructure/base-images/README.md @@ -0,0 +1,5 @@ +# Base images + +Here we put the base images which every app and task can inherit from. + +Inheriting from the base image is not needed if the image makes no use of cloudharness or of other prerequisites included in the base image.
\ No newline at end of file diff --git a/infrastructure/base-images/cloudharness-base-debian/Dockerfile b/infrastructure/base-images/cloudharness-base-debian/Dockerfile new file mode 100644 index 00000000..2602fda6 --- /dev/null +++ b/infrastructure/base-images/cloudharness-base-debian/Dockerfile @@ -0,0 +1,8 @@ +ARG PARENT=python:3.7.6 +FROM ${PARENT} + +COPY libraries/cloudharness-common /libraries/cloudharness-common + +RUN pip install /libraries/cloudharness-common + +WORKDIR / \ No newline at end of file diff --git a/infrastructure/base-images/cloudharness-base/Dockerfile b/infrastructure/base-images/cloudharness-base/Dockerfile new file mode 100644 index 00000000..591e0bea --- /dev/null +++ b/infrastructure/base-images/cloudharness-base/Dockerfile @@ -0,0 +1,12 @@ +ARG PARENT=python:3.7.6-alpine +FROM ${PARENT} + +RUN apk update +RUN apk upgrade +RUN apk add bash + +COPY libraries/cloudharness-common /libraries/cloudharness-common + +RUN pip install /libraries/cloudharness-common + +WORKDIR / \ No newline at end of file diff --git a/infrastructure/common-images/README.md b/infrastructure/common-images/README.md new file mode 100644 index 00000000..1d6cc40b --- /dev/null +++ b/infrastructure/common-images/README.md @@ -0,0 +1,3 @@ +# Common images + +Place here common Dockerfiles for images meant to be reused for specific functionalities. \ No newline at end of file diff --git a/libraries/cloudharness-common/.coveragerc b/libraries/cloudharness-common/.coveragerc new file mode 100644 index 00000000..e916e6fb --- /dev/null +++ b/libraries/cloudharness-common/.coveragerc @@ -0,0 +1,3 @@ +[run] +branch = True +omit = project/tests/* \ No newline at end of file diff --git a/libraries/cloudharness-common/.gitignore b/libraries/cloudharness-common/.gitignore new file mode 100644 index 00000000..66f183b8 --- /dev/null +++ b/libraries/cloudharness-common/.gitignore @@ -0,0 +1,3 @@ +.tox +*egg-info +.venv \ No newline at end of file diff --git a/libraries/cloudharness-common/.travis.yml b/libraries/cloudharness-common/.travis.yml new file mode 100644 index 00000000..276c407c --- /dev/null +++ b/libraries/cloudharness-common/.travis.yml @@ -0,0 +1,7 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.7" +# command to install dependencies +install: pip install tox-travis +script: tox diff --git a/libraries/cloudharness-common/MANIFEST.in b/libraries/cloudharness-common/MANIFEST.in new file mode 100644 index 00000000..6021ca4c --- /dev/null +++ b/libraries/cloudharness-common/MANIFEST.in @@ -0,0 +1 @@ +include cloudharness/utils/resources/* \ No newline at end of file diff --git a/libraries/cloudharness-common/README.md b/libraries/cloudharness-common/README.md new file mode 100644 index 00000000..e901157d --- /dev/null +++ b/libraries/cloudharness-common/README.md @@ -0,0 +1,29 @@ +# CloudHarness backend library +CloudHarness - Python core library. + +The CloudHarness core library provides horizontal utilities needed inside custom +applications and tasks. + +## How to use + +To use all `cloudharness` functionalities inside the cluster, your Dockerfile +must derive from the CloudHarness base image, as follows: + +```Dockerfile +ARG REGISTRY=r.cfcr.io/tarelli/ +ARG TAG=latest +FROM ${REGISTRY}cloudharness-base:${TAG} +``` + +## Requirements + +Python 3.4+ + +## Installation + +Install from sources with setuptools: + +``` +cd libraries/cloudharness-common +pip install .
+``` \ No newline at end of file diff --git a/libraries/cloudharness-common/cloudharness/__init__.py b/libraries/cloudharness-common/cloudharness/__init__.py new file mode 100644 index 00000000..1421bd3c --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/__init__.py @@ -0,0 +1,16 @@ +import logging +import sys + +log = logging + +# logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +def set_debug(): + logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) + + + + +__all__ = ['log'] + +# TODO log will write through a rest service diff --git a/libraries/cloudharness-common/cloudharness/auth/__init__.py b/libraries/cloudharness-common/cloudharness/auth/__init__.py new file mode 100644 index 00000000..df314e32 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/auth/__init__.py @@ -0,0 +1 @@ +from .keycloak import * \ No newline at end of file diff --git a/libraries/cloudharness-common/cloudharness/auth/keycloak/__init__.py b/libraries/cloudharness-common/cloudharness/auth/keycloak/__init__.py new file mode 100644 index 00000000..dd5cac4f --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/auth/keycloak/__init__.py @@ -0,0 +1,40 @@ +import os +import jwt +import sys +import json +import requests +from urllib.parse import urljoin +from typing import List +from flask import current_app +from cloudharness.utils import env + +def decode_token(token): + """ + Check and retrieve authentication information from custom bearer token. + Returned value will be passed in 'token_info' parameter of your operation function, if there is one. + 'sub' or 'uid' will be set in 'user' parameter of your operation function, if there is one. + + :param token: Token provided by Authorization header + :type token: str + :return: Decoded token information or None if token is invalid + :rtype: dict | None + """ + SCHEMA = 'https://' + AUTH_DOMAIN = env.get_auth_service_cluster_address() + AUTH_REALM = env.get_auth_realm() + BASE_PATH = f"//{os.path.join(AUTH_DOMAIN, 'auth/realms', AUTH_REALM)}" + AUTH_PUBLIC_KEY_URL = urljoin(SCHEMA, BASE_PATH) + + KEY = json.loads(requests.get(AUTH_PUBLIC_KEY_URL, verify=False).text)['public_key'] + + KEY = f"-----BEGIN PUBLIC KEY-----\n{KEY}\n-----END PUBLIC KEY-----" + + try: + decoded = jwt.decode(token, KEY, audience='accounts', algorithms='RS256') + except Exception: + current_app.logger.debug(f"Error validating user: {sys.exc_info()}") + return None + + valid = 'offline_access' in decoded['realm_access']['roles'] + current_app.logger.debug(valid) + return {'uid': 'user_id'} \ No newline at end of file
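The sample applications in this patch wire `decode_token` into Connexion through the `x-bearerInfoFunc` hook of their OpenAPI specs (see the samples app and the application template further down). A minimal sketch of calling it directly, assuming the accounts service is reachable from the caller; `raw_jwt` is a hypothetical token string, not something the patch provides:

```python
from cloudharness.auth import decode_token

# Hypothetical JWT taken from an "Authorization: Bearer <token>" header.
raw_jwt = "eyJhbGciOiJSUzI1NiJ9..."

token_info = decode_token(raw_jwt)
if token_info is None:
    # The signature did not verify against the realm's public key: reject with 401.
    raise PermissionError("invalid token")
print(token_info)  # e.g. {'uid': 'user_id'}
```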
diff --git a/libraries/cloudharness-common/cloudharness/errors.py b/libraries/cloudharness-common/cloudharness/errors.py new file mode 100644 index 00000000..b5343692 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/errors.py @@ -0,0 +1,25 @@ +class EventTopicProduceException(Exception): + def __init__(self, topic_id): + self.topic_id = topic_id + Exception.__init__(self, f'Events: unable to produce message to topic -> {topic_id}') + +class EventTopicCreationException(Exception): + def __init__(self, topic_id): + self.topic_id = topic_id + Exception.__init__(self, f'Events: unable to create topic -> {topic_id}') + +class EventTopicConsumeException(Exception): + def __init__(self, topic_id): + self.topic_id = topic_id + Exception.__init__(self, f'Events: unable to consume messages from topic -> {topic_id}') + +class EventTopicDeleteException(Exception): + def __init__(self, topic_id): + self.topic_id = topic_id + Exception.__init__(self, f'Events: unable to delete topic -> {topic_id}') + +class EventGeneralException(Exception): + pass + +class MongoDBConfError(Exception): + pass \ No newline at end of file diff --git a/libraries/cloudharness-common/cloudharness/events/__init__.py b/libraries/cloudharness-common/cloudharness/events/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/events/client.py b/libraries/cloudharness-common/cloudharness/events/client.py new file mode 100644 index 00000000..5096a5ee --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/events/client.py @@ -0,0 +1,113 @@ +import os +import sys +from time import sleep +from json import dumps, loads +from kafka import KafkaProducer, KafkaConsumer +from kafka.admin import KafkaAdminClient, NewTopic +from kafka.errors import TopicAlreadyExistsError, UnknownTopicOrPartitionError, KafkaTimeoutError + +from cloudharness import log +from cloudharness.errors import * +from cloudharness.utils import env + + +class EventClient: + def __init__(self, topic_id): + self.client_id = env.get_cloudharness_events_client_id() + self.topic_id = topic_id + self.service = env.get_cloudharness_events_service() + + def create_topic(self): + """ Connects to cloudharness Events and creates a new topic + Return: + True if topic was created correctly, False otherwise. + """ + ## Connect to kafka + log.info(f"Creating topic {self.topic_id}") + admin_client = KafkaAdminClient(bootstrap_servers=self.service, + client_id=self.client_id) + # ## Create topic + + topic_list = [NewTopic(name=self.topic_id, num_partitions=1, replication_factor=1)] + + try: + return admin_client.create_topics(new_topics=topic_list, validate_only=False) + except TopicAlreadyExistsError as e: + log.error(f"Topic {self.topic_id} already exists.") + raise EventTopicCreationException from e + except Exception as e: + log.error(f"Oops... we had an error creating the new topic --> {e}") + raise EventGeneralException from e + + def produce(self, message: dict): + ''' Write a message to the current topic + Params: + message: dict with message to be published. + Return: + True if the message was published correctly, False otherwise. + ''' + producer = KafkaProducer(bootstrap_servers=self.service, + value_serializer=lambda x: dumps(x).encode('utf-8')) + try: + return producer.send(self.topic_id, value=message) + except KafkaTimeoutError as e: + log.error("Oops... not able to fetch topic metadata") + raise EventTopicProduceException from e + except Exception as e: + raise EventGeneralException(f"Oops... we had an error producing to the topic --> {e}") from e + finally: + producer.close() + + def consume_all(self, group_id='default') -> list: + ''' Return a list of messages published in the topic ''' + + consumer = KafkaConsumer(self.topic_id, + bootstrap_servers=self.service, + auto_offset_reset='earliest', + enable_auto_commit=True, + group_id=group_id, + value_deserializer=lambda x: loads(x.decode('utf-8'))) + try: + for topic in consumer.poll(10000).values(): + return [record.value for record in topic] + except Exception as e: + log.error(f"Oops... we had an error trying to create a CloudHarnessEvents consumer for topic {self.topic_id} --> {e}") + raise EventTopicConsumeException from e + finally: + consumer.close() + + def delete_topic(self) -> bool: + + log.debug("Deleting topic " + self.topic_id) + ## Connect to kafka + admin_client = KafkaAdminClient(bootstrap_servers=self.service, + client_id=self.client_id) + ## Delete topic + try: + admin_client.delete_topics([self.topic_id]) + return True + except UnknownTopicOrPartitionError as e: + log.error(f"Topic {self.topic_id} does not exist.") + raise EventTopicDeleteException from e + + except Exception as e: + log.error(f"Oops... we had an error deleting the topic {self.topic_id} --> {e}") + raise EventGeneralException from e + + +if __name__ == "__main__": + # create the required OS env variables + os.environ['CLOUDHARNESS_EVENTS_CLIENT_ID'] = 'my-client' + os.environ['CLOUDHARNESS_EVENTS_SERVICE'] = 'bootstrap.cloudharness.svc.cluster.local:9092' + + # instantiate the client + client = EventClient('test-sync-op-results-qcwbc') + + # create a topic from env variables + # print(client.create_topic()) + # publish to the prev created topic + # print(client.produce({"message": "In God we trust, all others bring data..."})) + # read from the topic + print(client.consume_all('my-group')) + # delete the topic + # print(client.delete_topic()) diff --git a/libraries/cloudharness-common/cloudharness/persistence/__init__.py b/libraries/cloudharness-common/cloudharness/persistence/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/persistence/graph_database/__init__.py b/libraries/cloudharness-common/cloudharness/persistence/graph_database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/persistence/graph_database/neo4j/__init__.py b/libraries/cloudharness-common/cloudharness/persistence/graph_database/neo4j/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/persistence/nosql_database/__init__.py b/libraries/cloudharness-common/cloudharness/persistence/nosql_database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/persistence/sql_database/__init__.py b/libraries/cloudharness-common/cloudharness/persistence/sql_database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/utils/__init__.py b/libraries/cloudharness-common/cloudharness/utils/__init__.py new file mode 100644 index 00000000..e69de29b
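A minimal round trip with the `EventClient` added above — an editorial sketch, not part of the patch, assuming a reachable Kafka broker and the `CH_*` defaults shipped in `resources/values.yaml`:

```python
from cloudharness.events.client import EventClient

client = EventClient('my-topic')  # hypothetical topic id

client.create_topic()                           # raises EventTopicCreationException if it already exists
client.produce({'message': 'hello'})            # the payload dict is JSON-serialised by the producer
print(client.consume_all(group_id='my-group'))  # single poll; returns the decoded record values
client.delete_topic()
```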
diff --git a/libraries/cloudharness-common/cloudharness/utils/env.py b/libraries/cloudharness-common/cloudharness/utils/env.py new file mode 100644 index 00000000..f2849d09 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/utils/env.py @@ -0,0 +1,130 @@ +import os + +import yaml + +from .. import log + +TEST = 'TEST' +PROD = 'PROD' + +VARIABLE_IMAGE_REGISTRY = 'CH_IMAGE_REGISTRY' + +SUFFIX_TAG = 'IMAGE_TAG' +SUFFIX_PORT = 'PORT' +SUFFIX_NAME = 'NAME' + +DEFAULT_IMAGE_REGISTRY = '' + +HERE = os.path.dirname(os.path.realpath(__file__)) + + +def set_default_environment(): + with open(HERE + '/resources/values.yaml') as f: + values = yaml.safe_load(f) + os.environ.update({v['name']: str(v['value']) for v in values['env'] if v['name'] not in os.environ}) + + +set_default_environment() + +class VariableNotFound(Exception): + def __init__(self, variable_name): + self.variable_name = variable_name + Exception.__init__(self, f'{variable_name} environment variable was not set.') + + +def get_cloudharness_variables(): + return {k: v for k, v in os.environ.items() if 'CH_' in k} + + +def get_variable(variable_name): + variable_name = name_to_variable(variable_name) + confstr = os.getenv(variable_name) + if confstr is None: + raise VariableNotFound(variable_name) + return confstr + + +def get_image_full_tag(image_repository_name): + tagged = f"{image_repository_name}:{get_image_tag(image_repository_name)}" + registry = get_image_registry() + if registry: + return registry + '/' + tagged + return tagged + + +def get_image_registry(): + try: + return get_variable(VARIABLE_IMAGE_REGISTRY) + except VariableNotFound as e: + log.warning(f"Variable not found {VARIABLE_IMAGE_REGISTRY}. Using default: {DEFAULT_IMAGE_REGISTRY}") + + return DEFAULT_IMAGE_REGISTRY + + +def get_image_tag(application_name): + try: + + return get_sub_variable(application_name, SUFFIX_TAG) + except VariableNotFound as e: + default_tag = get_variable('CH_IMAGE_TAG') + log.warning(f"Image tag specification not found for {application_name}: variable not found {e.variable_name}. " + f"Using default: {default_tag}") + + return default_tag + + +def name_to_variable(application_name): + return application_name.upper().replace('-', '_') + + +# CloudHarness Events +def get_cloudharness_events_client_id(): + return get_variable('CH_KEYCLOAK_WEBCLIENT_ID') + + +def get_cloudharness_events_service(): + return get_service_cluster_address('CH_KAFKA') + + +def get_service_cluster_address(cloudharness_app_name): + if use_public_services(): + return get_service_public_address(cloudharness_app_name) + return cluster_service_address(get_sub_variable(cloudharness_app_name, SUFFIX_NAME)) + ':' + get_sub_variable(cloudharness_app_name, SUFFIX_PORT) + + +def cluster_service_address(service_name): + return f'{service_name}.{namespace}.svc.cluster.local' + + +def use_public_services(): + try: + return get_variable('CH_USE_PUBLIC').lower() == 'true' + except VariableNotFound: + return False + +def get_sub_variable(*vars): + return get_variable(name_to_variable('_'.join(vars))) + + +def get_service_public_address(app_name): + return ".".join([get_sub_variable(app_name, 'SUBDOMAIN'), get_public_domain()]) + + +def get_public_domain(): + return get_variable('CH_DOMAIN') + + +def get_cloudharness_workflows_service_url(): + return get_service_public_address('CH_WORKFLOWS') + + +def get_auth_service_cluster_address(): + return get_service_cluster_address('CH_KEYCLOAK') + + +def get_auth_service_url(): + return get_service_public_address('CH_KEYCLOAK') + + +def get_auth_realm(): + return get_variable('CH_KEYCLOAK_REALM')
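The naming convention implemented above is easiest to see by example. A sketch using the defaults from `resources/values.yaml` (next file): `get_sub_variable` joins and upper-cases its parts before the lookup, and `get_image_full_tag` combines registry, repository and tag:

```python
from cloudharness.utils import env

assert env.name_to_variable('ch-docs') == 'CH_DOCS'        # dashes become underscores
assert env.get_sub_variable('CH_KAFKA', 'PORT') == '9092'  # reads the CH_KAFKA_PORT variable
# With CH_IMAGE_REGISTRY=localhost:5000 and the CH_IMAGE_TAG=latest fallback:
assert env.get_image_full_tag('workflows') == 'localhost:5000/workflows:latest'
```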
diff --git a/libraries/cloudharness-common/cloudharness/utils/resources/values.yaml b/libraries/cloudharness-common/cloudharness/utils/resources/values.yaml new file mode 100644 index 00000000..2bcab846 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/utils/resources/values.yaml @@ -0,0 +1,245 @@ +apps: + argo: + enabled: true + name: argo-ui-gk + port: 80 + subdomain: argo + docs: + enabled: true + harvest: false + image: + name: cloudharness-docs + tag: 1 + name: cloudharness-docs + port: 8080 + subdomain: docs + events: + enabled: true + name: kafka-manager-gk + port: 80 + subdomain: events + kafka: + name: bootstrap + port: 9092 + keycloak: + admin: + pass: metacell + role: administrator + user: admin + client: + id: rest-client + secret: 5678eb6e-9e2c-4ee5-bd54-34e7411339e8 + db: + image: + name: postgres + tag: 10.4 + initialdb: auth_db + name: keycloak-postgress + pass: password + user: user + enabled: true + harvest: true + image: + name: keycloak + tag: 1 + name: keycloak + port: 8080 + realm: cloudharness + subdomain: accounts + webclient: + id: web-client + secret: 452952ae-922c-4766-b912-7b106271e34b + keycloak-gatekeeper: + enabled: true + image: + name: keycloak-gatekeeper + tag: 1 + name: keycloak-gatekeeper + test: + enabled: true + harvest: true + image: + name: samples + tag: 1 + name: samples + port: 8080 + subdomain: test + workflows: + enabled: true + harvest: false + image: + name: workflows + tag: 1 + name: workflows + port: 8080 + subdomain: workflows +domain: cloudharness.metacell.us +env: + - name: CH_VERSION + value: 0.0.1 + - name: CH_CHART_VERSION + value: 0.0.1 + - name: CH_ELASTICSEARCH_ENABLED + value: true + - name: CH_ELASTICSEARCH_NAME + value: elasticsearch + - name: CH_ELASTICSEARCH_IMAGE_NAME + value: docker.elastic.co/elasticsearch/elasticsearch + - name: CH_ELASTICSEARCH_IMAGE_TAG + value: 7.2.0 + - name: CH_ELASTICSEARCH_PORT + value: 9200 + - name: CH_ELASTICSEARCH_NODEPORT + value: 9300 + - name: CH_ELASTICSEARCH_STORAGE + value: 10Gi + - name: CH_KIBANA_ENABLED + value: true + - name: CH_KIBANA_SECUREME + value: true + - name: CH_KIBANA_NAME + value: el-kibana + - name: CH_KIBANA_IMAGE_NAME + value: docker.elastic.co/kibana/kibana + - name: CH_KIBANA_IMAGE_TAG + value: 7.2.0 + - name: CH_KIBANA_PORT + value: 5601 + - name: CH_KIBANA_SUBDOMAIN + value: monitoring + - name: CH_KIBANA_GATEKEEPER_IMAGE + value: keycloak-gatekeeper + - name: CH_KIBANA_GATEKEEPER_TAG + value: latest + - name: CH_EVENTS_ENABLED + value: true + - name: CH_EVENTS_NAME + value: kafka-manager-gk + - name: CH_EVENTS_SUBDOMAIN + value: events + - name: CH_EVENTS_PORT + value: 80 + - name: CH_KAFKA_NAME + value: bootstrap + - name: CH_KAFKA_PORT + value: 9092 + - name: CH_ARGO_ENABLED + value: true + - name: CH_ARGO_NAME + value: argo-ui-gk + - name: CH_ARGO_SUBDOMAIN + value: argo + - name: CH_ARGO_PORT + value: 80 + - name: CH_KEYCLOAK_GATEKEEPER_ENABLED + value: true + - name: CH_KEYCLOAK_GATEKEEPER_NAME + value: keycloak-gatekeeper + - name: CH_KEYCLOAK_GATEKEEPER_IMAGE_NAME + value: keycloak-gatekeeper + - name: CH_KEYCLOAK_GATEKEEPER_IMAGE_TAG + value: latest + - name: CH_WORKFLOWS_ENABLED + value: true + - name: CH_WORKFLOWS_NAME + value: workflows + - name: CH_WORKFLOWS_IMAGE_NAME + value: workflows + - name: CH_WORKFLOWS_IMAGE_TAG + value: latest + - name: CH_WORKFLOWS_HARVEST + value: false + - name: CH_WORKFLOWS_PORT + value: 8080 + - name: CH_WORKFLOWS_SUBDOMAIN + value: workflows + - name: CH_KEYCLOAK_ENABLED + value: true + - name: CH_KEYCLOAK_NAME + value: keycloak + - name: CH_KEYCLOAK_IMAGE_NAME + value: keycloak + - name: CH_KEYCLOAK_IMAGE_TAG + value: latest + - name: CH_KEYCLOAK_ADMIN_PASS + value: metacell + - name: CH_KEYCLOAK_ADMIN_USER + value: admin + - name: CH_KEYCLOAK_ADMIN_ROLE + value: administrator + - name: CH_KEYCLOAK_CLIENT_ID + value: rest-client + - name: CH_KEYCLOAK_CLIENT_SECRET + value: 5678eb6e-9e2c-4ee5-bd54-34e7411339e8 + - name: CH_KEYCLOAK_DB_IMAGE_NAME + value: postgres + - name: CH_KEYCLOAK_DB_IMAGE_TAG + value: 10.4 + - name: CH_KEYCLOAK_DB_INITIALDB + value: auth_db + - name: CH_KEYCLOAK_DB_NAME + value: keycloak-postgress + - name: CH_KEYCLOAK_DB_PASS + value: password + - name: CH_KEYCLOAK_DB_USER + value: user + - name: CH_KEYCLOAK_HARVEST + value: true + - name: CH_KEYCLOAK_WEBCLIENT_ID + value: web-client + - name: CH_KEYCLOAK_WEBCLIENT_SECRET + value: 452952ae-922c-4766-b912-7b106271e34b + - name: CH_KEYCLOAK_PORT + value: 8080 + - name: CH_KEYCLOAK_REALM + value: cloudharness + - name: CH_KEYCLOAK_SUBDOMAIN + value: accounts + - name: CH_TEST_ENABLED + value: true + - name: CH_TEST_NAME + value: test + - name: CH_TEST_IMAGE_NAME + value: test + - name: CH_TEST_IMAGE_TAG + value: latest + - name: CH_TEST_HARVEST + value: true + - name: CH_TEST_PORT + value: 8080 + - name: CH_TEST_SUBDOMAIN + value: test + - name: CH_DOCS_ENABLED + value: true + - name: CH_DOCS_NAME + value: docs + - name: CH_DOCS_IMAGE_NAME + value: docs + - name: CH_DOCS_IMAGE_TAG + value: latest + - name: CH_DOCS_HARVEST + value: false + - name: CH_DOCS_PORT + value: 8080 + - name: CH_DOCS_SUBDOMAIN + value: docs + - name: CH_DOMAIN + value: cloudharness.metacell.us + - name: CH_IMAGE_REGISTRY + value: localhost:5000 + - name: CH_IMAGE_TAG + value: latest +fullnameOverride: "" +ingress: + enabled: true + letsencrypt: + email: facundo@metacell.us + name: cloudharness-ingress +minikube: true +nameOverride: "" +privenv: + - name: CH_SECRET + value: 'In God we trust; all others must bring data. ― W.
Edwards Deming' +registry: localhost:5000 +serviceaccount: argo-workflows +tag: 1 diff --git a/libraries/cloudharness-common/cloudharness/utils/settings.py b/libraries/cloudharness-common/cloudharness/utils/settings.py new file mode 100644 index 00000000..bffbd718 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/utils/settings.py @@ -0,0 +1,2 @@ + +CODEFRESH_PULL_SECRET = 'codefresh-generated-r.cfcr.io-cfcr-argo-workflows' \ No newline at end of file diff --git a/libraries/cloudharness-common/cloudharness/workflows/__init__.py b/libraries/cloudharness-common/cloudharness/workflows/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/cloudharness/workflows/argo.py b/libraries/cloudharness-common/cloudharness/workflows/argo.py new file mode 100644 index 00000000..3103c62d --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/workflows/argo.py @@ -0,0 +1,236 @@ +""" +Access workflows using Argo REST API +Reference: https://argoproj.github.io/docs/argo/docs/rest-api.html +https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/CustomObjectsApi.md +""" +import kubernetes + +import yaml +import os +from pathlib import Path + +from cloudharness import log + +group = 'argoproj.io' +version = 'v1alpha1' + +plural = 'workflows' +namespace = 'argo-workflows' + +CUSTOM_OBJECT_URL = f"/apis/{group}/{version}/{plural}" + + +class WorkflowException(Exception): + def __init__(self, status, message=''): + super().__init__(message) + self.status = status + + +class WorkflowNotFound(WorkflowException): + def __init__(self): + super().__init__(404) + + +class BadParam(WorkflowException): + def __init__(self, param_name, message=''): + super().__init__(400, message) + self.param = param_name + + +class ArgoObject: + + @classmethod + def from_spec(cls): + pass + + def spec(self): + raise NotImplementedError + + +# --- Wrapper objects for api results --- + +class Phase: + NodePending = "Pending" + NodeRunning = "Running" + NodeSucceeded = "Succeeded" + NodeSkipped = "Skipped" + NodeFailed = "Failed" + NodeError = "Error" + + @classmethod + def phases(cls): + return tuple(value for key, value in cls.__dict__.items() if 'Node' in key) + + +class Workflow: + def __init__(self, raw_dict): + self.name = raw_dict['metadata']['name'] + self.status = raw_dict['status']['phase'] if 'status' in raw_dict else None + self.create_time = raw_dict['metadata']['creationTimestamp'] + self.raw = raw_dict + + def is_finished(self): + return self.status in (Phase.NodeError, Phase.NodeSucceeded, Phase.NodeSkipped, Phase.NodeFailed) + + def __str__(self): + return yaml.dump(self.raw) + + def succeeded(self): + return self.status == Phase.NodeSucceeded + + def failed(self): + return self.status == Phase.NodeFailed + + def get_status_message(self): + return self.raw['status']['message'] + +class SearchResult: + def __init__(self, raw_dict): + self.items = tuple(Workflow(item) for item in raw_dict['items']) + self.continue_token = raw_dict['metadata']['continue'] + self.raw = raw_dict + + def __str__(self): + return str(self.raw) + + def __repr__(self): + return str(self.raw) + + +# --- Api functions --- + +def get_api_client(): + configuration = get_configuration() + + # configuration.api_key['authorization'] = 'YOUR_API_KEY' # TODO verify if we need an api key + api_instance = kubernetes.client.CustomObjectsApi(kubernetes.client.ApiClient(configuration)) + return api_instance + + +def get_configuration(): + try: + configuration = 
kubernetes.config.load_incluster_config() + + except Exception: + log.warning('Kubernetes cluster configuration not found. Trying local configuration') + + try: + configuration = kubernetes.config.load_kube_config( + config_file=os.path.join(str(Path.home()), '.kube', 'config')) + except Exception: + log.warning('Kubernetes local configuration not found. Using localhost proxy') + configuration = kubernetes.client.configuration.Configuration() + host = 'http://localhost:8001' + configuration.host = host + return configuration + + +api_instance = get_api_client() + +def check_namespace(): + api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(get_configuration())) + try: + api_response = api_instance.read_namespace(namespace, exact=True) + except kubernetes.client.rest.ApiException as e: + + raise Exception("Namespace for argo workflows does not exist:" + namespace) from e + +def create_namespace(): + api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(get_configuration())) + body = kubernetes.client.V1Namespace(metadata=kubernetes.client.V1ObjectMeta(name=namespace)) # V1Namespace | + + + try: + api_response = api_instance.create_namespace(body) + except Exception as e: + raise Exception("Error creating namespace:" + namespace) from e +try: + check_namespace() +except Exception as e: + log.error('Namespace for argo workflows not found', exc_info=e) + log.info("Creating namespace " + namespace) + try: + create_namespace() + except Exception as e: + log.error('Cannot connect with argo', exc_info=e) + + +def get_workflows(status=None, limit=10, continue_token=None, timeout_seconds=3) -> SearchResult: + """https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/CustomObjectsApi.md#list_namespaced_custom_object""" + # Notice: field selector doesn't work as advertised, except for metadata.name and metadata.namespace https://github.com/kubernetes/kubernetes/issues/51046 + # The filtering by phase can be obtained through labels: https://github.com/argoproj/argo/issues/496 + + params = dict(pretty=False, timeout_seconds=timeout_seconds) + if status is not None: + if status not in Phase.phases(): + raise BadParam(status, 'Status must be one of {}'.format(Phase.phases())) + params['label_selector'] = f'workflows.argoproj.io/phase={status}' + api_response = api_instance.list_namespaced_custom_object(group, version, namespace, plural, **params) + + # TODO implement limit and continue, see https://github.com/kubernetes-client/python/issues/965 + # api_response = api_instance.list_cluster_custom_object(group, version, plural, pretty=False, timeout_seconds=timeout_seconds, watch=watch, limit=limit, continue_token=continue_token) + return SearchResult(api_response) + + +def submit_workflow(spec) -> Workflow: + """https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/CustomObjectsApi.md#create_namespaced_custom_object""" + log.debug(f"Submitting workflow\n{spec}") + workflow = Workflow( + api_instance.create_namespaced_custom_object(group, version, namespace, plural, spec, pretty=False)) + log.info(f"Submitted argo workflow {workflow.name}") + if workflow.failed(): + raise WorkflowException("Workflow failed: " + workflow.get_status_message()) + return workflow + + +def delete_workflow(workflow_name): + """https://github.com/kubernetes-client/python/blob/release-11.0/kubernetes/docs/CustomObjectsApi.md#delete_namespaced_custom_object""" + try: + api_instance.delete_namespaced_custom_object(group, version, namespace, plural, workflow_name, 
kubernetes.client.V1DeleteOptions(), grace_period_seconds=0) + except kubernetes.client.rest.ApiException as e: + if e.status == 404: + raise WorkflowNotFound() + raise WorkflowException(e.status) from e + + +def get_workflow(workflow_name) -> Workflow: + try: + workflow = Workflow(api_instance.get_namespaced_custom_object(group, version, namespace, plural, workflow_name)) + except kubernetes.client.rest.ApiException as e: + if e.status == 404: + raise WorkflowNotFound() + raise WorkflowException(e.status) from e + if workflow.failed(): + raise WorkflowException("Workflow failed: " + workflow.get_status_message()) + return workflow + +def get_workflow_logs(workflow_name) -> str: + core_api_instance = kubernetes.client.CoreV1Api(kubernetes.client.ApiClient(get_configuration())) + + try: + wf = api_instance.get_namespaced_custom_object(group, version, namespace, plural, workflow_name) + except kubernetes.client.rest.ApiException as e: + if e.status == 404: + raise WorkflowNotFound() + raise WorkflowException(e.status) from e + + pod_names = [node['id'] for node in wf['status']['nodes'].values() if 'children' not in node] + + if len(pod_names) == 0: + return '' + + try: + return core_api_instance.read_namespaced_pod_log(name=pod_names[0], namespace=namespace, container="main") + except kubernetes.client.rest.ApiException as e: + if e.status == 400: + return "This step has not emitted logs yet..." + raise WorkflowException(e.status) from e + + +if __name__ == '__main__': + from pprint import pprint + + pprint(CUSTOM_OBJECT_URL) + pprint(get_workflows('Succeeded').raw) + # pprint(get_workflow('hello-world-sfzd4'))
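The module above can also be driven directly with a plain Argo workflow manifest. A sketch (the image and names here are illustrative, not part of this patch), assuming the cluster connection and `argo-workflows` namespace set up above:

```python
from cloudharness.workflows import argo

# A plain Argo Workflow object, as expected by submit_workflow.
spec = {
    'apiVersion': 'argoproj.io/v1alpha1',
    'kind': 'Workflow',
    'metadata': {'generateName': 'hello-'},
    'spec': {
        'entrypoint': 'hello',
        'templates': [{
            'name': 'hello',
            'container': {'image': 'alpine:3.11', 'command': ['echo', 'hello']},
        }],
    },
}

wf = argo.submit_workflow(spec)         # returns a Workflow wrapper around the raw object
print(wf.name, wf.status)
# Once the workflow's pods have started:
print(argo.get_workflow_logs(wf.name))  # log of the first pod's 'main' container
```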
diff --git a/libraries/cloudharness-common/cloudharness/workflows/operations.py b/libraries/cloudharness-common/cloudharness/workflows/operations.py new file mode 100644 index 00000000..56516536 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/workflows/operations.py @@ -0,0 +1,325 @@ +from collections.abc import Iterable +import time +import yaml, pyaml + +SERVICE_ACCOUNT = 'argo-workflows' +from cloudharness_cli.workflows.models.operation_status import OperationStatus + +from cloudharness.events.client import EventClient +from cloudharness.utils.settings import CODEFRESH_PULL_SECRET +from cloudharness.utils import env + +from . import argo + +from .tasks import Task, SendResultTask + +from cloudharness import log + +POLLING_WAIT_SECONDS = 1 + + +class BadOperationConfiguration(RuntimeError): + pass + + +class ManagedOperation: + """Abstract definition of an operation. An operation is an abstraction of an Argo workflow + based on a collection of tasks that run according to the operation type and configuration. + """ + + def __init__(self, name): + self.name = name + + def execute(self, **parameters): + raise NotImplementedError(f"{self.__class__.__name__} is abstract") + + +class ContainerizedOperation(ManagedOperation): + """ + Abstract containerized operation based on an Argo workflow + """ + + def __init__(self, basename): + """ + :param basename: base name of the generated workflow + """ + super(ContainerizedOperation, self).__init__(basename) + + self.persisted = None + + @property + def entrypoint(self): + raise NotImplementedError + + @property + def templates(self): + raise NotImplementedError + + def to_workflow(self, **arguments): + workflow = { + 'apiVersion': 'argoproj.io/v1alpha1', + 'kind': 'Workflow', + 'metadata': {'generateName': self.name}, + 'spec': self.spec() + + } + return workflow + + def spec(self): + return { + 'entrypoint': self.entrypoint, + 'templates': tuple(self.modify_template(template) for template in self.templates), + 'serviceAccountName': SERVICE_ACCOUNT, + 'imagePullSecrets': [{'name': CODEFRESH_PULL_SECRET}] + } + + def modify_template(self, template): + """Hook to modify templates (e.g. add volumes)""" + return template + + def submit(self): + """Creates and submits the Argo workflow""" + op = self.to_workflow() + + log.debug("Submitting workflow\n" + pyaml.dump(op)) + + self.persisted = argo.submit_workflow(op) # TODO use rest api for that? Include this into cloudharness.workflows? + + return self.persisted + + def is_running(self): + if self.persisted: + self.refresh() + return self.persisted.status in (OperationStatus.RUNNING, OperationStatus.PENDING) + return False + + def refresh(self): + self.persisted = argo.get_workflow(self.persisted.name) + + def is_error(self): + if self.persisted: + self.refresh() + return self.persisted.status in (OperationStatus.ERROR, OperationStatus.FAILED) + return False + + def name_from_path(self, path): + return path.replace('/', '').lower() + + +class SyncOperation(ManagedOperation): + """A Sync operation returns the result directly with the execute method""" + + +class DirectOperation(SyncOperation): + """A DirectOperation is running directly inside the service container. Whether an operation is direct or distributed + is a design choice of the single service/application. The common scenario depicted here is operations that are + querying a database rather than making calculations. 
Also, there is no need for a real API to define a direct + operation; it is just code running from the REST controller""" + + def __init__(self, name, callback): + super().__init__(name) + self.callback = callback + + def execute(self, *args, **kwargs): + self.callback(*args, **kwargs) + + +class ResourceQueryOperation(DirectOperation): + """Queries for a resource""" + + +class DataQueryOperation(DirectOperation): + """Queries the Graph database""" + pass + + +class SingleTaskOperation(ContainerizedOperation): + def __init__(self, name, task: Task): + """ + Using a single task is a common simplification. + :param task: the single task to run + """ + super().__init__(name) + self.task = task + + @property + def entrypoint(self): + return self.task.name + + @property + def templates(self): + return [self.task.spec()] + + +class ExecuteAndWaitOperation(ContainerizedOperation, SyncOperation): + + def execute(self, timeout=None): + self.persisted = self.submit() + start_time = time.time() + while not self.persisted.is_finished(): + time.sleep(POLLING_WAIT_SECONDS) + log.info(f"Polling argo workflow {self.persisted.name}") + self.persisted = argo.get_workflow(self.persisted.name) + log.info(f"Polling succeeded for {self.persisted.name}. Current phase: {self.persisted.status}") + if timeout and time.time() - start_time > timeout: + log.error("Timeout exceeded while polling for results") + return self.persisted + return self.persisted + + +class DistributedSyncOperation(ExecuteAndWaitOperation, SingleTaskOperation): + """Sync operation that runs on a separate container""" + + +class DataFrameOperation(DistributedSyncOperation): + """Uses Spark dataframe abstraction to implement parallel big data query and processing""" + pass + + +class AsyncOperation(ContainerizedOperation): + """The operation is run asynchronously in an Argo workflow. + The workflow can be monitored during the execution""" + + def execute(self): + op = self.submit() + return op + + def get_operation_update_url(self): + return f"{env.get_cloudharness_workflows_service_url()}/operations/{self.persisted.name}" + + +class CompositeOperation(AsyncOperation): + """Operation with multiple tasks""" + + def __init__(self, basename, tasks, shared_directory="", shared_volume_size=10): + """ + + :param basename: + :param tasks: + :param shared_directory: can be set to True or a path. If set, tasks will use that directory to store results. 
It + will also be available from the container as environment variable `shared_directory` + :param shared_volume_size: size of the shared volume in MB (if shared_directory is not set, it is ignored) + """ + AsyncOperation.__init__(self, basename) + self.tasks = tasks + + if shared_directory: + shared_path = '/mnt/shared' if shared_directory is True else shared_directory + self.volumes = (shared_path,) + for task in self.task_list(): + task.add_env('shared_directory', shared_path) + else: + self.volumes = tuple() + self.shared_volume_size = shared_volume_size + if len(self.task_list()) != len(set(self.task_list())): + raise BadOperationConfiguration('Tasks in the same operation must have different names') + self.entrypoint_template = {'name': self.entrypoint, 'steps': self.steps_spec()} + + def steps_spec(self): + raise NotImplementedError + + def task_list(self): + return self.tasks + + @property + def templates(self): + return [self.entrypoint_template] + [task.spec() for task in self.task_list()] + + def spec(self): + spec = super().spec() + if self.volumes: + spec['volumeClaimTemplates'] = [self.spec_volume(volume) for volume in self.volumes] + return spec + + def modify_template(self, template): + # TODO verify the following condition. Can we mount volumes also with source based templates + if self.volumes and 'container' in template: + template['container']['volumeMounts'] = \ + [{'name': self.name_from_path(volume), 'mountPath': volume} for volume in self.volumes] + return template + + def spec_volume(self, volume): + return { + 'metadata': { + 'name': self.name_from_path(volume), + }, + 'spec': { + 'accessModes': ["ReadWriteOnce"], + 'resources': { + 'requests': + { + 'storage': f'{self.shared_volume_size}Mi' + } + + } + + } + } + + +class PipelineOperation(CompositeOperation): + + def steps_spec(self): + return [[task.instance()] for task in self.tasks] + + @property + def entrypoint(self): + return self.name + '-pipeline' + + +class DistributedSyncOperationWithResults(PipelineOperation, ExecuteAndWaitOperation, SyncOperation): + """Synchronously returns the result from the workflow. Uses a shared volume and a queue""" + + def __init__(self, name, task: Task): + PipelineOperation.__init__(self, name, [task, SendResultTask()], shared_directory="/mnt/shared") + self.client = None + + def submit(self): + op = super().submit() + topic_name = op.name + self.client = EventClient(topic_name) + self.client.create_topic() + return op + + def execute(self, timeout=None): + op = ExecuteAndWaitOperation.execute(self, timeout) + + result = self.client.consume_all() + if result is None: + raise RuntimeError("Operation `" + op.name + "` did not put results in the queue. 
Check your workflow configuration") + self.client.delete_topic() + return result + + + +class ParallelOperation(CompositeOperation): + + def steps_spec(self): + return [[task.instance() for task in self.tasks]] + + @property + def entrypoint(self): + return self.name + '-parallel' + + +class SimpleDagOperation(CompositeOperation): + """Simple DAG definition limited to a pipeline of parallel operations""" + + def __init__(self, basename, *task_groups, shared_directory=None): + task_groups = tuple( + task_group if isinstance(task_group, Iterable) else (task_group,) for task_group in task_groups) + super().__init__(basename, tasks=task_groups, shared_directory=shared_directory) + + def steps_spec(self): + return [[task.instance() for task in task_group] for task_group in self.tasks] + + @property + def entrypoint(self): + return self.name + '-simpledag' + + def task_list(self): + return [task for task_group in self.tasks for task in task_group] + + +__all__ = [c for c in dir() if c.endswith('Operation')]
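How these pieces compose, as a sketch (the same patterns are exercised by `tests/test_workflow.py` later in this patch; a cluster with Argo is assumed): `PythonTask` inlines a function as a script step, `PipelineOperation` chains tasks sequentially, and `to_workflow()` lets you inspect the generated Argo manifest before submitting.

```python
import yaml

from cloudharness.workflows import operations
from cloudharness.workflows.tasks import PythonTask

def download():
    print('downloading...')

def process():
    print('processing...')

# Two steps run in sequence; both see the shared volume mounted at /mnt/shared.
op = operations.PipelineOperation(
    'sample-pipeline-',
    (PythonTask('download', download), PythonTask('process', process)),
    shared_directory='/mnt/shared')

print(yaml.dump(op.to_workflow()))  # inspect the generated workflow without submitting
workflow = op.execute()             # AsyncOperation: submits and returns immediately
print(workflow.name, workflow.status)
```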
diff --git a/libraries/cloudharness-common/cloudharness/workflows/tasks.py b/libraries/cloudharness-common/cloudharness/workflows/tasks.py new file mode 100644 index 00000000..ebacf445 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/workflows/tasks.py @@ -0,0 +1,130 @@ +from . import argo +import time + +SERVICE_ACCOUNT = 'argo-workflows' + +from cloudharness import log +from cloudharness.utils.env import get_cloudharness_variables, get_image_full_tag + +from .utils import WORKFLOW_NAME_VARIABLE_NAME + +class Task(argo.ArgoObject): + """ + Abstract interface for a task. + """ + + def __init__(self, name): + self.name = name.replace(' ', '-').lower() + + @property + def image_name(self): + raise NotImplementedError + + def __hash__(self): + return hash(self.name) + + def __eq__(self, other): + return self.name == other.name + + def instance(self): + # We are not considering arguments; we always have a single template for each step + return { + 'name': self.name, + 'template': self.name + } + + +class ContainerizedTask(Task): + + def __init__(self, name, resources={}, image_pull_policy='IfNotPresent', **env_args): + super().__init__(name) + + self.__envs = get_cloudharness_variables() + self.resources = resources + self.image_pull_policy = image_pull_policy + + for k in env_args: + self.__envs[k] = str(env_args[k]) + + @property + def envs(self): + envs = [dict(name=key, value=value) for key, value in self.__envs.items()] + # Add the name of the workflow to task env + envs.append({'name': WORKFLOW_NAME_VARIABLE_NAME, 'valueFrom': {'fieldRef': {'fieldPath': 'metadata.name'}}}) + return envs + + def add_env(self, name, value): + self.__envs[name] = value + + def spec(self): + spec = { + 'container': { + 'image': self.image_name, + 'env': self.envs, + 'resources': self.resources, + 'imagePullPolicy': self.image_pull_policy + }, + 'inputs': {}, + 'metadata': {}, + 'name': self.name, + 'outputs': {} + + } + return spec + + +class InlinedTask(Task): + """ + Allows running Python tasks inline + """ + + def __init__(self, name, source): + super().__init__(name) + self.source = source + + def spec(self): + return { + 'name': self.name, + 'script': + { + 'image': self.image_name, + 'source': self.source, + 'command': [self.command] + } + } + + @property + def command(self): + raise NotImplementedError + + +class PythonTask(InlinedTask): + def __init__(self, name, func): + import inspect + super().__init__(name, (inspect.getsource(func) + f"\n{func.__name__}()").strip()) + + @property + def image_name(self): + return 'python:3' + + @property + def command(self): + return 'python' + + +class CustomTask(ContainerizedTask): + def __init__(self, name, image_name, resources={}, image_pull_policy='IfNotPresent', **env_args): + super().__init__(name, resources, image_pull_policy, **env_args) + self.__image_name = get_image_full_tag(image_name) + + @property + def image_name(self): + return self.__image_name + + +class SendResultTask(CustomTask): + """Special task used to send the workflow result to a queue. + The workflow result consists of all the files inside the shared directory""" + + def __init__(self): + super().__init__('send-result-event', 'workflows-send-result-event') diff --git a/libraries/cloudharness-common/cloudharness/workflows/utils.py b/libraries/cloudharness-common/cloudharness/workflows/utils.py new file mode 100644 index 00000000..6410a946 --- /dev/null +++ b/libraries/cloudharness-common/cloudharness/workflows/utils.py @@ -0,0 +1,17 @@ +import os + +from cloudharness.utils.env import get_variable + +WORKFLOW_NAME_VARIABLE_NAME = "CH_WORKFLOW_NAME" + + +SHARED_DIRECTORY_VARIABLE_NAME = "shared_directory" + +def get_workflow_name(): + """Get the workflow name from inside a workflow""" + name = get_variable(WORKFLOW_NAME_VARIABLE_NAME) + remove = name.split("-")[-1] + return name[0:-len(remove)-1] + +def get_shared_directory(): + return os.getenv(SHARED_DIRECTORY_VARIABLE_NAME) diff --git a/libraries/cloudharness-common/requirements.txt b/libraries/cloudharness-common/requirements.txt new file mode 100644 index 00000000..9119e5dc --- /dev/null +++ b/libraries/cloudharness-common/requirements.txt @@ -0,0 +1,18 @@ +asn1crypto==0.24.0 +certifi==2019.3.9 +cffi==1.12.2 +chardet==3.0.4 +cryptography==2.6.1 +idna==2.8 +pycosat==0.6.3 +pycparser==2.19 +pyOpenSSL==19.0.0 +PySocks==1.6.8 +requests==2.21.0 +ruamel-yaml==0.15.46 +six==1.12.0 +urllib3==1.24.1 +pykafka==2.8.0 +pyaml +kafka-python +kubernetes \ No newline at end of file diff --git a/libraries/cloudharness-common/setup.py b/libraries/cloudharness-common/setup.py new file mode 100644 index 00000000..284c0aaa --- /dev/null +++ b/libraries/cloudharness-common/setup.py @@ -0,0 +1,35 @@ +# coding: utf-8 +from setuptools import setup, find_packages + + +NAME = "cloudharness" +VERSION = "0.1.0" +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools + +REQUIREMENTS = [ + 'kubernetes', + 'kafka-python', + 'pyaml' +] + + + +setup( + name=NAME, + version=VERSION, + description="CloudHarness common library", + author_email="cloudharness@metacell.us", + url="", + keywords=["cloudharness", "cloud"], + install_requires=REQUIREMENTS, + packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), + include_package_data=True, + long_description="""\ + Cloudharness common library + """ +) diff --git a/libraries/cloudharness-common/test-requirements.txt b/libraries/cloudharness-common/test-requirements.txt new file mode 100644 index 00000000..1ec5c478 --- /dev/null +++ b/libraries/cloudharness-common/test-requirements.txt @@ -0,0 +1,7 @@ +coverage>=4.0.3 +nose>=1.3.7 +pluggy>=0.3.1 +py>=1.4.31 +randomize>=0.13 +pytest>=5.0.0 +pytest-cov diff --git a/libraries/cloudharness-common/tests/__init__.py b/libraries/cloudharness-common/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/cloudharness-common/tests/test_env.py b/libraries/cloudharness-common/tests/test_env.py new file mode 100644 
index 00000000..adcd7dc7 --- /dev/null +++ b/libraries/cloudharness-common/tests/test_env.py @@ -0,0 +1,31 @@ +import pytest +import yaml +from cloudharness.utils.env import * + + + + + + +def test_variables(): + os.environ['CH_USE_PUBLIC'] = "False" + assert 'CH_DOMAIN' in os.environ + + assert get_variable('CH_DOMAIN') == 'cloudharness.metacell.us' + assert get_sub_variable('CH_DOCS', 'NAME') == 'ch-docs' + assert get_sub_variable('CH-docs', 'NAME') == 'ch-docs' + assert get_sub_variable('CH_DOCS', 'IMAGE_NAME') == 'ch-docs' + + assert get_sub_variable('CH_DOCS', 'PORT') == '8080' + + assert get_image_registry() == 'localhost:5000' + assert get_auth_service_url() == 'accounts.cloudharness.metacell.us' + assert get_auth_service_cluster_address() == 'keycloak:8080' + assert get_cloudharness_events_client_id() == 'web-client' + assert get_cloudharness_workflows_service_url() == 'workflows.cloudharness.metacell.us' + assert get_image_full_tag('workflows-extract-download') == 'localhost:5000/workflows-extract-download:latest' + + with pytest.raises(VariableNotFound) as raised: + get_variable('CH_FAKE') + + assert raised.value.variable_name == 'CH_FAKE' diff --git a/libraries/cloudharness-common/tests/test_integration.py b/libraries/cloudharness-common/tests/test_integration.py new file mode 100644 index 00000000..5139c625 --- /dev/null +++ b/libraries/cloudharness-common/tests/test_integration.py @@ -0,0 +1,2 @@ +def test_something(): + assert True \ No newline at end of file diff --git a/libraries/cloudharness-common/tests/test_workflow.py b/libraries/cloudharness-common/tests/test_workflow.py new file mode 100644 index 00000000..8a8e09ca --- /dev/null +++ b/libraries/cloudharness-common/tests/test_workflow.py @@ -0,0 +1,91 @@ +"""Notice, this test needs a fully operating kubernetes with argo environment in the container running the test""" +import time + +from cloudharness.workflows import operations +from cloudharness import set_debug + +set_debug() + +import yaml +from .test_env import set_default_environment + +set_default_environment() + + +def test_sync_workflow(): + def f(): + import time + time.sleep(2) + print('whatever') + + task = operations.PythonTask('my-task', f) + + op = operations.DistributedSyncOperation('test-sync-op-', task) + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + + +def test_pipeline_workflow(): + def f(): + import time + time.sleep(2) + print('whatever') + + op = operations.PipelineOperation('test-pipeline-op-', (operations.PythonTask('step1', f), operations.PythonTask('step2', f))) + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + + +def test_parallel_workflow(): + def f(): + import time + time.sleep(2) + print('whatever') + + op = operations.ParallelOperation('test-parallel-op-', (operations.PythonTask('p1', f), operations.PythonTask('p2', f))) + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + + +def test_simpledag_workflow(): + def f(): + import time + time.sleep(2) + print('whatever') + + # p3 runs after p1 and p2 finish + op = operations.SimpleDagOperation('test-dag-op-', (operations.PythonTask('p1', f), operations.PythonTask('p2', f)), + operations.PythonTask('p3', f)) + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + +def test_custom_task_workflow(): + task = operations.CustomTask('download-file', 'cloudharness-workflows-extract-download', url='https://www.bing.com') + op = operations.PipelineOperation('test-custom-op-', (task, )) + print('\n', yaml.dump(op.to_workflow())) + 
print(op.execute()) + + +def test_custom_connected_task_workflow(): + shared_directory = '/mnt/shared' + task_write = operations.CustomTask('download-file', 'cloudharness-workflows-extract-download', url='https://raw.githubusercontent.com/openworm/org.geppetto/master/README.md') + task_print = operations.CustomTask('print-file', 'cloudharness-workflows-print-file', file_path=shared_directory + '/README.md') + op = operations.PipelineOperation('test-custom-connected-op-', (task_write, task_print), shared_directory=shared_directory) + # op.execute() + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + + +def test_result_task_workflow(): + task_write = operations.CustomTask('download-file', 'cloudharness-workflows-extract-download', url='https://raw.githubusercontent.com/openworm/org.geppetto/master/README.md') + + op = operations.DistributedSyncOperationWithResults('test-sync-results-', task_write) + + + # op.execute() + print('\n', yaml.dump(op.to_workflow())) + print(op.execute()) + + + +# op = operations.ParallelOperation('my_op', [task, operations.CustomTask('my-coreg', 'coregistration-init')]) diff --git a/libraries/cloudharness-common/tox.ini b/libraries/cloudharness-common/tox.ini new file mode 100644 index 00000000..b600af8d --- /dev/null +++ b/libraries/cloudharness-common/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/test-requirements.txt + +commands= + coverage run --source cloudharness -m pytest -v + coverage report -m diff --git a/libraries/package-lock.json b/libraries/package-lock.json new file mode 100644 index 00000000..48e341a0 --- /dev/null +++ b/libraries/package-lock.json @@ -0,0 +1,3 @@ +{ + "lockfileVersion": 1 +} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..1c3cc80d --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +-e utilities/cloudharness-deploy +pyaml diff --git a/utilities/.gitignore b/utilities/.gitignore new file mode 100644 index 00000000..d0bb6f44 --- /dev/null +++ b/utilities/.gitignore @@ -0,0 +1,5 @@ +.idea +*.pyc +.vscode +.coverage +*.DS_Store diff --git a/utilities/Dockerfile b/utilities/Dockerfile new file mode 100644 index 00000000..25a32cd2 --- /dev/null +++ b/utilities/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.7-alpine +ENV DEPLOY_DIR=deployment + +COPY . cloudharness_utilities +RUN pip install ./cloudharness_utilities + +RUN apk --update add git less openssh && \ + rm -rf /var/lib/apt/lists/* && \ + rm /var/cache/apk/* + +WORKDIR $DEPLOY_DIR + +CMD ["python", "deployment-generate.py"] \ No newline at end of file diff --git a/utilities/MANIFEST.in b/utilities/MANIFEST.in new file mode 100644 index 00000000..34a27b48 --- /dev/null +++ b/utilities/MANIFEST.in @@ -0,0 +1,7 @@ +include cloudharness_utilities/deployment-configuration/* +include cloudharness_utilities/deployment-configuration/**/* +include cloudharness_utilities/deployment-configuration/**/**/* +include cloudharness_utilities/deployment-configuration/**/**/**/* +include cloudharness_utilities/application-template/* +include cloudharness_utilities/application-template/**/* +include cloudharness_utilities/application-template/**/**/* \ No newline at end of file diff --git a/utilities/README.md b/utilities/README.md new file mode 100644 index 00000000..218e9b7a --- /dev/null +++ b/utilities/README.md @@ -0,0 +1,3 @@ +# CloudHarness Deploy + +CloudHarness Deploy is a collection of Python utilities to create CloudHarness deployments. 
\ No newline at end of file diff --git a/utilities/cloudharness_utilities.egg-info/PKG-INFO b/utilities/cloudharness_utilities.egg-info/PKG-INFO new file mode 100644 index 00000000..048dcc65 --- /dev/null +++ b/utilities/cloudharness_utilities.egg-info/PKG-INFO @@ -0,0 +1,12 @@ +Metadata-Version: 1.0 +Name: cloudharness-utilities +Version: 0.1.0 +Summary: CloudHarness deploy utilities library +Home-page: UNKNOWN +Author: UNKNOWN +Author-email: cloudharness@metacell.us +License: UNKNOWN +Description: MetaCell Neuroscience Platform - CloudHarness deploy library + +Keywords: Cloud,Kubernetes,Helm,Deploy +Platform: UNKNOWN diff --git a/utilities/cloudharness_utilities.egg-info/SOURCES.txt b/utilities/cloudharness_utilities.egg-info/SOURCES.txt new file mode 100644 index 00000000..d185c324 --- /dev/null +++ b/utilities/cloudharness_utilities.egg-info/SOURCES.txt @@ -0,0 +1,39 @@ +MANIFEST.in +README.md +harness-application +harness-codefresh +harness-deployment +harness-generate +setup.py +cloudharness_utilities/__init__.py +cloudharness_utilities/build.py +cloudharness_utilities/codefresh.py +cloudharness_utilities/constants.py +cloudharness_utilities/helm.py +cloudharness_utilities/openapi.py +cloudharness_utilities/utils.py +cloudharness_utilities.egg-info/PKG-INFO +cloudharness_utilities.egg-info/SOURCES.txt +cloudharness_utilities.egg-info/dependency_links.txt +cloudharness_utilities.egg-info/requires.txt +cloudharness_utilities.egg-info/top_level.txt +cloudharness_utilities/application-template/api/config.json +cloudharness_utilities/application-template/api/samples.yaml +cloudharness_utilities/application-template/deploy/values.yaml +cloudharness_utilities/deployment-configuration/README.md +cloudharness_utilities/deployment-configuration/codefresh-build-template.yaml +cloudharness_utilities/deployment-configuration/codefresh-template.yaml +cloudharness_utilities/deployment-configuration/value-template.yaml +cloudharness_utilities/deployment-configuration/values-template.yaml +cloudharness_utilities/deployment-configuration/helm/.helmignore +cloudharness_utilities/deployment-configuration/helm/Chart.yaml +cloudharness_utilities/deployment-configuration/helm/README.md +cloudharness_utilities/deployment-configuration/helm/values.yaml +cloudharness_utilities/deployment-configuration/helm/templates/NOTES.txt +cloudharness_utilities/deployment-configuration/helm/templates/_helpers.tpl +cloudharness_utilities/deployment-configuration/helm/templates/auto-deployments.yaml +cloudharness_utilities/deployment-configuration/helm/templates/auto-gatekeepers.yaml +cloudharness_utilities/deployment-configuration/helm/templates/auto-services.yaml +cloudharness_utilities/deployment-configuration/helm/templates/ingress.yaml +cloudharness_utilities/deployment-configuration/helm/templates/secrets.yaml +cloudharness_utilities/deployment-configuration/helm/templates/certs/letsencrypt.yaml \ No newline at end of file diff --git a/utilities/cloudharness_utilities.egg-info/dependency_links.txt b/utilities/cloudharness_utilities.egg-info/dependency_links.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/utilities/cloudharness_utilities.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/utilities/cloudharness_utilities.egg-info/requires.txt b/utilities/cloudharness_utilities.egg-info/requires.txt new file mode 100644 index 00000000..243869a0 --- /dev/null +++ b/utilities/cloudharness_utilities.egg-info/requires.txt @@ -0,0 +1,2 @@ +pyaml +docker diff --git 
a/utilities/cloudharness_utilities.egg-info/top_level.txt b/utilities/cloudharness_utilities.egg-info/top_level.txt new file mode 100644 index 00000000..8612b548 --- /dev/null +++ b/utilities/cloudharness_utilities.egg-info/top_level.txt @@ -0,0 +1 @@ +cloudharness_utilities diff --git a/utilities/cloudharness_utilities/__init__.py b/utilities/cloudharness_utilities/__init__.py new file mode 100644 index 00000000..b821e067 --- /dev/null +++ b/utilities/cloudharness_utilities/__init__.py @@ -0,0 +1,11 @@ +import os +import logging + +logging.basicConfig( + level=logging.INFO, + handlers=[ + logging.StreamHandler() + ] +) + +HERE = os.path.dirname(os.path.realpath(__file__)) \ No newline at end of file diff --git a/utilities/cloudharness_utilities/application-template/api/config.json b/utilities/cloudharness_utilities/application-template/api/config.json new file mode 100644 index 00000000..0dd0c28f --- /dev/null +++ b/utilities/cloudharness_utilities/application-template/api/config.json @@ -0,0 +1,3 @@ +{ + "packageName": "api_samples" +} \ No newline at end of file diff --git a/utilities/cloudharness_utilities/application-template/api/openapi.yaml b/utilities/cloudharness_utilities/application-template/api/openapi.yaml new file mode 100644 index 00000000..691a41c9 --- /dev/null +++ b/utilities/cloudharness_utilities/application-template/api/openapi.yaml @@ -0,0 +1,47 @@ +openapi: 3.0.0 +info: + description: CloudHarness Sample api + version: 0.1.0 + title: CloudHarness Sample API + contact: + email: cloudharness@metacell.us + license: + name: UNLICENSED + +tags: + - name: auth + - name: workflows + +paths: + /test: + get: + summary: Test + security: + - bearerAuth: [] + tags: + - auth + operationId: test + description: | + Check if the token is valid + responses: + "200": + description: Check if the application works + "400": + description: bad input parameter + + +servers: + - url: https://samples.cloudharness.metacell.us/api +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + x-bearerInfoFunc: cloudharness.auth.decode_token + schemas: + Valid: + type: object + properties: + response: + type: string diff --git a/utilities/cloudharness_utilities/application-template/deploy/values.yaml b/utilities/cloudharness_utilities/application-template/deploy/values.yaml new file mode 100644 index 00000000..91949d61 --- /dev/null +++ b/utilities/cloudharness_utilities/application-template/deploy/values.yaml @@ -0,0 +1,4 @@ +port: 8080 +subdomain: samples +autodeploy: true +autoservice: true \ No newline at end of file diff --git a/utilities/cloudharness_utilities/build.py b/utilities/cloudharness_utilities/build.py new file mode 100644 index 00000000..6968fa71 --- /dev/null +++ b/utilities/cloudharness_utilities/build.py @@ -0,0 +1,97 @@ +import os +import logging + +from docker import from_env as DockerClient + +from .constants import NODE_BUILD_IMAGE, APPS_PATH, STATIC_IMAGES_PATH, BASE_IMAGES_PATH +from .utils import find_dockerfiles_paths, image_name_from_docker_path + + +class Builder: + + def __init__(self, root_paths, images, tag, registry='', interactive=False, exclude=tuple()): + self.images = images + self.tag = tag + self.root_paths = root_paths + self.registry = registry + self.interactive = interactive + self.exclude = exclude + + # connect to docker + try: + self.client = DockerClient() + self.client.ping() + except: + raise ConnectionRefusedError( + '\n\nIs docker running? 
Run "eval(minikube docker-env)" if you are using minikube...') + + def push(self, image_repository): + + logging.info(f"Pushing image {image_repository}") + for line in self.client.images.push(image_repository, stream=True, decode=True): + if not 'progressDetail' in line: + logging.info(line) + if 'errorDetail' in line: + raise Exception("Error occurred while pushing image: " + line['errorDetail']['message']) + + # filter the images to build + + def should_build_image(self, image_name) -> bool: + if image_name in self.exclude: + return False + if len(self.images) == 0: + if self.interactive: + answer = input("Do you want to build " + image_name + "? [Y/n]") + return answer.upper() != 'N' + return True + + if image_name in self.images: + return True + logging.info("Skipping build for image", image_name) + return False + + def run(self): + for root_path in self.root_paths: + self.find_and_build_under_path(BASE_IMAGES_PATH, context_path=root_path, root_path=root_path) + # Build static images that will be use as base for other images + self.find_and_build_under_path(STATIC_IMAGES_PATH, root_path=root_path) + + self.find_and_build_under_path(APPS_PATH, root_path=root_path) + + + def find_and_build_under_path(self, base_path, context_path=None, root_path=None): + abs_base_path = os.path.join(root_path, base_path) + docker_files = (path for path in find_dockerfiles_paths(abs_base_path) if + self.should_build_image(path)) + + for dockerfile_path in docker_files: + + dockerfile_rel_path = "" if not context_path else os.path.relpath(dockerfile_path, start=context_path) + # extract image name + image_name = image_name_from_docker_path(os.path.relpath(dockerfile_path, start=abs_base_path)) + self.build_image(image_name, dockerfile_rel_path, context_path=context_path if context_path else dockerfile_path) + + def build_image(self, image_name, dockerfile_rel_path, context_path=None): + + registry = "" if not self.registry else self.registry + '/' + # build image + image_tag = f'{registry}{image_name}:{self.tag}' if self.tag else image_name + + buildargs = dict(TAG=self.tag, REGISTRY=registry) + + # print header + logging.info(f'\n{80 * "#"}\nBuilding {image_tag} \n{80 * "#"}\n') + + logging.info("Build args: " + ",".join(key + ':' + value for key, value in buildargs.items())) + image, response = self.client.images.build(path=context_path, + tag=image_tag, + buildargs=buildargs, + dockerfile=os.path.join(dockerfile_rel_path, "Dockerfile") if dockerfile_rel_path else None + ) + + # log stream + for line in response: + if 'stream' in line and line['stream'] != '\n': + logging.info(line['stream'].replace('\n', ' ').replace('\r', '')) + if self.registry: + self.push(image_tag) diff --git a/utilities/cloudharness_utilities/codefresh.py b/utilities/cloudharness_utilities/codefresh.py new file mode 100644 index 00000000..b6ada5cb --- /dev/null +++ b/utilities/cloudharness_utilities/codefresh.py @@ -0,0 +1,71 @@ +import os +import pyaml +import yaml +import logging + +from .constants import HERE, BUILD_STEP_BASE, BUILD_STEP_STATIC, BUILD_STEP_PARALLEL, BUILD_STEP_INSTALL, \ + CODEFRESH_REGISTRY, K8S_IMAGE_EXCLUDE, CODEFRESH_PATH, CODEFRESH_BUILD_PATH, \ + CODEFRESH_TEMPLATE_PATH, APPS_PATH, STATIC_IMAGES_PATH, BASE_IMAGES_PATH +from .helm import collect_helm_values +from .utils import find_dockerfiles_paths, image_name_from_docker_path, \ + get_image_name, get_template, merge_to_yaml_file + +logging.getLogger().setLevel(logging.INFO) + +def create_codefresh_deployment_scripts(deployment_root, 
tag="${{CF_REVISION}}", codefresh_path=CODEFRESH_PATH): + """ + Entry point to create deployment scripts for codefresh: codefresh.yaml and helm chart + """ + + codefresh = get_template(CODEFRESH_TEMPLATE_PATH) + + codefresh['steps'][BUILD_STEP_BASE]['steps'] = {} + codefresh['steps'][BUILD_STEP_STATIC]['steps'] = {} + codefresh['steps'][BUILD_STEP_PARALLEL]['steps'] = {} + + def codefresh_build_step_from_base_path(base_path, build_step, root_context=None): + abs_base_path = os.path.join(deployment_root, base_path) + for dockerfile_path in find_dockerfiles_paths(abs_base_path): + app_relative_to_root = os.path.relpath(dockerfile_path, deployment_root) + app_relative_to_base = os.path.relpath(dockerfile_path, abs_base_path) + app_name = image_name_from_docker_path(app_relative_to_base) + if app_name in K8S_IMAGE_EXCLUDE: + continue + build = codefresh_app_build_spec(app_name, os.path.relpath(root_context, deployment_root) if root_context else app_relative_to_root, + dockerfile_path=os.path.join(os.path.relpath(dockerfile_path, root_context) if root_context else '', "Dockerfile")) + codefresh['steps'][build_step]['steps'][app_name] = build + + codefresh_build_step_from_base_path(BASE_IMAGES_PATH, BUILD_STEP_BASE, root_context=deployment_root) + codefresh_build_step_from_base_path(STATIC_IMAGES_PATH, BUILD_STEP_STATIC) + codefresh_build_step_from_base_path(APPS_PATH, BUILD_STEP_PARALLEL) + + codefresh['steps'] = {k:step for k, step in codefresh['steps'].items() if 'type' not in step or step['type'] != 'parallel' or step['steps']} + + with open(codefresh_path, 'w') as f: + pyaml.dump(codefresh, f) + + +def codefresh_build_spec(**kwargs): + """ + Create Codefresh build specification + :return: + """ + + build = get_template(CODEFRESH_BUILD_PATH) + + build.update(kwargs) + return build + + +def codefresh_app_build_spec(app_name, app_path, dockerfile_path="Dockerfile"): + logging.info('Generating build script for ' + app_name) + title = app_name.capitalize().replace('-', ' ').replace('/', ' ').replace('.', ' ').strip() + build = codefresh_build_spec(image_name=get_image_name(app_name), title=title, working_directory= './' + app_path, dockerfile=dockerfile_path) + + specific_build_template_path = os.path.join(app_path, 'build.yaml') + if os.path.exists(specific_build_template_path): + logging.info("Specific build template found:", specific_build_template_path) + with open(specific_build_template_path) as f: + build_specific = yaml.safe_load(f) + build.update(build_specific) + return build diff --git a/utilities/cloudharness_utilities/constants.py b/utilities/cloudharness_utilities/constants.py new file mode 100644 index 00000000..28def2ae --- /dev/null +++ b/utilities/cloudharness_utilities/constants.py @@ -0,0 +1,35 @@ +import os + +NODE_BUILD_IMAGE = 'node:8.16.1-alpine' + +HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/') +ROOT = os.path.dirname(HERE) + +HELM_PATH = "helm" +HELM_CHART_PATH = HELM_PATH + +INFRASTRUCTURE_PATH = 'infrastructure' +STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images') +BASE_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'base-images') +NEUTRAL_PATHS = ('src', 'tasks', 'server') +APPS_PATH = 'applications' + +CODEFRESH_PATH = 'codefresh/codefresh.yaml' + +CODEFRESH_BUILD_PATH = 'deployment-templates/codefresh-build-template.yaml' +CODEFRESH_TEMPLATE_PATH = 'deployment-templates/codefresh-template.yaml' +CODEFRESH_REGISTRY = "r.cfcr.io/tarelli" + +VALUES_MANUAL_PATH = 'values.yaml' +VALUE_TEMPLATE_PATH = 
diff --git a/utilities/cloudharness_utilities/constants.py b/utilities/cloudharness_utilities/constants.py new file mode 100644 index 00000000..28def2ae --- /dev/null +++ b/utilities/cloudharness_utilities/constants.py @@ -0,0 +1,35 @@ +import os + +NODE_BUILD_IMAGE = 'node:8.16.1-alpine' + +HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/') +ROOT = os.path.dirname(HERE) + +HELM_PATH = "helm" +HELM_CHART_PATH = HELM_PATH + +INFRASTRUCTURE_PATH = 'infrastructure' +STATIC_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'common-images') +BASE_IMAGES_PATH = os.path.join(INFRASTRUCTURE_PATH, 'base-images') +NEUTRAL_PATHS = ('src', 'tasks', 'server') +APPS_PATH = 'applications' + +CODEFRESH_PATH = 'codefresh/codefresh.yaml' + +CODEFRESH_BUILD_PATH = 'deployment-templates/codefresh-build-template.yaml' +CODEFRESH_TEMPLATE_PATH = 'deployment-templates/codefresh-template.yaml' +CODEFRESH_REGISTRY = "r.cfcr.io/tarelli" + +VALUES_MANUAL_PATH = 'values.yaml' +VALUE_TEMPLATE_PATH = 'deployment-templates/value-template.yaml' + +CH_BASE_IMAGES = {'cloudharness-base': 'python:3.7-alpine', 'cloudharness-base-debian': 'python:3'} + +K8S_IMAGE_EXCLUDE = ('accounts-keycloak-gatekeeper',) + +BUILD_STEP_BASE = 'x1_build_base_image' +BUILD_STEP_STATIC = 'x2_static_build' +BUILD_STEP_PARALLEL = 'x3_parallel_build' +BUILD_STEP_INSTALL = 'x4_deployment' + +DEPLOYMENT_CONFIGURATION_PATH = 'deployment-configuration' \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/README.md b/utilities/cloudharness_utilities/deployment-configuration/README.md new file mode 100644 index 00000000..ff3d1c51 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/README.md @@ -0,0 +1,8 @@ +# Default deployment templates +Templates used to customize the automatic infrastructure definition. +These files are used by the script `infrastructure-generate.py` + +- `values-template.yaml`: base for `helm/values.yaml`. Modify this file to add values related to new infrastructure elements not defined as a CloudHarness application (e.g. a new database) +- `value-template.yaml`: base for cloudharness application configuration inside `values.yaml`. Prefer adding a custom `values.yaml` to your application over changing this file. +- `codefresh-template.yaml`: base for `codefresh/codefresh.yaml`. Modify this file if you want to change the build steps inside codefresh +- `codefresh-build-template.yaml`: base for a single build entry in `codefresh.yaml`
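The override mechanism this README describes is implemented by `get_template` in `utils.py` (further down): the packaged default template is loaded first, and a same-named file found at the given path, if any, is deep-merged on top. A sketch, assuming it runs from a deployment root that may carry such an override:

```python
from cloudharness_utilities.utils import get_template

# Loads the packaged default, then deep-merges the local override if present
build_step = get_template('deployment-templates/codefresh-build-template.yaml')
```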
diff --git a/utilities/cloudharness_utilities/deployment-configuration/codefresh-build-template.yaml b/utilities/cloudharness_utilities/deployment-configuration/codefresh-build-template.yaml new file mode 100644 index 00000000..dec571af --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/codefresh-build-template.yaml @@ -0,0 +1,8 @@ +type: build +stage: build +tag: '${{CF_REVISION}}' +dockerfile: Dockerfile +when: + branch: + only: + - '${{CF_BRANCH}}' \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/codefresh-template.yaml b/utilities/cloudharness_utilities/deployment-configuration/codefresh-template.yaml new file mode 100644 index 00000000..c89d07c5 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/codefresh-template.yaml @@ -0,0 +1,67 @@ +version: '1.0' +stages: +- prepare +- build +- deploy +steps: + main_clone: + title: Cloning main repository... + type: git-clone + stage: prepare + repo: '${{CF_REPO_OWNER}}/${{CF_REPO_NAME}}' + revision: '${{CF_BRANCH}}' + git: github + x1_build_base_image: + type: parallel + stage: build + steps: + REPLACE_ME + x2_static_build: + type: parallel + stage: build + steps: + REPLACE_ME + x3_parallel_build: + type: parallel + stage: build + steps: + REPLACE_ME + + prepare_deployment: + title: "Prepare helm chart" + image: r.cfcr.io/tarelli/cloudharness-deploy + stage: deploy + working_directory: ./deployment + commands: + - python deployment-generate.py + + prepare_deployment_view2: + commands: + - 'helm template ./deployment/helm --debug -n ${{NAME}}' + environment: + - ACTION=auth + - KUBE_CONTEXT=${{NAME}} + image: codefresh/cfstep-helm:2.16.1 + stage: prepare + title: 'View helm chart' + + x4_deployment: + stage: deploy + image: 'codefresh/cfstep-helm:2.16.1' + title: Installing chart + environment: + - CHART_REF=./deployment/helm + - RELEASE_NAME=${{NAME}} + - KUBE_CONTEXT=${{NAME}} + - NAMESPACE=${{NAMESPACE}} + - TILLER_NAMESPACE=kube-system + - CHART_VERSION=0.0.1 + - HELM_REPO_USE_HTTP=false + - HELM_REPO_CONTEXT_PATH= + - TIMEOUT=600 + - VALUESFILE_values=./deployment/helm/values.yaml + + when: + branch: + only: + - '${{CF_BRANCH}}' \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/.helmignore b/utilities/cloudharness_utilities/deployment-configuration/helm/.helmignore new file mode 100644 index 00000000..50af0317 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/.helmignore @@ -0,0 +1,22 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/Chart.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/Chart.yaml new file mode 100644 index 00000000..bb11a716 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/Chart.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +appVersion: "0.0.1" +description: CloudHarness Helm Chart +name: cloudharness +version: 0.0.1 +maintainers: + - name: Facundo Rodriguez + email: facundo@metacell.us + - name: Filippo Ledda + email: filippo@metacell.us \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/README.md b/utilities/cloudharness_utilities/deployment-configuration/helm/README.md new file mode 100644 index 00000000..fe3e842e --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/README.md @@ -0,0 +1,121 @@ +# CloudHarness Helm chart: deploy CloudHarness to k8s + +Helm is used to define the CloudHarness deployment on Kubernetes. For further information about Helm, see https://helm.sh. + +## Before starting + +### Prerequisites + +#### Kubectl + +The Kubernetes command-line tool (`kubectl`) must be installed and configured to access the deployment cluster. +Possible targets are a cloud provider (e.g. Google Cloud) or Minikube (for testing). + +To install kubectl with Google Cloud, see https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl + +#### Helm + +A helm chart is used to perform the installation of your CloudHarness deployment on the cluster.
+To install helm, see https://helm.sh/docs/intro/install. + +With snap: +```bash +snap install helm --classic +helm init --wait +``` + +### If planning to use cloud provider + +* cert-manager and nginx-ingress charts should be installed in the cluster. + +### If planning to use minikube (run only once) + +For the first run, specify a fair amount of cpus (>=4) and ram (>=5000mb) + +``` +minikube start --disk-size="120000mb" --cpus=4 --memory="5000mb" +``` + +If not installed, add the ingress addon to minikube: +`minikube addons enable ingress` + +* Adding local volume + +```bash +alias miniku=helm +``` + +NOTE: start minikube using the alias + +* Adding domains to local host + +```bash +echo "$(minikube ip) [DOMAIN] airflow.[DOMAIN] keycloak.[DOMAIN] api.[DOMAIN] mapper.[DOMAIN] docs.[DOMAIN] neo4j.[DOMAIN] atlas.[DOMAIN] database.[DOMAIN] " | sudo tee -a /etc/hosts +``` + +NOTE: wildcards don't work. + +* Create (or get) a trusted *.pem certificate for your machine and put inside `./certs` + * macos -> `Keychain Access` > `Trust` > `Always trust` + * linux -> + +```bash + cp ./certs/mycert.pem /usr/local/share/ca-certificates/extra/cacert.crt + update-ca-certificates --verbose + cat /usr/local/share/ca-certificates/extra/cacert.crt >> /usr/local/lib/python3.7/site-packages/certifi/cacert.pem +``` + +NOTE: Some python packages (such as certifi) have their own list of trusted CA. You might or might not need to perform the last step. + +## Deployment + +* Namespace (only once) + +```bash +kubectl create namespace ch +``` +(any namespace will do the job) + +* Deploy + +Use helm to install chart (this will install all CloudHarness applications in the cluster) + +```bash +helm install ./ --name ch --namespace ch --set minikubeIp="$(minikube ip)" +``` + +* Update + +```bash +helm upgrade cloudharness ./ --namespace ch --install --force --reset-values --set minikubeIp="$(minikube ip)" +``` + +* List + +```bash +helm ls +``` + +* Delete + +```bash +helm del --purge ch +``` + +## Values + +* `minikube` + * (Boolean | true) Configures to deploy to minikube or cloud provider. +* `localIp` + * (String | required for local deployments) IP of the local cluster, e.g. the minikube IP. +* `domain` + * Base domain +* `env` + * Key value pairs inside `env` are copied to all containers +* `privenv` + * These are opaque secrets transferred to container env variables during spawning. + (Don't push them to GitHub) + +## Debug Chart + +* `--dry-run --debug`
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/NOTES.txt b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/NOTES.txt new file mode 100644 index 00000000..012476ed --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/NOTES.txt @@ -0,0 +1,11 @@ +{{- if .Values.ingress.enabled }} +1. Domains: + + https://{{ printf "%s" $.Values.domain }} + +{{- range $app := .Values.apps }} +{{- if and $app.subdomain }} + https://{{ printf "%s.%s" $app.subdomain $.Values.domain }} +{{- end}} +{{- end }} +{{- end }} \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/_helpers.tpl b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/_helpers.tpl new file mode 100644 index 00000000..033cc2c4 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/_helpers.tpl @@ -0,0 +1,113 @@ +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart.
+*/}} +{{- define "deploy_utils.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "deploy_utils.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "deploy_utils.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{/* +For custom images: if images are coming from local(e.g minikube) registry, image pull policy is "Never". Otherwise it is "IfNotPresent" +*/}} +{{- define "deploy_utils.pullpolicy" -}} +{{- if and .Values.local (not .Values.registry.name) -}} +{{- print "Never" -}} +{{- else -}} +{{- print "IfNotPresent" -}} +{{- end -}} +{{- end -}} +{{/* +Add environmental variables to all containers +*/}} +{{- define "deploy_utils.env" -}} +{{- range $pair := .Values.env }} +- name: {{ $pair.name | quote }} + value: {{ $pair.value | quote }} +{{- end }} +- name: CH_ACCOUNTS_CLIENT_SECRET + value: {{ .Values.apps.accounts.client.secret | quote }} +- name: CH_ACCOUNTS_REALM + value: {{ .Values.namespace | quote }} +- name: CH_ACCOUNTS_AUTH_DOMAIN + value: {{ printf "%s.%s" .Values.apps.accounts.subdomain .Values.domain | quote }} +- name: CH_ACCOUNTS_CLIENT_ID + value: {{ .Values.apps.accounts.client.id | quote }} +- name: DOMAIN + value: {{ .Values.domain | quote }} +{{- end -}} +{{/* +Add private environmental variables to all containers +*/}} +{{- define "deploy_utils.privenv" -}} +{{- range $env := .Values.privenv }} +- name: {{ $env.name | quote }} + valueFrom: + secretKeyRef: + name: deployment-secrets + key: {{ $env.name | quote }} +{{- end }} +{{- end -}} +{{/* +Defines docker registry +*/}} +{{- define "deploy_utils.registry" }} +{{- if not (eq .Values.registry.name "") }} +{{- printf "%s/" .Values.registry.name }} +{{- end }} +{{- end }} + +{{/* Create chart name and version as used by the chart label. */}} +{{- define "deploy_utils.chartref" -}} +{{- replace "+" "_" $.Chart.Version | printf "%s-%s" $.Chart.Name -}} +{{- end }} + +{{/* Generate basic labels */}} +{{- define "deploy_utils.labels" }} +chart: {{ template "deploy_utils.chartref" . 
}} +release: {{ $.Release.Name | quote }} +heritage: {{ $.Release.Service | quote }} +{{- if .Values.commonLabels}} +{{ toYaml .Values.commonLabels }} +{{- end }} +{{- end }} + + +{{/* /etc/hosts */}} +{{- define "deploy_utils.etcHosts" }} +{{- if .Values.local }} +{{ $domain := .Values.domain }} +hostAliases: + - ip: {{ .Values.localIp }} + hostnames: + {{ printf "- %s" .Values.domain }} + {{- range $app := .Values.apps }} + {{- if $app.subdomain }} + {{ printf "- %s.%s" $app.subdomain $domain }} + {{- end }} + {{- end }} +{{- end }} +{{- end }} diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-deployments.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-deployments.yaml new file mode 100644 index 00000000..fcd561f0 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-deployments.yaml @@ -0,0 +1,62 @@ +{{- define "deploy_utils.deployment" }} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .app.name | quote }} + namespace: {{ .root.Values.namespace }} + labels: + app: {{ .app.name | quote }} +{{ include "deploy_utils.labels" .root | indent 4 }} +spec: + replicas: {{ .app.replicas | default 1 }} + selector: + matchLabels: + app: {{ .app.name | quote }} +{{ include "deploy_utils.labels" .root | indent 6 }} + template: + metadata: + {{- if .app.harvest }} + annotations: + co.elastic.logs/enabled: "true" + metricbeat: "true" + {{- end }} + labels: + app: {{ .app.name | quote }} +{{ include "deploy_utils.labels" .root | indent 8 }} + spec: + {{ if .root.Values.registry.secret }} + imagePullSecrets: + - name: {{ .root.Values.registry.secret }} + {{- end }} + containers: + - name: {{ .app.name | default "cloudharness-docs" | quote }} + image: {{ .app.image }} + imagePullPolicy: {{ include "deploy_utils.pullpolicy" .root }} + env: + {{- include "deploy_utils.env" .root | nindent 8 }} + {{- include "deploy_utils.privenv" .root | nindent 8 }} + ports: + - containerPort: {{ .app.port | default 8080 }} + resources: + requests: + memory: {{ .app.resources.requests.memory | default "32Mi" }} + cpu: {{ .app.resources.requests.cpu | default "25m" }} + limits: + memory: {{ .app.resources.limits.memory | default "64Mi" }} + cpu: {{ .app.resources.limits.cpu | default "50m" }} +--- +{{- end }} +{{- range $app := .Values.apps }} + {{- if and (hasKey $app "port") $app.autodeploy | default false }} +--- + {{ include "deploy_utils.deployment" (dict "root" $ "app" $app) }} + {{- end }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and (hasKey $subapp "port") $subapp.autodeploy | default false }} +--- + {{ include "deploy_utils.deployment" (dict "root" $ "app" $subapp) }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} \ No newline at end of file diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-gatekeepers.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-gatekeepers.yaml new file mode 100644 index 00000000..00762314 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-gatekeepers.yaml @@ -0,0 +1,116 @@ +{{/* Secured Services/Deployments */}} +{{- define "deploy_utils.securedservice" }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: "{{ .app.name }}-gk" + labels: + app: "{{ .app.name }}-gk" +data: + proxy.yml: |- + verbose: true + discovery-url: https://{{ .root.Values.apps.accounts.subdomain }}.{{ 
.root.Values.domain }}/auth/realms/{{ .root.Values.namespace }} + client-id: {{ .root.Values.apps.accounts.webclient.id | quote }} + client-secret: {{ .root.Values.apps.accounts.webclient.secret }} + listen: 0.0.0.0:80 + enable-refresh-tokens: true + tls-cert: + tls-private-key: + redirection-url: https://{{ .app.subdomain }}.{{ .root.Values.domain }} + encryption-key: AgXa7xRcoClDEU0ZDSH4X0XhL5Qy2Z2j + upstream-url: http://{{ .app.name }}.{{ .app.namespace | default .root.Release.Namespace }}:{{ .app.port | default 80}} + scopes: + - vpn-user + resources: + - uri: /* + methods: + - GET + roles: + - {{ .root.Values.apps.accounts.admin.role }} +--- +apiVersion: v1 +kind: Service +metadata: + name: "{{ .app.name }}-gk" + labels: + app: "{{ .app.name }}-gk" +spec: + ports: + - name: http + port: 80 + selector: + app: "{{ .app.name }}-gk" + type: ClusterIP +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: "{{ .app.name }}-gk" + labels: + app: "{{ .app.name }}-gk" +spec: + replicas: 1 + selector: + matchLabels: + app: "{{ .app.name }}-gk" + template: + metadata: + labels: + app: "{{ .app.name }}-gk" + spec: + {{ if .root.Values.registry.secret }} + imagePullSecrets: + - name: {{ .root.Values.registry.secret }} + {{- end }} +{{ include "deploy_utils.etcHosts" .root | indent 6 }} + containers: + - name: {{ .app.name | quote }} + {{ if .root.Values.local }} + image: {{ include "deploy_utils.registry" .root }}{{ .root.Values.apps.accounts.gatekeeper.image}}:{{ .root.Values.tag }} + imagePullPolicy: {{ include "deploy_utils.pullpolicy" .root }} + {{ else }} + image: "keycloak/keycloak-gatekeeper:9.0.2" + imagePullPolicy: IfNotPresent + {{- end }} + + env: + - name: PROXY_CONFIG_FILE + value: /opt/proxy.yml + volumeMounts: + - name: "{{ .app.name }}-gk-proxy-config" + mountPath: /opt/proxy.yml + subPath: proxy.yml + ports: + - name: http + containerPort: {{ .root.Values.apps.accounts.port | default 8080 }} + - name: https + containerPort: 8443 + resources: + requests: + memory: "32Mi" + cpu: "50m" + limits: + memory: "64Mi" + cpu: "100m" + volumes: + - name: "{{ .app.name }}-gk-proxy-config" + configMap: + name: "{{ .app.name }}-gk" +--- +{{- end }} +{{- if .Values.secured_gatekeepers }} +{{- range $app := .Values.apps }} + {{- if and (hasKey $app "port") ($app.secureme) }} +--- + {{ include "deploy_utils.securedservice" (dict "root" $ "app" $app) }} + {{- end }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and (hasKey $subapp "port") $subapp.secureme }} +--- + {{ include "deploy_utils.securedservice" (dict "root" $ "app" $subapp) }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-services.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-services.yaml new file mode 100644 index 00000000..15fd1345 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/auto-services.yaml @@ -0,0 +1,35 @@ +{{/* Services */}} +{{- define "deploy_utils.service" }} +apiVersion: v1 +kind: Service +metadata: + name: {{ .app.name | quote }} + labels: + app: {{ .app.name | quote }} +{{ include "deploy_utils.labels" .root | indent 4 }} +spec: + selector: + app: {{ .app.name | quote }} + ports: + - port: {{ .app.port }} + name: http +{{- end }} + + +{{- range $app := .Values.apps }} + {{- if and (hasKey $app "port") ($app.autoservice | default true) }} +--- + {{ include "deploy_utils.service" (dict 
"root" $ "app" $app) }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and (hasKey $subapp "port") ($subapp.autoservice | default false) }} +--- + {{ include "deploy_utils.service" (dict "root" $ "app" $subapp) }} + {{- end }} + {{- end }} + {{- end }} + + {{- end }} + {{- end }} + + diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/certs/letsencrypt.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/certs/letsencrypt.yaml new file mode 100644 index 00000000..9e6357b9 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/certs/letsencrypt.yaml @@ -0,0 +1,16 @@ +{{ if not .Values.local }} +apiVersion: cert-manager.io/v1alpha2 +kind: ClusterIssuer +metadata: + name: letsencrypt +spec: + acme: + server: https://acme-v02.api.letsencrypt.org/directory + email: {{ .Values.ingress.letsencrypt.email }} + privateKeySecretRef: + name: letsencrypt + solvers: + - http01: + ingress: + ingressName: cloudharness-ingress +{{ end }} \ No newline at end of file diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/ingress.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/ingress.yaml new file mode 100644 index 00000000..204778a7 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/ingress.yaml @@ -0,0 +1,62 @@ +{{- if .Values.ingress.enabled }} +{{ $domain := .Values.domain }} +{{ $tls := and .Values.tls }} + {{ $secured_gatekeepers := and .Values.secured_gatekeepers }} +apiVersion: extensions/v1beta1 +kind: Ingress +metadata: + name: {{ .Values.ingress.name | quote }} + annotations: + {{- if not .Values.local }} + kubernetes.io/tls-acme: 'true' + kubernetes.io/ingress.class: nginx + cert-manager.io/cluster-issuer: letsencrypt + {{- end }} + nginx.ingress.kubernetes.io/ssl-redirect: 'false' +spec: + rules: + {{- range $app := .Values.apps }} + {{- if $app.domain }} + - host: {{ $app.domain | quote }} + http: + paths: + - path: / + backend: + serviceName: {{- if (and $app.secureme $secured_gatekeepers) }} "{{ $app.name }}-gk" {{- else }} {{ $app.name | quote }}{{- end }} + servicePort: {{- if (and $app.secureme $secured_gatekeepers) }} 80 {{- else }} {{ $app.port | default 80 }}{{- end }} + {{- else if $app.subdomain }} + - host: {{ printf "%s.%s" $app.subdomain $domain | quote }} + http: + paths: + - path: / + backend: + serviceName: {{- if (and $app.secureme $secured_gatekeepers) }} "{{ $app.name }}-gk" {{- else }} {{ $app.name | quote }}{{- end }} + servicePort: {{- if (and $app.secureme $secured_gatekeepers) }} 80 {{- else }} {{ $app.port | default 80 }}{{- end }} + {{- range $subapp := $app }} + {{- if contains "map" (typeOf $subapp) }} + {{- if and $subapp (hasKey $subapp "subdomain") }} + - host: {{ printf "%s.%s.%s" $subapp.subdomain $app.subdomain $domain | quote }} + http: + paths: + - path: / + backend: + serviceName: {{- if (and $app.secureme $secured_gatekeepers) }} "{{ $subapp.name }}-gk" {{- else }} {{ $subapp.name | quote }}{{- end }} + servicePort: {{- if (and $app.secureme $secured_gatekeepers) }} 80 {{- else }} {{ $subapp.port | default 80 }}{{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- end }} + {{- if $tls }} + tls: + - hosts: + {{- range $app := .Values.apps }} + {{- if $app.subdomain }} + - {{ printf "%s.%s" $app.subdomain $domain | quote }} + {{- else if $app.domain }} + - {{ $app.domain | quote }} + {{- end }} + {{- end }} 
+ secretName: {{ $tls | quote }} + {{- end }} +{{- end }} \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/templates/secrets.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/secrets.yaml new file mode 100644 index 00000000..c1a14486 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/helm/templates/secrets.yaml @@ -0,0 +1,12 @@ +{{- if .Values.privenv }} +apiVersion: v1 +kind: Secret +metadata: + name: deployment-secrets +type: Opaque +data: + {{- range $secret := .Values.privenv }} + {{ $secret.name }}: {{ $secret.value | b64enc | quote }} + {{- end }} +--- +{{- end }} \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/helm/values.yaml b/utilities/cloudharness_utilities/deployment-configuration/helm/values.yaml new file mode 100644 index 00000000..e69de29b
diff --git a/utilities/cloudharness_utilities/deployment-configuration/value-template.yaml b/utilities/cloudharness_utilities/deployment-configuration/value-template.yaml new file mode 100644 index 00000000..6f4d9c66 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/value-template.yaml @@ -0,0 +1,12 @@ +autoservice: true +name: CHANGE ME +image: + name: CHANGE ME + tag: ${{CF_REVISION}} +resources: + requests: + memory: "32Mi" + cpu: "25m" + limits: + memory: "500Mi" + cpu: "500m" \ No newline at end of file
diff --git a/utilities/cloudharness_utilities/deployment-configuration/values-template.yaml b/utilities/cloudharness_utilities/deployment-configuration/values-template.yaml new file mode 100644 index 00000000..c494ad18 --- /dev/null +++ b/utilities/cloudharness_utilities/deployment-configuration/values-template.yaml @@ -0,0 +1,26 @@ +local: false +secured_gatekeepers: true +nameOverride: "" +fullnameOverride: "" +domain: ${{DOMAIN}} +namespace: ch +registry: + name: "localhost:5000" + secret: ${{REGISTRY_SECRET}} +tag: latest +serviceaccount: argo-workflows +apps: + FILL_ME +env: + - name: CH_VERSION + value: 0.0.1 + - name: CH_CHART_VERSION + value: 0.0.1 +privenv: + - name: CH_SECRET + value: In God we trust; all others must bring data. ― W.
Edwards Deming +ingress: + enabled: true + name: cloudharness-ingress + letsencrypt: + email: facundo@metacell.us \ No newline at end of file diff --git a/utilities/cloudharness_utilities/helm.py b/utilities/cloudharness_utilities/helm.py new file mode 100644 index 00000000..64903d70 --- /dev/null +++ b/utilities/cloudharness_utilities/helm.py @@ -0,0 +1,202 @@ +""" +Utilities to create a helm chart from a CloudHarness directory structure +""" +import yaml +import os +import shutil +import logging +import subprocess + +from .constants import VALUES_MANUAL_PATH, VALUE_TEMPLATE_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, HERE, DEPLOYMENT_CONFIGURATION_PATH +from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, image_name_from_docker_path, \ + get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge + + + + +def create_helm_chart(root_paths, tag='latest', registry='', local=True, domain=None, exclude=(), secured=True, output_path='./deployment'): + """ + Creates values file for the helm chart + """ + dest_deployment_path = os.path.join(output_path, HELM_CHART_PATH) + + # Initialize with default + copy_merge_base_deployment(dest_deployment_path, os.path.join(HERE, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) + helm_values = collect_helm_values(HERE, tag=tag, registry=registry, exclude=exclude) + + # Override for every cloudharness scaffolding + for root_path in root_paths: + copy_merge_base_deployment(dest_helm_chart_path=dest_deployment_path, base_helm_chart=os.path.join(root_path, DEPLOYMENT_CONFIGURATION_PATH, HELM_PATH)) + collect_apps_helm_templates(root_path, exclude=exclude, dest_helm_chart_path=dest_deployment_path) + helm_values = dict_merge(helm_values, collect_helm_values(root_path, tag=tag, registry=registry, exclude=exclude)) + + finish_helm_values(values=helm_values, tag=tag, registry=registry, local=local, domain=domain, secured=secured) + # Save values file for manual helm chart + merged_values = merge_to_yaml_file(helm_values, os.path.join(dest_deployment_path, VALUES_MANUAL_PATH)) + return merged_values + + +def merge_helm_chart(source_templates_path, dest_helm_chart_path=HELM_CHART_PATH): + pass + + +def collect_apps_helm_templates(search_root, dest_helm_chart_path, exclude=()): + """ + Searches recursively for helm templates inside the applications and collects the templates in the destination + + :param search_root: + :param dest_helm_chart_path: collected helm templates destination folder + :param exclude: + :return: + """ + app_base_path = os.path.join(search_root, APPS_PATH) + + for app_path in get_sub_paths(app_base_path): + app_name = image_name_from_docker_path(os.path.relpath(app_path, app_base_path)) + if app_name in exclude: + continue + template_dir = os.path.join(app_path, 'deploy/templates') + if os.path.exists(template_dir): + dest_dir = os.path.join(dest_helm_chart_path, 'templates', app_name) + + logging.info(f"Collecting templates for application {app_name} to {dest_dir}") + if os.path.exists(dest_dir): + logging.warning("Merging/overriding all files in directory " + dest_dir) + merge_configuration_directories(template_dir, dest_dir) + else: + shutil.copytree(template_dir, dest_dir) + resources_dir = os.path.join(app_path, 'deploy/resources') + if os.path.exists(resources_dir): + dest_dir = os.path.join(dest_helm_chart_path, 'resources', app_name) + + logging.info(f"Collecting resources for application {app_name} to {dest_dir}") + if os.path.exists(dest_dir): + shutil.rmtree(dest_dir) + 
shutil.copytree(resources_dir, dest_dir) + + +def copy_merge_base_deployment(dest_helm_chart_path, base_helm_chart): + if not os.path.exists(base_helm_chart): + return + if os.path.exists(dest_helm_chart_path): + logging.info("Merging/overriding all files in directory {}".format(dest_helm_chart_path)) + merge_configuration_directories(base_helm_chart, dest_helm_chart_path) + else: + logging.info("Copying base deployment chart from {} to {}".format(base_helm_chart, dest_helm_chart_path)) + shutil.copytree(base_helm_chart, dest_helm_chart_path) + + +def collect_helm_values(deployment_root, exclude=(), tag='latest', registry=''): + """ + Creates helm values from a cloudharness deployment scaffolding + """ + + values_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'values-template.yaml') + value_spec_template_path = os.path.join(deployment_root, DEPLOYMENT_CONFIGURATION_PATH, 'value-template.yaml') + if not os.path.exists(values_template_path): + values = {} + else: + values = get_template(values_template_path) + + values['apps'] = {} + + app_base_path = os.path.join(deployment_root, APPS_PATH) + for app_path in get_sub_paths(app_base_path): + app_name = image_name_from_docker_path(os.path.relpath(app_path, app_base_path)) + + if app_name in exclude: + continue + + app_values = create_values_spec(app_name, app_path, tag=tag, registry=registry, template_path=value_spec_template_path) + values['apps'][app_name.replace('-', '_')] = app_values + + return values + + +def finish_helm_values(values, tag='latest', registry='', local=True, domain=None, secured=True): + """ + Sets default overridden values + """ + if registry: + logging.info(f"Registry set: {registry}") + if local: + values['registry']['secret'] = '' + values['registry']['name'] = registry # Otherwise leave default for codefresh + values['tag'] = tag # Otherwise leave default for codefresh + values['secured_gatekeepers'] = secured + + if domain: + values['domain'] = domain + + values['local'] = local + if local: + try: + values['localIp'] = get_cluster_ip() + except subprocess.TimeoutExpired: + logging.warning("Minikube not available") + + # Create environment variables + create_env_variables(values) + return values + +def create_values_spec(app_name, app_path, tag=None, registry='', template_path=VALUE_TEMPLATE_PATH): + logging.info('Generating values script for ' + app_name) + + values = get_template(template_path) + if registry and registry[-1] != '/': + registry = registry + '/' + values['name'] = app_name + + values['image'] = registry + get_image_name(app_name) + (f':{tag}' if tag else '') + + specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml') + if os.path.exists(specific_template_path): + logging.info("Specific values template found: " + specific_template_path) + with open(specific_template_path) as f: + values_specific = yaml.safe_load(f) + values.update(values_specific) + return values + + +def extract_env_variables_from_values(values, envs=tuple(), prefix=''): + if isinstance(values, dict): + newenvs = list(envs) + for key, value in values.items(): + v = extract_env_variables_from_values(value, envs, f"{prefix}_{key}".replace('-', '_').upper()) + if key in ('name', 'port', 'subdomain'): + newenvs.extend(v) + return newenvs + else: + return [env_variable(prefix, values)] + + +def create_env_variables(values): + for app_name, value in values['apps'].items(): + values['env'].extend(extract_env_variables_from_values(value, prefix='CH_' + app_name))
values['env'].append(env_variable('CH_DOMAIN', values['domain'])) + values['env'].append(env_variable('CH_IMAGE_REGISTRY', values['registry']['name'])) + values['env'].append(env_variable('CH_IMAGE_TAG', values['tag'])) + + +def hosts_info(values): + + domain = values['domain'] + namespace = values['namespace'] + subdomains = (app['subdomain'] for app in values['apps'].values() if 'subdomain' in app and app['subdomain']) + try: + ip = get_cluster_ip() + except Exception: + return + logging.info("\nTo test locally, update your hosts file" + f"\n{ip}\t{' '.join(sd + '.' + domain for sd in subdomains)}") + + services = (app['name'].replace("-", "_") for app in values['apps'].values() if 'name' in app) + + logging.info("\nTo run some apps locally, these port forwardings and host entries may also be needed") + for appname in values['apps']: + app = values['apps'][appname] + if 'name' not in app or 'port' not in app: continue + print( + "kubectl port-forward -n {namespace} deployment/{app} {port}:{port}".format( + app=appname, port=app['port'], namespace=namespace)) + + print(f"127.0.0.1\t{' '.join(s + '.cloudharness' for s in services)}")
diff --git a/utilities/cloudharness_utilities/openapi.py b/utilities/cloudharness_utilities/openapi.py new file mode 100644 index 00000000..1ea3452f --- /dev/null +++ b/utilities/cloudharness_utilities/openapi.py @@ -0,0 +1,55 @@ +import os + +import subprocess +import sys +import json +import glob +import urllib.request +from cloudharness_utilities import HERE +import logging +CODEGEN = os.path.join(HERE, 'bin', 'openapi-generator-cli.jar') +APPLICATIONS_SRC_PATH = os.path.join('applications') +LIB_NAME = 'cloudharness_cli' + +OPENAPI_GEN_URL = 'https://repo1.maven.org/maven2/org/openapitools/openapi-generator-cli/4.3.0/openapi-generator-cli-4.3.0.jar' + +def generate_server(app_path): + openapi_dir = os.path.join(app_path, 'api') + openapi_file = glob.glob(os.path.join(openapi_dir, '*.yaml'))[0] + command = f"java -jar {CODEGEN} generate -i {openapi_file} -g python-flask -o {app_path}/server -c {openapi_dir}/config.json" + os.system(command) + + +def generate_client(module, openapi_file, CLIENT_SRC_PATH): + with open('config-client.json', 'w') as f: + f.write(json.dumps(dict(packageName=f"{LIB_NAME}.{module}"))) + command = f"java -jar {CODEGEN} generate " \ + f"-i {openapi_file} " \ + f"-g python " \ + f"-o {CLIENT_SRC_PATH}/tmp-{module} " \ + f"-c config-client.json" + os.system(command) + os.remove('config-client.json') + + +def get_dependencies(): + """ + Checks that java is installed + Checks that openapi-generator-cli.jar exists, downloading version 4.3.0 if missing + File paths assume the script is run from the script directory + """ + try: + subprocess.check_output(['java', '-version']) + except Exception: + sys.exit('java not found') + + if not os.path.exists(CODEGEN): + logging.warning("Code generator client not found \n") + cdir = os.path.dirname(CODEGEN) + if not os.path.exists(cdir): + os.makedirs(cdir) + urllib.request.urlretrieve(OPENAPI_GEN_URL, CODEGEN) + + +get_dependencies()
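For a single application the generator above is driven with the application root, as `harness-application` and `harness-generate` do further down; a sketch (the path is illustrative and assumes an `api/*.yaml` spec plus an `api/config.json`, as in the samples application):

```python
from cloudharness_utilities.openapi import generate_server

# Regenerates the python-flask server stub under applications/samples/server
generate_server('applications/samples')
```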
get_image_name("-".join(p for p in dockerfile_path.split("/") if p not in NEUTRAL_PATHS)) + + +def get_sub_paths(base_path): + return tuple(path for path in glob.glob(base_path + "/*") if os.path.isdir(path)) + + +def find_dockerfiles_paths(base_directory): + return tuple(os.path.dirname(path).replace(os.path.sep, "/") for path in + glob.glob(f"{base_directory}/**/Dockerfile", recursive=True)) + + +def get_parent_app_name(app_relative_path): + return app_relative_path.split("/")[0] if "/" in app_relative_path else "" + + +def get_image_name(app_name, base_name=None): + return base_name + '-' + app_name if base_name else app_name + + +def get_image_name_from_dockerfile_path(self, dockerfile_path, base_name): + return get_image_name(os.path.basename(os.path.dirname(dockerfile_path)), base_name) + + +def env_variable(name, value): + return {'name': f"{name}".upper(), 'value': value} + + +def get_cluster_ip(): + out = str(subprocess.check_output(['kubectl', 'cluster-info'], timeout=1)) + ip = out.split('://')[1].split(':')[0] + return ip + + +def get_template(yaml_path): + with open(os.path.join(HERE, DEPLOYMENT_CONFIGURATION_PATH, os.path.basename(yaml_path))) as f: + dict_template = yaml.safe_load(f) + if os.path.exists(yaml_path): + with open(yaml_path) as f: + override_tpl = yaml.safe_load(f) + if override_tpl: + dict_template = dict_merge(dict_template, override_tpl) + return dict_template + + +def file_is_yaml(fname): + return fname[-4:] == 'yaml' or fname[-3:] == 'yml' + + +def merge_configuration_directories(source, dest): + if not os.path.exists(dest): + shutil.copytree(source, dest) + return + + for fname in glob.glob(source + "/*"): + frel = os.path.relpath(fname, start=source) + fdest = os.path.join(dest, frel) + + if os.path.isdir(fname): + merge_configuration_directories(fname, fdest) + continue + + if not os.path.exists(fdest): + shutil.copy(fname, fdest) + elif file_is_yaml(fname): + + try: + merge_yaml_files(fname, fdest) + logging.info(f"Merged/overridden file content of {fdest} with {fname}") + except yaml.YAMLError as e: + logging.warning(f"Overwriting file {fdest} with {fname}") + shutil.copy(fname, fdest) + else: + logging.warning(f"Overwriting file {fdest} with {fname}") + shutil.copy(fname, fdest) + + +def merge_yaml_files(fname, fdest): + with open(fname) as f: + content_src = yaml.safe_load(f) + merge_to_yaml_file(content_src, fdest) + + +def merge_to_yaml_file(content_src, fdest): + if not content_src: + return + if not os.path.exists(fdest): + merged = content_src + else: + with open(fdest) as f: + content_dest = yaml.safe_load(f) + + merged = dict_merge(content_dest, content_src) if content_dest else content_src + + if not os.path.exists(os.path.dirname(fdest)): + os.makedirs(os.path.dirname(fdest)) + with open(fdest, "w") as f: + pyaml.dump(merged, f) + return merged + + +def dict_merge(dct, merge_dct, add_keys=True): + """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of + updating only top-level keys, dict_merge recurses down into dicts nested + to an arbitrary depth, updating keys. The ``merge_dct`` is merged into + ``dct``. + + This version will return a copy of the dictionary and leave the original + arguments untouched. + + The optional argument ``add_keys``, determines whether keys which are + present in ``merge_dict`` but not ``dct`` should be included in the + new dict. 
+ + Args: + dct (dict): dict onto which the merge is executed + merge_dct (dict): dict merged into dct + add_keys (bool): whether to add new keys + + Returns: + dict: updated dict + """ + dct = dct.copy() + if not add_keys: + merge_dct = { + k: merge_dct[k] + for k in set(dct).intersection(set(merge_dct)) + } + + for k, v in merge_dct.items(): + if (k in dct and isinstance(dct[k], dict) + and isinstance(merge_dct[k], collections.abc.Mapping)): + dct[k] = dict_merge(dct[k], merge_dct[k], add_keys=add_keys) + else: + dct[k] = merge_dct[k] + + return dct
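A worked example of the merge semantics implemented above, which is also how application `values.yaml` files end up overriding the packaged value templates; the dictionaries are illustrative:

```python
from cloudharness_utilities.utils import dict_merge

base = {'apps': {'docs': {'port': 8080}}, 'tag': 'latest'}
override = {'apps': {'docs': {'subdomain': 'docs'}}}

merged = dict_merge(base, override)
# Nested dicts are merged key by key rather than replaced wholesale:
assert merged == {'apps': {'docs': {'port': 8080, 'subdomain': 'docs'}},
                  'tag': 'latest'}
```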
diff --git a/utilities/harness-application b/utilities/harness-application new file mode 100644 index 00000000..b5daec5e --- /dev/null +++ b/utilities/harness-application @@ -0,0 +1,31 @@ +#!/usr/bin/env python + +import sys +import os +import shutil + +from cloudharness_utilities import HERE +import cloudharness_utilities.openapi +from cloudharness_utilities.openapi import generate_server, APPLICATIONS_SRC_PATH + +APPLICATION_TEMPLATE = os.path.join(HERE, 'application-template') + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser( + description='Creates a new Application.') + parser.add_argument('name', metavar='name', type=str, + help='Application name') + args, unknown = parser.parse_known_args(sys.argv[1:]) + + if unknown: + print('There are unknown args. Make sure to call the script with the accepted args. Try --help') + print(f'unknown: {unknown}') + else: + app_path = os.path.join(APPLICATIONS_SRC_PATH, args.name) + shutil.copytree(APPLICATION_TEMPLATE, app_path) # TODO replace the name inside the template + generate_server(app_path) + + +
diff --git a/utilities/harness-codefresh b/utilities/harness-codefresh new file mode 100644 index 00000000..8a95a89c --- /dev/null +++ b/utilities/harness-codefresh @@ -0,0 +1,27 @@ +#!/usr/bin/env python + +import sys +import os + +from cloudharness_utilities.codefresh import create_codefresh_deployment_scripts +from cloudharness_utilities.helm import create_helm_chart + + +HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/') +ROOT = os.path.dirname(HERE).replace(os.path.sep, '/') + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser( + description='Walk filesystem inside ./applications and define build and deploy scripts.') + parser.add_argument('paths', metavar='paths', default=ROOT, type=str, nargs='*', + help='Base paths to start looking for applications. The paths will be processed in the given order.') + args, unknown = parser.parse_known_args(sys.argv[1:]) + + if unknown: + print('There are unknown args. Make sure to call the script with the accepted args. Try --help') + print(f'unknown: {unknown}') + else: + create_codefresh_deployment_scripts(args.paths) +
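Programmatically, the chart generation that `harness-deployment` (below) drives from the command line reduces to one call; a sketch with illustrative arguments:

```python
from cloudharness_utilities.helm import create_helm_chart

# Collects templates and values from the given roots into ./deployment/helm
values = create_helm_chart(['.'], tag='latest', registry='localhost:5000',
                           local=True, domain='cloudharness.metacell.us',
                           secured=True, output_path='./deployment')
```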
diff --git a/utilities/harness-deployment b/utilities/harness-deployment
new file mode 100644
index 00000000..398e7bda
--- /dev/null
+++ b/utilities/harness-deployment
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+import sys
+import os
+
+from cloudharness_utilities.build import Builder
+from cloudharness_utilities.helm import create_helm_chart, hosts_info
+
+
+HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/')
+ROOT = os.path.dirname(os.path.dirname(HERE)).replace(os.path.sep, '/')
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser(
+        description='Walk the filesystem inside ./applications and define build and deploy scripts.')
+
+    parser.add_argument('paths', metavar='paths', default=[ROOT], type=str, nargs='*',
+                        help='Base paths to start looking for applications. The paths are processed in the given order.')
+    parser.add_argument('-i', '--image', dest='image', action="append", default=[],
+                        help='Specify the images to build. Omit to build images for all Dockerfiles.')
+    parser.add_argument('-t', '--tag', dest='tag', action="store", default='latest',
+                        help='Tag the images with the given version (default `latest`)')
+    parser.add_argument('-r', '--registry', dest='registry', action="store", default='',
+                        help='Specify the image registry prefix')
+    parser.add_argument('-o', '--output', dest='output_path', action="store", default='./deployment',
+                        help='Specify the helm chart base path (default `./deployment`)')
+    parser.add_argument('-b', '--build', dest='build', action="store_true",
+                        help='Build and push Docker images to the specified registry (if any)')
+    parser.add_argument('-d', '--domain', dest='domain', action="store", default="cloudharness.metacell.us",
+                        help='Specify a domain different from cloudharness.metacell.us')
+    parser.add_argument('-bi', '--build-interactive', dest='build_interactive', action="store_true",
+                        help='Build and push Docker images to the specified registry (if any). '
+                             'Asks interactively which images to build')
+    parser.add_argument('-l', '--local', dest='local', action="store_true",
+                        help='Specify for local deployment info and setup')
+    parser.add_argument('-u', '--disable-security', dest='unsecured', action="store_true",
+                        help='Disable secured gatekeeper access')
+    parser.add_argument('-e', '--exclude', dest='exclude', action="append", default=(),
+                        help='Specify applications to exclude from the deployment')
+    args, unknown = parser.parse_known_args(sys.argv[1:])
+
+    root_paths = [os.path.join(os.getcwd(), path) for path in args.paths]
+    if unknown:
+        print('There are unknown args. Make sure to call the script with the accepted args. Try --help')
+        print(f'unknown: {unknown}')
+    else:
+        if args.build or args.build_interactive:
+            Builder(root_paths, args.image, tag=args.tag, registry=args.registry,
+                    interactive=args.build_interactive, exclude=args.exclude).run()
+
+        values_manual_deploy = create_helm_chart(root_paths, tag=args.tag, registry=args.registry,
+                                                 domain=args.domain, local=args.local,
+                                                 secured=not args.unsecured,
+                                                 output_path=args.output_path, exclude=args.exclude)
+        if args.local:
+            hosts_info(values_manual_deploy)
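The same build-and-chart flow can also be driven programmatically; a minimal sketch based on the calls in harness-deployment above (the argument values are illustrative defaults, not a prescribed configuration):

    from cloudharness_utilities.build import Builder
    from cloudharness_utilities.helm import create_helm_chart, hosts_info

    root_paths = ['.']
    Builder(root_paths, [], tag='latest', registry='',
            interactive=False, exclude=()).run()
    values = create_helm_chart(root_paths, tag='latest', registry='',
                               domain='cloudharness.metacell.us', local=True,
                               secured=True, output_path='./deployment', exclude=())
    hosts_info(values)  # prints host entries for local deployments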
diff --git a/utilities/harness-generate b/utilities/harness-generate
new file mode 100644
index 00000000..ca6b275e
--- /dev/null
+++ b/utilities/harness-generate
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+
+import glob
+import os
+import shutil
+
+from cloudharness_utilities.openapi import LIB_NAME, generate_client, generate_server
+
+HERE = os.path.dirname(os.path.realpath(__file__))
+ROOT = os.path.dirname(HERE)
+
+OPENAPI_FILES = glob.glob(ROOT + '/applications/*/api/*.yaml')
+MODULES = [os.path.basename(os.path.dirname(os.path.dirname(path))) for path in OPENAPI_FILES]
+
+CLIENT_SRC_PATH = os.path.join(ROOT, 'client', LIB_NAME)
+
+
+def generate_servers():
+    """
+    Generates a server stub for each application that defines an openapi specification.
+    """
+    for openapi_file in OPENAPI_FILES:
+        application_root = os.path.dirname(os.path.dirname(openapi_file))
+        generate_server(application_root)
+
+
+def copymergedir(root_src_dir, root_dst_dir):
+    """
+    Copies a directory tree, merging it into an existing destination
+    (shutil.copytree requires that the destination does not exist).
+    :param root_src_dir: source directory
+    :param root_dst_dir: destination directory; created if missing
+    """
+    for src_dir, dirs, files in os.walk(root_src_dir):
+        dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
+        if not os.path.exists(dst_dir):
+            os.makedirs(dst_dir)
+        for file_ in files:
+            src_file = os.path.join(src_dir, file_)
+            dst_file = os.path.join(dst_dir, file_)
+            if os.path.exists(dst_file):
+                os.remove(dst_file)
+            shutil.copy(src_file, dst_dir)
+
+
+def aggregate_packages():
+    DOCS_PATH = os.path.join(CLIENT_SRC_PATH, 'docs')
+    TEST_PATH = os.path.join(CLIENT_SRC_PATH, 'test')
+    README = os.path.join(CLIENT_SRC_PATH, 'README.md')
+    REQUIREMENTS = os.path.join(CLIENT_SRC_PATH, 'requirements.txt')
+    TEST_REQUIREMENTS = os.path.join(CLIENT_SRC_PATH, 'test-requirements.txt')
+
+    if not os.path.exists(DOCS_PATH):
+        os.mkdir(DOCS_PATH)
+    if not os.path.exists(TEST_PATH):
+        os.mkdir(TEST_PATH)
+    if os.path.exists(README):
+        os.remove(README)
+    if os.path.exists(REQUIREMENTS):
+        os.remove(REQUIREMENTS)
+    if os.path.exists(TEST_REQUIREMENTS):
+        os.remove(TEST_REQUIREMENTS)
+
+    req_lines_seen = set()
+    test_req_lines_seen = set()
+
+    for MODULE_TMP_PATH in glob.glob(CLIENT_SRC_PATH + '/tmp-*'):
+        module = MODULE_TMP_PATH.split(f'{LIB_NAME}/tmp-')[-1]
+
+        # Moves the generated package
+        code_dest_dir = os.path.join(CLIENT_SRC_PATH, LIB_NAME, module)
+        copymergedir(os.path.join(MODULE_TMP_PATH, LIB_NAME, module), code_dest_dir)
+        copymergedir(f"{MODULE_TMP_PATH}/{LIB_NAME}.{module}", code_dest_dir)  # Fixes a bug with nested packages
+
+        # Adds docs
+        module_doc_path = os.path.join(DOCS_PATH, module)
+        if not os.path.exists(module_doc_path):
+            os.mkdir(module_doc_path)
+        copymergedir(f"{CLIENT_SRC_PATH}/tmp-{module}/docs", module_doc_path)
+
+        # Adds tests
+        module_test_path = os.path.join(CLIENT_SRC_PATH, 'test', module)
+        copymergedir(os.path.join(MODULE_TMP_PATH, 'test'), module_test_path)
+
+        # Merges the readme, pointing doc links to the module subdirectory
+        readme_file = f"{MODULE_TMP_PATH}/README.md"
+        with open(README, 'a+') as outfile:
+            with open(readme_file) as infile:
+                filedata = infile.read()
+                fd = filedata.replace('docs/', f'docs/{module}/')
+                outfile.write(fd)
+
+        # Merges requirements, skipping duplicated lines
+        # FIXME: different versions of the same package will both remain in the output file
+        requirements_file = f"{MODULE_TMP_PATH}/requirements.txt"
+        with open(REQUIREMENTS, 'a+') as outfile:
+            for line in open(requirements_file):
+                if line not in req_lines_seen:
+                    outfile.write(line)
+                    req_lines_seen.add(line)
+
+        # Merges test requirements, skipping duplicated lines
+        # FIXME: different versions of the same package will both remain in the output file
+        test_requirements_file = f"{MODULE_TMP_PATH}/test-requirements.txt"
+        with open(TEST_REQUIREMENTS, 'a+') as outfile:
+            for line in open(test_requirements_file):
+                if line not in test_req_lines_seen:
+                    outfile.write(line)
+                    test_req_lines_seen.add(line)
+
+        # Removes the tmp files
+        shutil.rmtree(MODULE_TMP_PATH)
+
+
+def generate_clients():
+    """
+    Generates a client stub from each application openapi specification,
+    then aggregates the per-module packages into a single client package.
+    """
+    for module, openapi_file in zip(MODULES, OPENAPI_FILES):
+        generate_client(module, openapi_file, CLIENT_SRC_PATH)
+
+    aggregate_packages()
+
+
+if __name__ == "__main__":
+    generate_servers()
+    generate_clients()
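A minimal sketch of what copymergedir achieves, assuming the function defined above is in scope (plain shutil.copytree would fail here because the destination already exists after the first call):

    import os
    import tempfile

    src1, src2, dst = tempfile.mkdtemp(), tempfile.mkdtemp(), tempfile.mkdtemp()
    with open(os.path.join(src1, 'a.txt'), 'w') as f:
        f.write('1')
    with open(os.path.join(src2, 'a.txt'), 'w') as f:
        f.write('2')

    copymergedir(src1, dst)
    copymergedir(src2, dst)  # merges into the existing tree, overwriting a.txt
    with open(os.path.join(dst, 'a.txt')) as f:
        assert f.read() == '2'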
diff --git a/utilities/requirements.txt b/utilities/requirements.txt
new file mode 100644
index 00000000..cd8adc79
--- /dev/null
+++ b/utilities/requirements.txt
@@ -0,0 +1,2 @@
+pyaml
+docker
\ No newline at end of file
diff --git a/utilities/setup.py b/utilities/setup.py
new file mode 100644
index 00000000..1bc8f265
--- /dev/null
+++ b/utilities/setup.py
@@ -0,0 +1,44 @@
+# coding: utf-8
+
+"""
+    CloudHarness deploy
+
+    OpenAPI spec version: 0.6.5
+    Contact: cloudharness@metacell.us
+"""
+
+from setuptools import setup, find_packages
+
+
+NAME = "cloudharness-utilities"
+VERSION = "0.1.0"
+# To install the library, run the following
+#
+# python setup.py install
+#
+# prerequisite: setuptools
+# http://pypi.python.org/pypi/setuptools
+
+REQUIREMENTS = [
+    'pyaml',
+    'docker'
+]
+
+setup(
+    name=NAME,
+    version=VERSION,
+    description="CloudHarness deploy utilities library",
+    author_email="cloudharness@metacell.us",
+    url="",
+    keywords=["Cloud", "Kubernetes", "Helm", "Deploy"],
+    install_requires=REQUIREMENTS,
+    packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
+    include_package_data=True,
+    scripts=['harness-codefresh', 'harness-deployment', 'harness-generate', 'harness-application'],
+    long_description="""\
+    MetaCell Neuroscience Platform - CloudHarness deploy library
+    """
+)
diff --git a/utilities/tests/resources/conf-source1/a.yaml b/utilities/tests/resources/conf-source1/a.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source1/a.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source1/b.yaml b/utilities/tests/resources/conf-source1/b.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source1/b.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source1/sub/a.yaml b/utilities/tests/resources/conf-source1/sub/a.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source1/sub/a.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source1/sub/b.yaml b/utilities/tests/resources/conf-source1/sub/b.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source1/sub/b.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source1/t.txt b/utilities/tests/resources/conf-source1/t.txt
new file mode 100644
index 00000000..56a6051c
--- /dev/null
+++ b/utilities/tests/resources/conf-source1/t.txt
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source2/a.yaml b/utilities/tests/resources/conf-source2/a.yaml
new file mode 100644
index 00000000..82c53630
--- /dev/null
+++ b/utilities/tests/resources/conf-source2/a.yaml
@@ -0,0 +1,4 @@
+a: a1
+b:
+  ba: ba1
+  bc: bc
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source2/c.yaml b/utilities/tests/resources/conf-source2/c.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source2/c.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source2/sub/a.yaml b/utilities/tests/resources/conf-source2/sub/a.yaml
new file mode 100644
index 00000000..82c53630
--- /dev/null
+++ b/utilities/tests/resources/conf-source2/sub/a.yaml
@@ -0,0 +1,4 @@
+a: a1
+b:
+  ba: ba1
+  bc: bc
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source2/sub/c.yaml b/utilities/tests/resources/conf-source2/sub/c.yaml
new file mode 100644
index 00000000..f6b91c43
--- /dev/null
+++ b/utilities/tests/resources/conf-source2/sub/c.yaml
@@ -0,0 +1,4 @@
+a: a
+b:
+  ba: ba
+  bb: bb
\ No newline at end of file
diff --git a/utilities/tests/resources/conf-source2/t.txt b/utilities/tests/resources/conf-source2/t.txt
new file mode 100644
index 00000000..d8263ee9
--- /dev/null
+++ b/utilities/tests/resources/conf-source2/t.txt
@@ -0,0 +1 @@
+2
\ No newline at end of file
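The fixtures above feed the merge test below: conf-source2 is overlaid on conf-source1, so the merged a.yaml is expected to load as the following dict (matching the assertions in test_merge_configuration_directories):

    expected_a = {
        'a': 'a1',                                   # overridden by conf-source2
        'b': {'ba': 'ba1', 'bb': 'bb', 'bc': 'bc'},  # nested keys merged
    }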
"a.yaml")) as f: + a = yaml.safe_load(f) + assert a['a'] == 'a1' + assert a['b']['ba'] == 'ba1' + assert a['b']['bb'] == 'bb' + assert a['b']['bc'] == 'bc' + shutil.rmtree(res_path)