diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 833029b90..420772f33 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,12 +5,12 @@ on: pull_request: jobs: - test: + code_checks: runs-on: ubuntu-latest strategy: matrix: os: [ubuntu-latest] - component: [formatting, builder, cli, client, machine, reporters, serializer, server, util, workflow, allelse] + code-check: [black, flakehell] python-version: [3.7] steps: - uses: actions/checkout@v1 @@ -23,48 +23,72 @@ jobs: - uses: actions/cache@v2 if: startsWith(runner.os, 'Linux') with: - path: ~/.cache/pip - key: ${{ runner.os }}-${{ matrix.python-version }}-pip-${{ hashFiles('requirements/*requirements.txt') }} + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/*requirements.txt') }} restore-keys: | ${{ runner.os }}-${{ matrix.python-version }}-pip- - - name: Extract branch name (works for both 'pull' and 'push' events) + - name: Extract branch name # works for both 'push' and 'pull_request' events shell: bash run: echo "##[set-output name=branch;]$( if [ ${{ github.event_name }} = push ]; then echo ${GITHUB_REF#refs/heads/}; else echo ${{ github.event.pull_request.head.ref }}; fi )" id: extract_branch - - name: Install + - name: Install test requirements run: | pip install --upgrade pip pip install --upgrade pip-tools - pip uninstall -y aws-sam-cli pyyaml # Workaround for fixing an issue with incompatible package versions - pip install -r requirements/full_requirements.txt - pip install .[tests] - -# - name: Get git diff files for Black -# shell: bash -# run: | -# git diff origin/master..origin/"${{ steps.extract_branch.outputs.branch }}" --name-only -# FILE_DIFFS=$(git diff origin/master..origin/${{ steps.extract_branch.outputs.branch }} --name-only | sed -n "/.a/ p") -# echo "FILE_DIFFS=$FILE_DIFFS" >> $GITHUB_ENV -# echo $GITHUB_ENV -# echo 5 -# id: python_git_diff_files + pip install -r requirements/test_requirements.txt - name: Run code quality checks -# if: ${{ env.FILE_DIFFS }} run: | - FILE_DIFFS=$(git diff origin/master..origin/${{ steps.extract_branch.outputs.branch }} --name-only | sed -n "/.py/ p") - echo "${FILE_DIFFS}" + FILES_TO_CHECK=$(git diff origin/master..origin/${{ steps.extract_branch.outputs.branch }} --name-only --diff-filter=ACM '*py') + echo "> Changed Python files: ${FILES_TO_CHECK}" - if [ -z "${FILE_DIFFS}" ] + if [ -z "${FILES_TO_CHECK}" ] then - echo "No Python files to check." + echo "> No Python files to check, exiting." 
+ exit 0 + fi + + if [ ${{ matrix.code-check }} = black ] + then + echo "> Starting Black code-formatting check for files: ${FILES_TO_CHECK}" + black --diff ${FILES_TO_CHECK} else - git diff origin/master..origin/${{ steps.extract_branch.outputs.branch }} | flakehell lint --diff - black --check "${FILE_DIFFS}" + echo "> Starting Flakehell code-quality check for diff in files: ${FILES_TO_CHECK}" + git diff origin/master..origin/${{ steps.extract_branch.outputs.branch }} -U0 --diff-filter=ACM '*.py' | flakehell lint --diff fi + test: + runs-on: ubuntu-latest + needs: code_checks + strategy: + matrix: + os: [ubuntu-latest] + component: [builder, cli, client, machine, reporters, serializer, server, util, workflow, allelse] + python-version: [3.7] + steps: + - uses: actions/checkout@v1 + + - uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + + - uses: actions/cache@v2 + if: startsWith(runner.os, 'Linux') + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ runner.os }}-${{ matrix.python-version }}-env-${{ hashFiles('**/*requirements.txt') }} + restore-keys: | + ${{ env.pythonLocation }}-${{ runner.os }}-${{ matrix.python-version }}-env- + + - name: Install + run: | + pip install --upgrade pip==20.1.1 + pip install -r requirements/full_requirements.txt + pip install -r requirements/test_requirements.txt + - name: Test ${{ matrix.component }} run: python setup.py test${{ matrix.component }} @@ -100,6 +124,7 @@ jobs: build-docs: runs-on: ubuntu-latest + needs: code_checks steps: - uses: actions/checkout@v1 @@ -108,6 +133,13 @@ jobs: python-version: '3.7' # Version range or exact version of a Python version to use, using semvers version range syntax. architecture: 'x64' + - uses: actions/cache@v2 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-${{ matrix.python-version }}-pip-new-${{ hashFiles('**/*requirements.txt') }} + restore-keys: | + ${{ runner.os }}-${{ matrix.python-version }}-pip-new- + - name: Install deps run: | pip install --upgrade pip diff --git a/.github/workflows/master-ci.yml b/.github/workflows/master-ci.yml index f9f090664..c0c97caab 100644 --- a/.github/workflows/master-ci.yml +++ b/.github/workflows/master-ci.yml @@ -63,76 +63,18 @@ jobs: run: | bash ci/trivy_scan.sh "$IMAGE" - - name: Push base image - run: | - docker push ${{ steps.prep.outputs.base_image }} - - - name: Push gordo-client - uses: docker/build-push-action@v2 - with: - push: true - context: . - file: ./Dockerfile-Client - build-args: | - BASE_IMAGE=${{ steps.prep.outputs.base_image }} - tags: ${{ steps.prep.outputs.tags_gordo_client }} - labels: | - org.opencontainers.image.title=gordo-client - org.opencontainers.image.description=Gordo client - org.opencontainers.image.url=${{ env.IMAGE_HOME_URL }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ env.IMAGE_LICENSE }} - - - name: Push gordo-deploy - uses: docker/build-push-action@v2 - with: - push: true - context: . 
- file: ./Dockerfile-GordoDeploy - build-args: | - BASE_IMAGE=${{ steps.prep.outputs.base_image }} - tags: ${{ steps.prep.outputs.tags_gordo_deploy }} - labels: | - org.opencontainers.image.title=gordo-deploy - org.opencontainers.image.description=Gordo deploy - org.opencontainers.image.url=${{ env.IMAGE_HOME_URL }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ env.IMAGE_LICENSE }} - - - name: Push gordo-model-builder + - name: Push gordo-base uses: docker/build-push-action@v2 with: push: true context: . - file: ./Dockerfile-ModelBuilder - build-args: | - BASE_IMAGE=${{ steps.prep.outputs.base_image }} - tags: ${{ steps.prep.outputs.tags_gordo_model_builder }} - labels: | - org.opencontainers.image.title=gordo-model-builder - org.opencontainers.image.description=Gordo model builder - org.opencontainers.image.url=${{ env.IMAGE_HOME_URL }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ env.IMAGE_LICENSE }} - - - name: Push gordo-deploy - uses: docker/build-push-action@v2 - with: - push: true - context: . - file: ./Dockerfile-ModelServer + file: ./Dockerfile build-args: | BASE_IMAGE=${{ steps.prep.outputs.base_image }} - tags: ${{ steps.prep.outputs.tags_gordo_model_server }} + tags: ${{ steps.prep.outputs.tags_gordo_base }} labels: | - org.opencontainers.image.title=gordo-model-server - org.opencontainers.image.description=Gordo model server + org.opencontainers.image.title=gordo-base + org.opencontainers.image.description=Gordo org.opencontainers.image.url=${{ env.IMAGE_HOME_URL }} org.opencontainers.image.version=${{ steps.prep.outputs.version }} org.opencontainers.image.created=${{ steps.prep.outputs.created }} diff --git a/.gitignore b/.gitignore index b02219ad3..d0b9d76f1 100644 --- a/.gitignore +++ b/.gitignore @@ -106,3 +106,5 @@ venv.bak/ *.pkl /gordo/_version.py .python-version +*.DS_Store + diff --git a/Dockerfile b/Dockerfile index b0bb0cffe..db0aea21b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -42,8 +42,44 @@ RUN pip install -r full_requirements.txt --no-cache-dir COPY --from=builder /code/dist/gordo-packed.tar.gz . 
RUN pip install gordo-packed.tar.gz[full] +# Install GordoDeploy dependencies +ARG HTTPS_PROXY +ARG KUBECTL_VERSION="v1.16.9" +ARG ARGO_VERSION="v2.8.2" + +RUN apt-get update && apt-get install -y \ + curl \ + jq \ + && rm -rf /var/lib/apt/lists/* + +#download & install kubectl +RUN curl -sSL -o /usr/local/bin/kubectl https://storage.googleapis.com/kubernetes-release/release/$KUBECTL_VERSION/bin/linux/amd64/kubectl &&\ + chmod +x /usr/local/bin/kubectl + +#download & install argo +RUN curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/$ARGO_VERSION/argo-linux-amd64 &&\ + chmod +x /usr/local/bin/argo + +COPY ./run_workflow_and_argo.sh ${HOME}/run_workflow_and_argo.sh + +# Baking in example configs for running tests, as docker.client.containers.run +# bind doesn't seem to work correctly for non-root users +# volumes={repo_dir: {"bind": "/home/gordo", "mode": "ro"}}, +COPY ./examples ${HOME}/examples +COPY ./resources ${HOME}/resources + +# Install ModelBuilder dependencies +ADD build.sh ${HOME}/build.sh + +# build.sh (build the model) as executable default command +RUN cp ${HOME}/build.sh /usr/bin/build \ + && chmod a+x /usr/bin/build + + # Make gordo own all in its home RUN chown -R gordo:gordo ${HOME} # Run things from gordo's home to have write access when needed (e.g. Catboost tmp files) WORKDIR ${HOME} +# Switch user +USER gordo diff --git a/Dockerfile-Client b/Dockerfile-Client deleted file mode 100644 index a3756cd62..000000000 --- a/Dockerfile-Client +++ /dev/null @@ -1,7 +0,0 @@ -ARG BASE_IMAGE=gordo/base:latest -FROM $BASE_IMAGE - -# Switch user -USER gordo - -CMD ["gordo", "client"] diff --git a/Dockerfile-GordoDeploy b/Dockerfile-GordoDeploy deleted file mode 100644 index bdcddff50..000000000 --- a/Dockerfile-GordoDeploy +++ /dev/null @@ -1,35 +0,0 @@ -# Installs workflow-generator, kubectl and argo. Runs workflow-generator on the machine-config -stored in the environment variable MACHINE_CONFIG, or if that is empty, the file -/code/config.yml, which is then the callers responsibility to mount in.
-ARG BASE_IMAGE=gordo/base:latest -FROM $BASE_IMAGE - -ARG HTTPS_PROXY -ARG KUBECTL_VERSION="v1.16.9" -ARG ARGO_VERSION="v2.8.2" - -RUN apt-get update && apt-get install -y \ - curl \ - jq \ - && rm -rf /var/lib/apt/lists/* - -#donwload & install kubectl -RUN curl -sSL -o /usr/local/bin/kubectl https://storage.googleapis.com/kubernetes-release/release/$KUBECTL_VERSION/bin/linux/amd64/kubectl &&\ - chmod +x /usr/local/bin/kubectl - -#download & install argo -RUN curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/$ARGO_VERSION/argo-linux-amd64 &&\ - chmod +x /usr/local/bin/argo - -COPY ./run_workflow_and_argo.sh ${HOME}/run_workflow_and_argo.sh - -# Baking in example configs for running tests, as docker.client.containers.run -# bind doesn't seem to work correctly for non-root users -# volumes={repo_dir: {"bind": "/home/gordo", "mode": "ro"}}, -COPY ./examples ${HOME}/examples -COPY ./resources ${HOME}/resources - -# Switch user -USER gordo - -CMD ["bash", "./run_workflow_and_argo.sh"] diff --git a/Dockerfile-ModelBuilder b/Dockerfile-ModelBuilder deleted file mode 100644 index 51a6e5410..000000000 --- a/Dockerfile-ModelBuilder +++ /dev/null @@ -1,13 +0,0 @@ -ARG BASE_IMAGE=gordo/base:latest -FROM $BASE_IMAGE - -ADD build.sh ${HOME}/build.sh - -# build.sh (build the model) as executable default command -RUN cp ${HOME}/build.sh /usr/bin/build \ - && chmod a+x /usr/bin/build - -# Switch user -USER gordo - -CMD ["build"] diff --git a/Dockerfile-ModelServer b/Dockerfile-ModelServer deleted file mode 100644 index 9718d1ce5..000000000 --- a/Dockerfile-ModelServer +++ /dev/null @@ -1,7 +0,0 @@ -ARG BASE_IMAGE=gordo/base:latest -FROM $BASE_IMAGE - -# Switch user -USER gordo - -CMD ["gordo", "run-server"] diff --git a/Makefile b/Makefile index 43c95998a..8e4088fa8 100644 --- a/Makefile +++ b/Makefile @@ -4,6 +4,8 @@ MODEL_SERVER_IMG_NAME := gordo-model-server CLIENT_IMG_NAME := gordo-client WORKFLOW_GENERATOR_IMG_NAME := gordo-deploy +.SILENT: code-quality-locally black-check flakehell-check + base: docker build . -f Dockerfile -t $(BASE_IMG_NAME) @@ -95,13 +97,30 @@ sdist: images: model-builder model-server client -code-quality: flakehell black ## Run code quality tools +code-quality-locally: + @echo "** Make sure that your branch is up to date with origin/master (not to have extra files to check)" + make black-check; make flakehell-check ## run code quality check on changed files in the current branch + +black-check: ## run black code formatting check on changed files in the current branch + @$(eval FILES_TO_CHECK=$(shell git diff origin/master --name-only --diff-filter=ACM '*.py')) -flakehell: ## Run flakehell with plugins - only on changed code - git diff origin/master.. | flakehell lint --diff + if [ -z "${FILES_TO_CHECK}" ]; then \ + echo "> No Python files to check."; \ + else \ + echo "> Starting Black code-formatting check for files: ${FILES_TO_CHECK}"; \ + black --diff ${FILES_TO_CHECK}; \ + fi -black: ## Run black auto code formatter - only on changed code - git diff origin/master.. 
--name-only | grep '.py' | xargs black --check +flakehell-check: ## run flake8 and its plugins code checks on changes in the current branch + @$(eval FILES_TO_CHECK=$(shell git diff origin/master --name-only --diff-filter=ACM '*.py')) + + if [ -z "${FILES_TO_CHECK}" ]; then \ + echo "> No Python files to check."; \ + else \ + echo "> Starting Flakehell code-quality check for diff in files: ${FILES_TO_CHECK}"; \ + git diff origin/master -U0 --diff-filter=ACM '*.py' | flakehell lint --diff; \ + echo "> 'Flakehell' finished."; \ + fi test: python setup.py test diff --git a/README.md b/README.md index eac6883d6..94ad84384 100644 --- a/README.md +++ b/README.md @@ -71,7 +71,7 @@ This section will explain how to start development of Gordo. ```shell script # create and activate virtualenv. Note: you should use python3.7 (project's tensorflow version is not compatible with python3.8) # then: -make make install_app_requirements +make install_app_requirements ``` ### How to run tests locally diff --git a/ci/github_docker.sh b/ci/github_docker.sh index 90e842977..68da8e361 100644 --- a/ci/github_docker.sh +++ b/ci/github_docker.sh @@ -42,8 +42,7 @@ function version_tags { } function set_output_tags { - var_name=$1 - image_name=$2 + image_name=$1 if [ "$IMAGE_TYPE" == "pr" ]; then tags=$DOCKER_DEV_IMAGE/$image_name:$VERSION else @@ -61,7 +60,7 @@ function set_output_tags { fi fi fi - echo ::set-output name=$var_name::$tags + echo $tags } BASE_IMAGE=$DOCKER_DEV_IMAGE/base @@ -74,7 +73,6 @@ echo ::set-output name=release_type::${RELEASE} echo ::set-output name=image_type::${IMAGE_TYPE} echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') echo ::set-output name=base_image::$BASE_IMAGE:$VERSION -set_output_tags "tags_gordo_client" "gordo-client" -set_output_tags "tags_gordo_deploy" "gordo-deploy" -set_output_tags "tags_gordo_model_builder" "gordo-model-builder" -set_output_tags "tags_gordo_model_server" "gordo-model-server" +gordo_base_tags=$(set_output_tags "gordo-base") +gordo_deploy_tags=$(set_output_tags "gordo-deploy") +echo ::set-output name=tags_gordo_base::$gordo_base_tags,$gordo_deploy_tags diff --git a/gordo/builder/build_model.py b/gordo/builder/build_model.py index 85cd6bbc8..ae0ae1e71 100644 --- a/gordo/builder/build_model.py +++ b/gordo/builder/build_model.py @@ -145,7 +145,7 @@ def build( model=model, machine=machine, output_dir=output_dir # type: ignore ) logger.info(f"Built model, and deposited at {self.cached_model_path}") - logger.info(f"Writing model-location to model registry") + logger.info("Writing model-location to model registry") disk_registry.write_key( # type: ignore model_register_dir, self.cache_key, self.cached_model_path ) diff --git a/gordo/cli/cli.py b/gordo/cli/cli.py index 21625bb84..4090e9f9e 100644 --- a/gordo/cli/cli.py +++ b/gordo/cli/cli.py @@ -160,7 +160,7 @@ def build( # Convert the config into a pipeline, and back into definition to ensure # all default parameters are part of the config. 
- logger.debug(f"Ensuring the passed model config is fully expanded.") + logger.debug("Ensuring the passed model config is fully expanded.") machine.model = serializer.into_definition( serializer.from_definition(machine.model) ) diff --git a/gordo/cli/client.py b/gordo/cli/client.py index 613959e77..8185b95cf 100644 --- a/gordo/cli/client.py +++ b/gordo/cli/client.py @@ -146,7 +146,7 @@ def predict( # Fire off getting predictions predictions = client.predict( - start, end, targets=target, + start, end, targets=target ) # type: typing.Iterable[typing.Tuple[str, pd.DataFrame, typing.List[str]]] # Loop over all error messages for each result and log them @@ -191,7 +191,8 @@ def metadata( """ client = Client(*ctx.obj["args"], **ctx.obj["kwargs"]) metadata = { - k: v.to_dict() for k, v in client.get_metadata(targets=target).items() # type: ignore + k: v.to_dict() # type: ignore + for k, v in client.get_metadata(targets=target).items() # type: ignore } if output_file: json.dump(metadata, output_file) @@ -226,7 +227,7 @@ def download_model(ctx: click.Context, output_dir: str, target: typing.List[str] f"Writing model '{model_name}' to directory: '{model_out_dir}'...", nl=False ) serializer.dump(model, model_out_dir) - click.secho(f"done") + click.secho("done") click.secho(f"Wrote all models to directory: {output_dir}", fg="green") diff --git a/gordo/cli/custom_types.py b/gordo/cli/custom_types.py index 49c6dd938..127d288c0 100644 --- a/gordo/cli/custom_types.py +++ b/gordo/cli/custom_types.py @@ -26,7 +26,7 @@ def convert(self, value, param, ctx): kwargs = yaml.safe_load(value) if "type" not in kwargs: - self.fail(f"Cannot create DataProvider without 'type' key defined") + self.fail("Cannot create DataProvider without 'type' key defined") kind = kwargs.pop("type") diff --git a/gordo/cli/workflow_generator.py b/gordo/cli/workflow_generator.py index e5cf4bc69..c5597c5eb 100644 --- a/gordo/cli/workflow_generator.py +++ b/gordo/cli/workflow_generator.py @@ -229,11 +229,21 @@ def workflow_generator_cli(gordo_ctx, **ctx): ] context["model_builder_resources_limits_cpu"] = builder_resources["limits"]["cpu"] + context["model_builder_image"] = config.globals["runtime"]["builder"]["image"] + context["server_resources"] = config.globals["runtime"]["server"]["resources"] + context["server_image"] = config.globals["runtime"]["server"]["image"] + context["prometheus_metrics_server_resources"] = config.globals["runtime"][ "prometheus_metrics_server" ]["resources"] + context["prometheus_metrics_server_image"] = config.globals["runtime"][ + "prometheus_metrics_server" + ]["image"] + + context["deployer_image"] = config.globals["runtime"]["deployer"]["image"] + # These are also set in the default globals, and guaranteed to exist client_resources = config.globals["runtime"]["client"]["resources"] context["client_resources_requests_memory"] = client_resources["requests"]["memory"] @@ -241,6 +251,8 @@ def workflow_generator_cli(gordo_ctx, **ctx): context["client_resources_limits_memory"] = client_resources["limits"]["memory"] context["client_resources_limits_cpu"] = client_resources["limits"]["cpu"] + context["client_image"] = config.globals["runtime"]["client"]["image"] + context["client_max_instances"] = config.globals["runtime"]["client"][ "max_instances" ] diff --git a/gordo/machine/model/anomaly/diff.py b/gordo/machine/model/anomaly/diff.py index 3fef6f8cf..a04e8670c 100644 --- a/gordo/machine/model/anomaly/diff.py +++ b/gordo/machine/model/anomaly/diff.py @@ -303,8 +303,7 @@ def _scaled_mse_per_timestep( 
@staticmethod def _absolute_error( - y_true: Union[pd.DataFrame, np.ndarray], - y_pred: Union[pd.DataFrame, np.ndarray], + y_true: Union[pd.DataFrame, np.ndarray], y_pred: Union[pd.DataFrame, np.ndarray] ) -> pd.DataFrame: return pd.DataFrame(np.abs(y_true - y_pred)) diff --git a/gordo/machine/validators.py b/gordo/machine/validators.py index ef6d8a223..51a72e5d2 100644 --- a/gordo/machine/validators.py +++ b/gordo/machine/validators.py @@ -102,7 +102,7 @@ def __set__(self, instance, value): if value is not None and not any( isinstance(value, Obj) for Obj in (dict, Metadata) ): - raise ValueError(f"Can either be None or an instance of dict or Metadata") + raise ValueError("Can either be None or an instance of dict or Metadata") instance.__dict__[self.name] = value @@ -118,7 +118,7 @@ def __set__(self, instance, value): if not isinstance(value, GordoBaseDataProvider): raise TypeError( - f"Expected value to be an instance of GordoBaseDataProvider, " + "Expected value to be an instance of GordoBaseDataProvider, " f"found {value} " ) instance.__dict__[self.name] = value @@ -132,7 +132,7 @@ class ValidMachineRuntime(BaseDescriptor): def __set__(self, instance, value): if not isinstance(value, dict): - raise ValueError(f"Runtime must be an instance of dict") + raise ValueError("Runtime must be an instance of dict") value = self._verify_reporters(value) value = fix_runtime(value) instance.__dict__[self.name] = value @@ -215,7 +215,7 @@ def fix_resource_limits(resources: dict) -> dict: logger.warning( f"Memory limit {limits_memory} can not be smaller than memory " f"request {request_memory}, increasing memory limit to be equal" - f" to request. " + " to request. " ) limits["memory"] = request_memory if ( @@ -264,7 +264,7 @@ def __set__(self, instance, value): or not isinstance(value, list) or not any(isinstance(value[0], inst) for inst in (str, dict, SensorTag)) ): - raise ValueError(f"Requires setting a non-empty list of strings") + raise ValueError("Requires setting a non-empty list of strings") instance.__dict__[self.name] = value diff --git a/gordo/reporters/mlflow.py b/gordo/reporters/mlflow.py index 3780ec671..91ac293e5 100644 --- a/gordo/reporters/mlflow.py +++ b/gordo/reporters/mlflow.py @@ -473,7 +473,7 @@ def log_machine(mlflow_client: MlflowClient, run_id: str, machine: Machine): # Send configs as JSON artifacts try: with tempfile.TemporaryDirectory(dir="./") as tmp_dir: - fp = os.path.join(tmp_dir, f"metadata.json") + fp = os.path.join(tmp_dir, "metadata.json") with open(fp, "w") as fh: json.dump(machine.to_dict(), fh, cls=MachineEncoder) mlflow_client.log_artifacts(run_id=run_id, local_dir=tmp_dir) diff --git a/gordo/server/server.py b/gordo/server/server.py index e7679479e..973f1dd22 100644 --- a/gordo/server/server.py +++ b/gordo/server/server.py @@ -119,7 +119,7 @@ def wrapper(environ, start_response): def create_prometheus_metrics( - project: Optional[str] = None, registry: Optional[CollectorRegistry] = None, + project: Optional[str] = None, registry: Optional[CollectorRegistry] = None ) -> GordoServerPrometheusMetrics: arg_labels = [("gordo_name", "model")] info = {"version": __version__} @@ -155,7 +155,7 @@ def build_app( if app.config["ENABLE_PROMETHEUS"]: prometheus_metrics = create_prometheus_metrics( - project=app.config.get("PROJECT"), registry=prometheus_registry, + project=app.config.get("PROJECT"), registry=prometheus_registry ) prometheus_metrics.prepare_app(app) elif prometheus_registry is not None: diff --git a/gordo/workflow/config_elements/normalized_config.py 
b/gordo/workflow/config_elements/normalized_config.py index e741ac5dd..0728eb4a3 100644 --- a/gordo/workflow/config_elements/normalized_config.py +++ b/gordo/workflow/config_elements/normalized_config.py @@ -1,10 +1,13 @@ # -*- coding: utf-8 -*- -from typing import List +from typing import List, Optional +from copy import copy from gordo.machine.validators import fix_runtime from gordo.workflow.workflow_generator.helpers import patch_dict from gordo.machine import Machine +from gordo import __version__ +from packaging.version import parse def _calculate_influx_resources(nr_of_machines): @@ -22,13 +25,34 @@ def _calculate_influx_resources(nr_of_machines): class NormalizedConfig: - """ Handles the conversion of a single Machine representation in config format and updates it with any features which are 'left out' inside of ``globals`` key or the default config globals held here. """ + SPLITED_DOCKER_IMAGES = { + "runtime": { + "deployer": {"image": "gordo-deploy"}, + "server": {"image": "gordo-model-server"}, + "prometheus_metrics_server": {"image": "gordo-model-server"}, + "builder": {"image": "gordo-model-builder"}, + "client": {"image": "gordo-client"}, + } + } + + UNIFYING_GORDO_VERSION = "1.2.0" + + UNIFIED_DOCKER_IMAGES = { + "runtime": { + "deployer": {"image": "gordo-base"}, + "server": {"image": "gordo-base"}, + "prometheus_metrics_server": {"image": "gordo-base"}, + "builder": {"image": "gordo-base"}, + "client": {"image": "gordo-base"}, + } + } + DEFAULT_CONFIG_GLOBALS = { "runtime": { "reporters": [], @@ -72,15 +96,12 @@ class NormalizedConfig: }, } - """ - Represents a fully loaded config file - """ - - machines: List[Machine] - globals: dict - - def __init__(self, config: dict, project_name: str): - default_globals = self.DEFAULT_CONFIG_GLOBALS + def __init__( + self, config: dict, project_name: str, gordo_version: Optional[str] = None + ): + if gordo_version is None: + gordo_version = __version__ + default_globals = self.get_default_globals(gordo_version) default_globals["runtime"]["influx"][ # type: ignore "resources" ] = _calculate_influx_resources( # type: ignore @@ -92,11 +113,22 @@ def __init__(self, config: dict, project_name: str): if patched_globals.get("runtime"): patched_globals["runtime"] = fix_runtime(patched_globals.get("runtime")) self.project_name = project_name - self.machines = [ + self.machines: List[Machine] = [ Machine.from_config( conf, project_name=project_name, config_globals=patched_globals ) for conf in config["machines"] - ] # type: List[Machine] + ] + + self.globals: dict = patched_globals - self.globals = patched_globals + @classmethod + def get_default_globals(cls, gordo_version: str) -> dict: + current_version = parse(gordo_version) + unifying_version = parse(cls.UNIFYING_GORDO_VERSION) + if current_version >= unifying_version: + docker_images = cls.UNIFIED_DOCKER_IMAGES + else: + docker_images = cls.SPLITED_DOCKER_IMAGES + default_globals = cls.DEFAULT_CONFIG_GLOBALS + return patch_dict(copy(default_globals), docker_images) diff --git a/gordo/workflow/workflow_generator/resources/argo-workflow.yml.template b/gordo/workflow/workflow_generator/resources/argo-workflow.yml.template index c7a503be9..099239690 100644 --- a/gordo/workflow/workflow_generator/resources/argo-workflow.yml.template +++ b/gordo/workflow/workflow_generator/resources/argo-workflow.yml.template @@ -48,7 +48,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ 
docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -104,7 +104,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -298,7 +298,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -466,7 +466,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -673,8 +673,9 @@ spec: - name: machine-name - name: machine container: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-model-builder:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ model_builder_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} + command: [build] env: - name: OUTPUT_DIR value: "/gordo/models/{{project_name}}/models/{{project_revision}}/{{'{{inputs.parameters.machine-name}}'}}" @@ -694,6 +695,11 @@ spec: secretKeyRef: name: dlserviceauth key: tenant_id_secret + - name: DL2_SERVICE_AUTH_STR + valueFrom: + secretKeyRef: + name: dl2serviceauth + key: tenant_id_secret - name: GORDO_LOG_LEVEL value: "{{log_level}}"{% if builder_exceptions_report_file is defined %} - name: EXCEPTIONS_REPORTER_FILE @@ -906,7 +912,7 @@ spec: spec: priorityClassName: server-priority containers: - - image: "{{ docker_registry }}/{{ docker_repository }}/gordo-model-server:{{gordo_version}}"{% if image_pull_policy %} + - image: "{{ docker_registry }}/{{ docker_repository }}/{{ server_image }}:{{gordo_version}}"{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} name: "gordoserver-{{ project_name }}" volumeMounts: @@ -956,7 +962,7 @@ spec: limits: memory: "{{ server_resources['limits']['memory'] }}M" cpu: "{{ server_resources['limits']['cpu'] }}m"{% if not without_prometheus %} - - image: "{{ docker_registry }}/{{ docker_repository }}/gordo-model-server:{{gordo_version}}"{% if image_pull_policy %} + - image: "{{ docker_registry }}/{{ docker_repository }}/{{ server_image }}:{{gordo_version}}"{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} name: "gordoserver-prometheus-{{ project_name }}" volumeMounts: @@ -1088,7 +1094,7 
@@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -1151,7 +1157,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-model-builder:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ client_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -1190,6 +1196,11 @@ spec: secretKeyRef: name: dlserviceauth key: tenant_id_secret + - name: DL2_SERVICE_AUTH_STR + valueFrom: + secretKeyRef: + name: dl2serviceauth + key: tenant_id_secret - name: GORDO_LOG_LEVEL value: "{{log_level}}" @@ -1207,7 +1218,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -1239,7 +1250,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | @@ -1268,7 +1279,7 @@ spec: applications.gordo.equinor.com/project-name: "{{project_name}}" applications.gordo.equinor.com/project-revision: "{{project_revision}}" script: - image: {{ docker_registry }}/{{ docker_repository }}/gordo-deploy:{{gordo_version}}{% if image_pull_policy %} + image: {{ docker_registry }}/{{ docker_repository }}/{{ deployer_image }}:{{gordo_version}}{% if image_pull_policy %} imagePullPolicy: "{{image_pull_policy}}"{% endif %} command: [bash] source: | diff --git a/pyproject.toml b/pyproject.toml index 3392a8dfd..03f4177e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,8 @@ exclude = [".cache", ".git", "__pycache__", "old", "build", "dist", "junk"] flake8-broken-line = ["+*"] flake8-bugbear = ["+*"] flake8-builtins = ["+*"] -flake8-commas = ["+*"] +# - C812 exclude trailing comma for one line expression (black conflicts) +flake8-commas = ["+*", "-C812"] flake8-comprehensions = ["+*"] flake8-darglint = ["+*"] flake8-debugger = ["+*"] diff --git a/requirements/full_requirements.txt b/requirements/full_requirements.txt index 6c68aac39..bbe4e3724 100644 --- a/requirements/full_requirements.txt +++ b/requirements/full_requirements.txt @@ -52,7 +52,7 @@ gitpython==3.0.5 # via mlflow google-auth-oauthlib==0.4.1 # via tensorboard google-auth==1.10.1 # via google-auth-oauthlib, tensorboard google-pasta==0.1.8 # via tensorflow -gordo-dataset==2.1.2 
# via -r requirements.in +gordo-dataset==2.3.0 # via -r requirements.in gorilla==0.3.0 # via mlflow graphviz==0.13.2 # via catboost grpcio==1.26.0 # via tensorboard, tensorflow @@ -91,6 +91,7 @@ numexpr==2.7.1 # via -r requirements.in, gordo-dataset numpy==1.18.1 # via -r requirements.in, catboost, h5py, keras-applications, keras-preprocessing, matplotlib, mlflow, numexpr, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow oauthlib==3.1.0 # via requests-oauthlib opt-einsum==3.1.0 # via tensorflow +packaging==20.7 # via -r requirements.in pandas==1.1.4 # via -r requirements.in, catboost, gordo-dataset, mlflow pathspec==0.7.0 # via azureml-core peewee==3.13.1 # via -r postgres_requirements.in @@ -106,7 +107,7 @@ pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, rsa pycparser==2.19 # via cffi pyjwt[crypto]==1.7.1 # via adal, azureml-core, msal pyopenssl==19.1.0 # via azureml-core, ndg-httpsclient -pyparsing==2.4.6 # via matplotlib +pyparsing==2.4.6 # via matplotlib, packaging pyrsistent==0.15.7 # via jsonschema python-dateutil==2.8.1 # via -r requirements.in, adal, alembic, azureml-core, influxdb, matplotlib, mlflow, pandas python-editor==1.0.4 # via alembic diff --git a/requirements/requirements.in b/requirements/requirements.in index c8c8bf5d8..72d3d7fc4 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -26,5 +26,6 @@ wrapt~=1.11 prometheus_client~=0.7.1 azure-identity~=1.4.0 PyYAML~=5.3.1 -gordo-dataset~=2.1.2 +gordo-dataset~=2.3.0 jeepney>=0.6 +packaging~=20.7 diff --git a/requirements/test_requirements.in b/requirements/test_requirements.in index e177d0fcc..1b830aa8a 100644 --- a/requirements/test_requirements.in +++ b/requirements/test_requirements.in @@ -9,7 +9,7 @@ pytest-mypy~=0.4 pytest-timeout~=1.3 pytest-cov~=2.8 responses~=0.10 -black==19.10b0 +black==19.3b0 pytest-flakes~=4.0 adal~=1.2 jupyter~=1.0.0 diff --git a/requirements/test_requirements.txt b/requirements/test_requirements.txt index bf83b72a5..ab9938efe 100644 --- a/requirements/test_requirements.txt +++ b/requirements/test_requirements.txt @@ -10,7 +10,7 @@ appdirs==1.4.3 # via black appnope==0.1.0 # via ipykernel, ipython attrs==19.3.0 # via -c full_requirements.txt, black, flake8-bugbear, flake8-eradicate, jsonschema, pytest backcall==0.1.0 # via ipython -black==19.10b0 # via -r test_requirements.in +black==19.3b0 # via -r test_requirements.in bleach==3.1.5 # via -r test_requirements.in, nbconvert certifi==2019.11.28 # via -c full_requirements.txt, requests cffi==1.13.2 # via -c full_requirements.txt, cryptography @@ -64,10 +64,9 @@ mypy==0.761 # via pytest-mypy nbconvert==5.6.1 # via -r test_requirements.in, jupyter, notebook nbformat==5.0.3 # via ipywidgets, nbconvert, notebook notebook==6.0.2 # via -r test_requirements.in, jupyter, widgetsnbextension -packaging==20.0 # via bleach, pytest +packaging==20.7 # via -c full_requirements.txt, bleach, pytest pandocfilters==1.4.2 # via nbconvert parso==0.5.2 # via jedi -pathspec==0.7.0 # via -c full_requirements.txt, black pep8-naming==0.11.1 # via -r test_requirements.in pexpect==4.7.0 # via ipython pickleshare==0.7.5 # via ipython @@ -97,11 +96,10 @@ pytest==5.3.2 # via -r test_requirements.in, pytest-benchmark, pytes python-dateutil==2.8.1 # via -c full_requirements.txt, adal, jupyter-client pyzmq==18.1.1 # via jupyter-client, notebook qtconsole==4.6.0 # via jupyter -regex==2020.1.8 # via black requests==2.22.0 # via -c full_requirements.txt, adal, docker, responses responses==0.10.9 # via -r test_requirements.in 
send2trash==1.5.0 # via notebook -six==1.14.0 # via -c full_requirements.txt, bleach, cryptography, docker, flake8-debugger, jsonschema, mock, packaging, prompt-toolkit, pyrsistent, pytest-xdist, python-dateutil, responses, traitlets, websocket-client +six==1.14.0 # via -c full_requirements.txt, bleach, cryptography, docker, flake8-debugger, jsonschema, mock, prompt-toolkit, pyrsistent, pytest-xdist, python-dateutil, responses, traitlets, websocket-client snowballstemmer==2.0.0 # via pydocstyle terminado==0.8.3 # via notebook testfixtures==6.15.0 # via flake8-isort @@ -109,7 +107,7 @@ testpath==0.4.4 # via nbconvert toml==0.10.0 # via black, flakehell tornado==6.0.3 # via ipykernel, jupyter-client, notebook, terminado traitlets==4.3.3 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbconvert, nbformat, notebook, qtconsole -typed-ast==1.4.0 # via black, mypy +typed-ast==1.4.0 # via mypy typing-extensions==3.7.4.1 # via -c full_requirements.txt, mypy urllib3==1.25.7 # via -c full_requirements.txt, -r test_requirements.in, flakehell, requests wcwidth==0.1.8 # via prompt-toolkit, pytest diff --git a/setup.cfg b/setup.cfg index 14a460488..a0e6db2db 100644 --- a/setup.cfg +++ b/setup.cfg @@ -21,9 +21,6 @@ testserver = pytest --addopts "tests/gordo/server" testutil = pytest --addopts "tests/gordo/util" testworkflow = pytest --addopts "tests/gordo/workflow" -# Black formatting -testformatting = pytest --addopts "tests/test_formatting.py" - # all else, ie mypy, flakes, examples, etc. testallelse = pytest --addopts "--ignore tests/gordo/builder diff --git a/setup.py b/setup.py index 5ac7fb8a8..23053492e 100644 --- a/setup.py +++ b/setup.py @@ -57,7 +57,7 @@ def requirements(fp: str): package_data={ "": [ "gordo/workflow/workflow_generator/resources/argo-workflow.yml.template", - "gordo/machine/dataset/data_provider/resources/assets_config.yaml" + "gordo/machine/dataset/data_provider/resources/assets_config.yaml", ] }, include_package_data=True, diff --git a/tests/conftest.py b/tests/conftest.py index 9b2057d4c..4e16b1f8b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -336,7 +336,7 @@ def influxdb(base_influxdb): @pytest.fixture(scope="session") def argo_version(repo_dir): - with open(os.path.join(repo_dir, "Dockerfile-GordoDeploy")) as f: + with open(os.path.join(repo_dir, "Dockerfile")) as f: match = next(re.finditer(r'ARGO_VERSION="(\w\d+.\d+.\d+)"', f.read()), None) if match is None: raise LookupError( diff --git a/tests/gordo/builder/test_builder.py b/tests/gordo/builder/test_builder.py index 814b27705..2eddee329 100644 --- a/tests/gordo/builder/test_builder.py +++ b/tests/gordo/builder/test_builder.py @@ -278,7 +278,7 @@ def test_get_metadata_helper(model: BaseEstimator, expect_empty_dict: bool): @pytest.mark.parametrize( "raw_model_config", ( - f""" + """ gordo.machine.model.anomaly.diff.DiffBasedAnomalyDetector: scaler: sklearn.preprocessing.MinMaxScaler base_estimator: @@ -297,7 +297,7 @@ def test_get_metadata_helper(model: BaseEstimator, expect_empty_dict: bool): out_func: linear epochs: 1 """, - f""" + """ sklearn.compose.TransformedTargetRegressor: transformer: sklearn.preprocessing.MinMaxScaler regressor: @@ -313,7 +313,7 @@ def test_get_metadata_helper(model: BaseEstimator, expect_empty_dict: bool): out_func: linear epochs: 1 """, - f""" + """ sklearn.pipeline.Pipeline: steps: - sklearn.preprocessing.MinMaxScaler @@ -340,7 +340,7 @@ def test_scores_metadata(raw_model_config): def test_output_scores_metadata(): data_config = get_random_data() - 
raw_model_config = f""" + raw_model_config = """ gordo.machine.model.anomaly.diff.DiffBasedAnomalyDetector: scaler: sklearn.preprocessing.MinMaxScaler base_estimator: diff --git a/tests/gordo/cli/test_exception_reporter.py b/tests/gordo/cli/test_exception_reporter.py index fded043e5..48a6173a7 100644 --- a/tests/gordo/cli/test_exception_reporter.py +++ b/tests/gordo/cli/test_exception_reporter.py @@ -105,10 +105,7 @@ def test_with_message_report_level(reporter1): result = report_to_string( _Test1Exception("Test message"), reporter1, ReportLevel.MESSAGE ) - assert result == { - "type": "_Test1Exception", - "message": "Test message", - } + assert result == {"type": "_Test1Exception", "message": "Test message"} def test_with_traceback_report_level(reporter1): @@ -124,9 +121,7 @@ def test_with_type_report_level(reporter1): result = report_to_string( _Test1Exception("Test message"), reporter1, ReportLevel.TYPE ) - assert result == { - "type": "_Test1Exception", - } + assert result == {"type": "_Test1Exception"} def test_with_exit_code_report_level(reporter1): @@ -140,10 +135,7 @@ def test_with_unicode_chars(reporter1): result = report_to_string( _Test1Exception("\t你好 world!\n"), reporter1, ReportLevel.MESSAGE ) - assert result == { - "type": "_Test1Exception", - "message": "\t?? world!\n", - } + assert result == {"type": "_Test1Exception", "message": "\t?? world!\n"} def test_with_max_message_len(reporter1): @@ -153,24 +145,15 @@ def test_with_max_message_len(reporter1): ReportLevel.MESSAGE, max_message_len=8, ) - assert result == { - "type": "_Test1Exception", - "message": "Hello...", - } + assert result == {"type": "_Test1Exception", "message": "Hello..."} result = report_to_string( _Test1Exception("Hello world!"), reporter1, ReportLevel.MESSAGE, max_message_len=20, ) - assert result == { - "type": "_Test1Exception", - "message": "Hello world!", - } + assert result == {"type": "_Test1Exception", "message": "Hello world!"} result = report_to_string( _Test1Exception("Hello"), reporter1, ReportLevel.MESSAGE, max_message_len=4 ) - assert result == { - "type": "_Test1Exception", - "message": "", - } + assert result == {"type": "_Test1Exception", "message": ""} diff --git a/tests/gordo/client/test_client.py b/tests/gordo/client/test_client.py index e74a199c5..1ec2d6651 100644 --- a/tests/gordo/client/test_client.py +++ b/tests/gordo/client/test_client.py @@ -182,10 +182,8 @@ def test_client_predictions_diff_batch_sizes( ), f"Expected new values in 'predictions' measurement, but found {vals}" -def test_client_metadata_revision( - gordo_project, gordo_single_target, ml_server, -): - prediction_client = Client(project=gordo_project,) +def test_client_metadata_revision(gordo_project, gordo_single_target, ml_server): + prediction_client = Client(project=gordo_project) assert "revision" in prediction_client.get_available_machines() @@ -232,7 +230,7 @@ def test_client_cli_metadata(gordo_project, gordo_single_target, ml_server, tmpd # Simple metadata fetching with all targets out = runner.invoke( - cli.gordo, args=["client", "--project", gordo_project, "metadata",], + cli.gordo, args=["client", "--project", gordo_project, "metadata"] ) assert out.exit_code == 0 assert gordo_single_target in out.output diff --git a/tests/gordo/machine/model/anomaly/test_anomaly_detectors.py b/tests/gordo/machine/model/anomaly/test_anomaly_detectors.py index a4736b38c..56fc07e0b 100644 --- a/tests/gordo/machine/model/anomaly/test_anomaly_detectors.py +++ b/tests/gordo/machine/model/anomaly/test_anomaly_detectors.py @@ -51,7 
+51,7 @@ def test_diff_detector(scaler, index, with_thresholds: bool, shuffle: bool): assert isinstance(model, AnomalyDetectorBase) assert model.get_params() == dict( - base_estimator=base_estimator, scaler=scaler, shuffle=shuffle, + base_estimator=base_estimator, scaler=scaler, shuffle=shuffle ) if with_thresholds: @@ -163,7 +163,7 @@ def test_diff_detector_with_window( if window is None: assert model.get_params() == dict( - base_estimator=base_estimator, scaler=scaler, shuffle=shuffle, + base_estimator=base_estimator, scaler=scaler, shuffle=shuffle ) elif window is not None and smoothing_method is None: @@ -773,11 +773,11 @@ def test_diff_detector_require_thresholds(mode: str, require_threshold: bool): base_estimator = MultiOutputRegressor(LinearRegression()) if mode == "tscv": model = DiffBasedAnomalyDetector( - base_estimator=base_estimator, require_thresholds=require_threshold, + base_estimator=base_estimator, require_thresholds=require_threshold ) elif mode == "kfcv": model = DiffBasedKFCVAnomalyDetector( - base_estimator=base_estimator, require_thresholds=require_threshold, + base_estimator=base_estimator, require_thresholds=require_threshold ) model.fit(X, y) diff --git a/tests/gordo/machine/model/test_transformers.py b/tests/gordo/machine/model/test_transformers.py index c5b534ec6..430708991 100644 --- a/tests/gordo/machine/model/test_transformers.py +++ b/tests/gordo/machine/model/test_transformers.py @@ -140,7 +140,7 @@ def test_infimputer_fill_values(): imputer = InfImputer(inf_fill_value=9999.0, neg_inf_fill_value=-9999.0) X = imputer.fit_transform(base_x) np.equal( - X.ravel()[[pos_inf_idxs]], np.array([9999.0, 9999.0, 9999.0, 9999.0, 9999.0]), + X.ravel()[[pos_inf_idxs]], np.array([9999.0, 9999.0, 9999.0, 9999.0, 9999.0]) ) np.equal( X.ravel()[[neg_inf_idxs]], diff --git a/tests/gordo/server/test_base_view.py b/tests/gordo/server/test_base_view.py index 954d206fa..b4ce2b8be 100644 --- a/tests/gordo/server/test_base_view.py +++ b/tests/gordo/server/test_base_view.py @@ -19,7 +19,7 @@ def test_empty_target_tag_list(): with app.app_context(): g.metadata = {"dataset": {"tag_list": [test_tag]}} view = BaseModelView() - assert view.target_tags == [test_tag] + assert view.target_tags == [test_tag] # test comment @pytest.mark.parametrize( diff --git a/tests/gordo/server/test_gordo_server.py b/tests/gordo/server/test_gordo_server.py index 16eed3ec5..3730e7387 100644 --- a/tests/gordo/server/test_gordo_server.py +++ b/tests/gordo/server/test_gordo_server.py @@ -27,7 +27,7 @@ def test_healthcheck_endpoint(base_route, gordo_ml_server_client): Test expected behavior of //healthcheck """ # Should also be at the very lowest level as well. 
- resp = gordo_ml_server_client.get(f"/healthcheck") + resp = gordo_ml_server_client.get("/healthcheck") assert resp.status_code == 200 resp = gordo_ml_server_client.get(f"{base_route}/healthcheck") @@ -102,7 +102,7 @@ def test_run_cmd(monkeypatch): def test_run_server_gthread(): with patch( - "gordo.server.server.run_cmd", MagicMock(return_value=None, autospec=True), + "gordo.server.server.run_cmd", MagicMock(return_value=None, autospec=True) ) as m: server.run_server( "127.0.0.1", @@ -139,7 +139,7 @@ def test_run_server_gthread(): def test_run_server_gevent(): with patch( - "gordo.server.server.run_cmd", MagicMock(return_value=None, autospec=True), + "gordo.server.server.run_cmd", MagicMock(return_value=None, autospec=True) ) as m: server.run_server( "127.0.0.1", @@ -290,7 +290,7 @@ def test_models_by_revision_list_view(caplog, tmpdir, revision_to_models): # revision_to_models is empty, so there is nothing on the server. # Test that asking for some arbitrary revision will give a 404 and error message resp = client.get( - f"/gordo/v0/test-project/models?revision=revision-does-not-exist" + "/gordo/v0/test-project/models?revision=revision-does-not-exist" ) assert resp.status_code == 410 assert resp.json == { diff --git a/tests/gordo/util/test_version.py b/tests/gordo/util/test_version.py index d36bc6676..c07b071a9 100644 --- a/tests/gordo/util/test_version.py +++ b/tests/gordo/util/test_version.py @@ -1,6 +1,13 @@ import pytest -from gordo.util.version import parse_version, GordoRelease, GordoSpecial, Special, GordoPR, GordoSHA +from gordo.util.version import ( + parse_version, + GordoRelease, + GordoSpecial, + Special, + GordoPR, + GordoSHA, +) def test_release(): @@ -21,12 +28,12 @@ def test_release(): @pytest.mark.parametrize( "gordo_version,expected", [ - ('1.2.3', GordoRelease(1, 2, 3)), - ('3.4.5dev2', GordoRelease(3, 4, 5, 'dev2')), - ('5.7', GordoRelease(5, 7)), - ('latest', GordoSpecial(Special.LATEST)), - ('pr-43', GordoPR(43)), - ('dke0832k', GordoSHA('dke0832k')) + ("1.2.3", GordoRelease(1, 2, 3)), + ("3.4.5dev2", GordoRelease(3, 4, 5, "dev2")), + ("5.7", GordoRelease(5, 7)), + ("latest", GordoSpecial(Special.LATEST)), + ("pr-43", GordoPR(43)), + ("dke0832k", GordoSHA("dke0832k")), ], ) def test_versions(gordo_version, expected): diff --git a/tests/gordo/workflow/test_normlized_config.py b/tests/gordo/workflow/test_normlized_config.py new file mode 100644 index 000000000..0e53a80ff --- /dev/null +++ b/tests/gordo/workflow/test_normlized_config.py @@ -0,0 +1,49 @@ +import pytest + +from gordo.workflow.config_elements.normalized_config import NormalizedConfig + + +def test_splited_docker_images(): + config = {"machines": [], "globals": {"runtime": {}}} + normalized_config = NormalizedConfig(config, "test", "1.0.0") + config_globals = normalized_config.globals + config_runtime = config_globals["runtime"] + assert config_runtime["deployer"]["image"] == "gordo-deploy" + assert config_runtime["server"]["image"] == "gordo-model-server" + assert config_runtime["prometheus_metrics_server"]["image"] == "gordo-model-server" + assert config_runtime["builder"]["image"] == "gordo-model-builder" + assert config_runtime["client"]["image"] == "gordo-client" + + +def test_unified_docker_images(): + config = {"machines": [], "globals": {"runtime": {}}} + normalized_config = NormalizedConfig(config, "test", "1.3.0") + config_globals = normalized_config.globals + config_runtime = config_globals["runtime"] + assert config_runtime["deployer"]["image"] == "gordo-base" + assert 
config_runtime["server"]["image"] == "gordo-base" + assert config_runtime["prometheus_metrics_server"]["image"] == "gordo-base" + assert config_runtime["builder"]["image"] == "gordo-base" + assert config_runtime["client"]["image"] == "gordo-base" + + +def test_custom_docker_images(): + config = { + "machines": [], + "globals": { + "runtime": { + "deployer": {"image": "my-deployer"}, + "server": {"image": "my-server"}, + "builder": {"image": "my-builder"}, + } + }, + } + normalized_config = NormalizedConfig(config, "test", "1.1.0") + config_globals = normalized_config.globals + print(config_globals) + config_runtime = config_globals["runtime"] + assert config_runtime["deployer"]["image"] == "my-deployer" + assert config_runtime["server"]["image"] == "my-server" + assert config_runtime["prometheus_metrics_server"]["image"] == "gordo-model-server" + assert config_runtime["builder"]["image"] == "my-builder" + assert config_runtime["client"]["image"] == "gordo-client" diff --git a/tests/gordo/workflow/test_workflow_generator/data/config-test-runtime-images.yaml b/tests/gordo/workflow/test_workflow_generator/data/config-test-runtime-images.yaml new file mode 100644 index 000000000..5471ee517 --- /dev/null +++ b/tests/gordo/workflow/test_workflow_generator/data/config-test-runtime-images.yaml @@ -0,0 +1,61 @@ +machines: + + - name: ct-23-0002 #Uses defaults of everything + dataset: + tags: + - CT/1 + - CT/2 + - CT/3 + train_start_date: 2016-11-07T09:11:30+01:00 + train_end_date: 2018-09-15T03:01:00+01:00 + + + - name: ct-23-0003 #Modifies server.resources.requests.memory + dataset: + tags: + - CT/1 + - CT/2 + - CT/3 + train_start_date: 2016-11-07T09:11:30+01:00 + train_end_date: 2018-09-15T03:01:00+01:00 + +globals: + runtime: #We request some different resources for the server, but not change limit + server: + image: "new-server-image" + resources: + requests: + memory: 111 + cpu: 112 + builder: # We want different builder settings + image: "new-builder-image" + resources: + requests: + memory: 121 + limits: + memory: 120 # This is illegal since it is smaller than request, and will be bumped to 121 + deployer: + image: "new-deploy-image" + client: + image: "new-client-image" + resources: + requests: + memory: 221 + limits: + memory: 220 # This is illegal since it is smaller than request, and will be bumped to 221 + max_instances: 10 + + influx: + resources: + requests: + memory: 321 + limits: + memory: 320 # This is illegal since it is smaller than request, and will be bumped to 321 + + + model: + sklearn.pipeline.Pipeline: + steps: + - sklearn.preprocessing.MinMaxScaler + - gordo.machine.model.models.KerasAutoEncoder: + kind: feedforward_hourglass diff --git a/tests/gordo/workflow/test_workflow_generator/test_workflow_generator.py b/tests/gordo/workflow/test_workflow_generator/test_workflow_generator.py index 55421cc59..17ab02f0e 100644 --- a/tests/gordo/workflow/test_workflow_generator/test_workflow_generator.py +++ b/tests/gordo/workflow/test_workflow_generator/test_workflow_generator.py @@ -244,6 +244,38 @@ def test_overrides_builder_datasource(path_to_config_files): )["dataset"]["data_provider"] +def test_runtime_image_override(path_to_config_files): + expanded_template = _generate_test_workflow_yaml( + path_to_config_files, "config-test-runtime-images.yaml" + ) + templates = expanded_template["spec"]["templates"] + model_builder_task = [ + task for task in templates if task["name"] == "model-builder" + ][0] + model_builder_image = model_builder_task["container"]["image"] + 
actual_model_builder_image = model_builder_image.split("/")[-1].split(":")[0] + assert actual_model_builder_image == "new-builder-image" + + client_task = [task for task in templates if task["name"] == "gordo-client"][0] + client_task_image = client_task["script"]["image"] + actual_client_task_image = client_task_image.split("/")[-1].split(":")[0] + assert actual_client_task_image == "new-client-image" + + server_task = [ + task for task in templates if task["name"] == "gordo-server-deployment" + ][0] + server_task_deployment = server_task["steps"][0][0]["arguments"]["parameters"][0][ + "value" + ] + server_task_yaml = yaml.load(server_task_deployment) + actual_server_task_image = ( + server_task_yaml["spec"]["template"]["spec"]["containers"][0]["image"] + .split("/")[-1] + .split(":")[0] + ) + assert actual_server_task_image == "new-server-image" + + def test_runtime_overrides_builder(path_to_config_files): expanded_template = _generate_test_workflow_yaml( path_to_config_files, "config-test-runtime-resource.yaml" diff --git a/tests/test_formatting.py b/tests/test_formatting.py deleted file mode 100644 index cf05bbddc..000000000 --- a/tests/test_formatting.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -import os -import sys - - -def test_formatting_black(): - project_path = os.path.join(os.path.dirname(__file__), "..") - gordo_path = os.path.join(project_path, "gordo") - tests_path = os.path.join(project_path, "tests") - cmd = [ - sys.executable, - "-m", - "black", - "--check", - "-v", - gordo_path, - tests_path, - "--exclude", - r".*_version.py", - ] - exit_code = os.system(" ".join(cmd)) - assert exit_code == 0
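The NormalizedConfig change above gates the default runtime images on the gordo version: configs resolved with a version older than UNIFYING_GORDO_VERSION ("1.2.0") keep the per-component images (gordo-model-builder, gordo-model-server, gordo-client, gordo-deploy), while 1.2.0 and later point every component at the single gordo-base image. A minimal sanity-check sketch of that behaviour, assuming this branch of gordo is installed in the active environment (the version strings below are illustrative only):

# Sketch: exercise the version gate added to NormalizedConfig.get_default_globals.
from gordo.workflow.config_elements.normalized_config import NormalizedConfig

# Versions below the unifying release keep the split, per-component images.
legacy = NormalizedConfig.get_default_globals("1.1.0")
assert legacy["runtime"]["builder"]["image"] == "gordo-model-builder"
assert legacy["runtime"]["client"]["image"] == "gordo-client"

# From 1.2.0 onwards every runtime component resolves to the unified gordo-base image.
unified = NormalizedConfig.get_default_globals("1.2.0")
for component in ("deployer", "server", "prometheus_metrics_server", "builder", "client"):
    assert unified["runtime"][component]["image"] == "gordo-base"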