diff --git a/Dockerfile.data-model b/Dockerfile.data-model
index 80c3fa5e..f67d8f14 100644
--- a/Dockerfile.data-model
+++ b/Dockerfile.data-model
@@ -2,7 +2,8 @@ FROM centos:7
 MAINTAINER Saleem Ansari
 
 RUN yum install -y epel-release && \
-    yum install -y python-pip python-devel gcc && \
+    yum install -y python34-pip python34-devel gcc && \
+    yum install -y git && \
     yum clean all
 
 # --------------------------------------------------------------------------------------------------------------
@@ -21,15 +22,14 @@ RUN yum install -y epel-release && \
 # Note: cron daemon ( crond ) will be invoked from within entry point
 # --------------------------------------------------------------------------------------------------------------
 
+RUN pip3 install git+https://git@github.com/fabric8-analytics/fabric8-analytics-version-comparator.git
+RUN pip3 install git+https://github.com/fabric8-analytics/fabric8-analytics-utils.git
 
-# install python packages
-COPY ./requirements.txt /
-RUN pip install -r requirements.txt && rm requirements.txt
-
+COPY ./ /tmp/f8a_data_model/
 COPY ./src /src
+RUN cd /tmp/f8a_data_model && pip3 install .
 
 ADD scripts/entrypoint.sh /bin/entrypoint.sh
 ADD populate_schema.py /populate_schema.py
 
 ENTRYPOINT ["/bin/entrypoint.sh"]
-
diff --git a/Dockerfile.data-model.rhel b/Dockerfile.data-model.rhel
index 39149ef2..7175b6ca 100644
--- a/Dockerfile.data-model.rhel
+++ b/Dockerfile.data-model.rhel
@@ -18,12 +18,12 @@ LABEL author "Devtools"
 # Note: cron daemon ( crond ) will be invoked from within entry point
 # --------------------------------------------------------------------------------------------------------------
 
+RUN pip3 install git+https://git@github.com/fabric8-analytics/fabric8-analytics-version-comparator.git
+RUN pip3 install git+https://github.com/fabric8-analytics/fabric8-analytics-utils.git@latest
 
-# install python packages
-COPY ./requirements.txt /
-RUN pip install -r requirements.txt && rm requirements.txt
-
+COPY ./ /tmp/f8a_data_model/
 COPY ./src /src
+RUN cd /tmp/f8a_data_model && pip3 install .
 
 ADD scripts/entrypoint.sh /bin/entrypoint.sh
 ADD populate_schema.py /populate_schema.py
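Note on the two Dockerfiles above: the shared fabric8-analytics libraries are now installed straight from GitHub with `pip3 install git+...` before the project itself is installed via `pip3 install .`. The explicit git installs are needed because, as the new setup.py later in this patch points out, pip does not follow dependency_links on a plain `pip install .`. A hypothetical smoke test for the rebuilt image (not part of this patch) could be:

    # Run inside the built image to confirm both the git-installed dependency
    # and the project package are importable under Python 3.
    from f8a_utils.versions import get_latest_versions_for_ep  # from the GitHub install
    from src import logger                                     # from `pip3 install .`

    print("imports OK:", get_latest_versions_for_ep.__name__, logger.name)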
diff --git a/cico_setup.sh b/cico_setup.sh
index bd70542d..d1e0604a 100755
--- a/cico_setup.sh
+++ b/cico_setup.sh
@@ -31,8 +31,8 @@ docker_login() {
 prep() {
     yum -y update
     yum -y install docker git which epel-release python-virtualenv postgresql
-    yum -y install python-pip
-    pip install docker-compose
+    yum -y install python34-pip python34-devel
+    pip3 install docker-compose
     systemctl start docker
 }
diff --git a/populate_schema.py b/populate_schema.py
index f88efbe1..b4a8ca4e 100755
--- a/populate_schema.py
+++ b/populate_schema.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 """Populate graph schema."""
 
 import logging
diff --git a/requirements.txt b/requirements.txt
index 321f208b..4f2d2f14 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -25,7 +25,6 @@ flake8==3.6.0  # via flake8-polyfill
 flask-cors==3.0.7
 flask==1.0.2
 funcsigs==1.0.2  # via mock, pytest
-futures==3.2.0  # via s3transfer
 gevent==1.4.0
 greenlet==0.4.15  # via gevent
 gunicorn==19.9.0
@@ -61,3 +60,5 @@ sqlalchemy==1.2.15
 urllib3==1.24.1  # via botocore, minio, requests
 uuid==1.30
 werkzeug==0.14.1  # via flask, pytest-flask
+git+https://git@github.com/fabric8-analytics/fabric8-analytics-version-comparator.git@2e7eddc
+git+https://github.com/fabric8-analytics/fabric8-analytics-utils.git@d7aaccf
diff --git a/runtests.sh b/runtests.sh
index 7f54a3c7..0bbb0bf6 100755
--- a/runtests.sh
+++ b/runtests.sh
@@ -44,7 +44,7 @@ function start_services {
 
 function setup_virtualenv {
     echo "Create Virtualenv for Python deps ..."
-    virtualenv --python /usr/bin/python2.7 env-test
+    virtualenv -p python3 venv && source venv/bin/activate
 
     if [ $? -ne 0 ]
     then
@@ -53,21 +53,19 @@ function setup_virtualenv {
     fi
     printf "%sPython virtual environment initialized%s\n" "${YELLOW}" "${NORMAL}"
 
-    source env-test/bin/activate
-    pip install -U pip
-    pip install -r requirements.txt
+    pip3 install -r requirements.txt
 
     # Install profiling module
-    pip install pytest-profiling
+    pip3 install pytest-profiling
 
     # Install pytest-coverage module
-    pip install pytest-cov
+    pip3 install pytest-cov
 }
 
 function destroy_virtualenv {
     echo "Remove Virtualenv ..."
-    rm -rf env-test/
+    rm -rf venv/
 }
 
 echo JAVA_OPTIONS value: "$JAVA_OPTIONS"
@@ -76,9 +74,9 @@ start_services
 
 setup_virtualenv
 
-source env-test/bin/activate
+source venv/bin/activate
 
-PYTHONPATH=$(pwd)/src
+PYTHONPATH=$(pwd)
 export PYTHONPATH
 
 export BAYESIAN_PGBOUNCER_SERVICE_HOST="localhost"
@@ -99,9 +97,9 @@ echo "*** Unit tests ***"
 echo "*****************************************"
 
 echo "Check for sanity of the connections..."
-if python sanitycheck.py
+if python3 sanitycheck.py
 then
-    python populate_schema.py
+    python3 populate_schema.py
     py.test --cov=src/ --cov-report term-missing --cov-fail-under=$COVERAGE_THRESHOLD -vv -s test/
     codecov --token=3c1d9638-afb6-40e6-85eb-3fb193000d4b
 else
diff --git a/sanitycheck.py b/sanitycheck.py
index 8d612e37..23f05e4a 100644
--- a/sanitycheck.py
+++ b/sanitycheck.py
@@ -1,10 +1,10 @@
 """Sanity check of the graph DB REST API."""
 
-from graph_manager import BayesianGraph
+from src.graph_manager import BayesianGraph
 import time
 import sys
 import logging
-import config
+from src import config
 
 logging.basicConfig()
 logger = logging.getLogger(config.APP_NAME)
diff --git a/scripts/data_importer_crontab b/scripts/data_importer_crontab
index 711a1bf9..795e34af 100644
--- a/scripts/data_importer_crontab
+++ b/scripts/data_importer_crontab
@@ -1,2 +1,2 @@
-# 0 */6 * * * root . /root/project_env.sh; export PYTHONPATH=/src; python /src/data_importer.py -s S3
+# 0 */6 * * * root . /root/project_env.sh; export PYTHONPATH=/src; python3 /src/data_importer.py -s S3
 # An empty line is required at the end of this file for a valid cron file.
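Two details above are easy to miss. First, `futures==3.2.0` is dropped from requirements.txt because that package is the Python 2 backport of `concurrent.futures`; on Python 3 the module ships with the standard library, so s3transfer no longer needs the backport:

    # Plain Python 3, no third-party 'futures' package required.
    from concurrent.futures import ThreadPoolExecutor

    with ThreadPoolExecutor(max_workers=2) as pool:
        print(list(pool.map(len, ["python2", "python3"])))  # [7, 7]

Second, `PYTHONPATH` in runtests.sh changes from `$(pwd)/src` to `$(pwd)`; that is what allows every module and test in the rest of this patch to switch to `from src import ...` style imports.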
diff --git a/scripts/entrypoint.sh b/scripts/entrypoint.sh
index e41fb9ff..c248f20b 100755
--- a/scripts/entrypoint.sh
+++ b/scripts/entrypoint.sh
@@ -8,7 +8,7 @@
 done
 
 if [ ! -z "$SKIP_SCHEMA" ]; then
-    python populate_schema.py
+    python3 populate_schema.py
 fi
 
 # Start data model service with time out
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..eebaa74b
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+
+# Copyright © 2019 Red Hat Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Yusuf Zainee
+#
+
+"""Project setup file for the fabric8 analytics data model project."""
+
+from setuptools import setup, find_packages
+
+
+def get_requirements():
+    """Parse all packages mentioned in the 'requirements.txt' file."""
+    with open('requirements.txt') as fd:
+        lines = fd.read().splitlines()
+        reqs, dep_links = [], []
+        for line in lines:
+            if line.startswith('git+'):
+                dep_links.append(line)
+            else:
+                reqs.append(line)
+        return reqs, dep_links
+
+
+# pip doesn't install from dependency links by default,
+# so one should install dependencies by
+# `pip install -r requirements.txt`, not by `pip install .`
+# See https://github.com/pypa/pip/issues/2023
+reqs, dep_links = get_requirements()
+
+setup(
+    name='fabric8-analytics-data-model',
+    version='0.1',
+    scripts=[
+    ],
+    packages=find_packages(exclude=['tests', 'tests.*']),
+    install_requires=reqs,
+    dependency_links=dep_links,
+    include_package_data=True,
+    author='Yusuf Zainee',
+    author_email='yzainee@redhat.com',
+    description='data importer for fabric8 analytics',
+    license='ASL 2.0',
+    keywords='fabric8-analytics-data-model',
+    url=('https://github.com/fabric8-analytics/'
+         'fabric8-analytics-data-model')
+)
diff --git a/src/cve.py b/src/cve.py
index 2f68cda3..4c75a736 100644
--- a/src/cve.py
+++ b/src/cve.py
@@ -1,9 +1,9 @@
 """This module encapsulates CVE related queries."""
 
 import logging
 
-from graph_populator import GraphPopulator
-from graph_manager import BayesianGraph
-from utils import get_timestamp, call_gremlin
+from src.graph_populator import GraphPopulator
+from src.graph_manager import BayesianGraph
+from src.utils import get_timestamp, call_gremlin
 
 logger = logging.getLogger(__name__)
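The new setup.py above splits requirements.txt into plain pins (fed to `install_requires`) and `git+` URLs (fed to `dependency_links`), because setuptools expects requirement specifiers, not bare URLs, in `install_requires`. The same parsing rule, inlined with sample data so it runs standalone:

    # Mirrors setup.py's get_requirements() on a two-line sample input.
    lines = [
        "flask==1.0.2",
        "git+https://github.com/fabric8-analytics/fabric8-analytics-utils.git@d7aaccf",
    ]
    reqs = [line for line in lines if not line.startswith("git+")]
    dep_links = [line for line in lines if line.startswith("git+")]
    print(reqs)       # ['flask==1.0.2']
    print(dep_links)  # ['git+https://github.com/...@d7aaccf']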
diff --git a/src/data_importer.py b/src/data_importer.py
index 8c52532c..343bdea6 100644
--- a/src/data_importer.py
+++ b/src/data_importer.py
@@ -1,16 +1,16 @@
 """Module with functions to fetch data from the S3 data source."""
 
-from graph_populator import GraphPopulator
+from src.graph_populator import GraphPopulator
 import logging
-import config
-import traceback
+from src import config
 import json
 import requests
-from data_source.s3_data_source import S3DataSource
+from src.data_source.s3_data_source import S3DataSource
 from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy.orm.exc import NoResultFound
+from f8a_utils.versions import get_latest_versions_for_ep
 
 logger = logging.getLogger(config.APP_NAME)
 
@@ -51,8 +51,8 @@ def _other_key_info(data_source, other_keys, bucket_name=None):
 
 def _get_exception_msg(prefix, e):
     msg = prefix + ": " + str(e)
-    logger.error(msg)
-    tb = traceback.format_exc()
+    # logger.error(msg)
+    tb = logging.exception(msg)
     logger.error("Traceback for latest failure in import call: %s" % tb)
     return msg
 
@@ -80,11 +80,20 @@ def _import_keys_from_s3_http(data_source, epv_list):
                    'version': pkg_version,
                    'source_repo': pkg_source}
 
+            latest_version = get_latest_versions_for_ep(pkg_ecosystem, pkg_name)
+            latest_epv_list = [{
+                'ecosystem': pkg_ecosystem,
+                'name': pkg_name,
+                'version': latest_version
+            }]
+            create_graph_nodes(latest_epv_list)
+
             try:
                 # Check other Version level information and add it to common object
                 if len(contents.get('ver_list_keys')) > 0:
                     first_key = contents['ver_key_prefix'] + '.json'
                     first_obj = _first_key_info(data_source, first_key, config.AWS_EPV_BUCKET)
+                    first_obj['latest_version'] = latest_version
                     obj.update(first_obj)
                     ver_obj = _other_key_info(data_source, contents.get('ver_list_keys'),
                                               config.AWS_EPV_BUCKET)
diff --git a/src/data_source/s3_data_source.py b/src/data_source/s3_data_source.py
index e5cdc843..0414b5ed 100644
--- a/src/data_source/s3_data_source.py
+++ b/src/data_source/s3_data_source.py
@@ -1,10 +1,10 @@
 """Data source that returns data read from the AWS S3 database."""
 
-from data_source.abstract_data_source import AbstractDataSource
+from src.data_source.abstract_data_source import AbstractDataSource
 import botocore
 import boto3
 import json
-import config
+from src import config
 
 
 class S3DataSource(AbstractDataSource):
diff --git a/src/graph_manager.py b/src/graph_manager.py
index 21ae1a94..5b094029 100644
--- a/src/graph_manager.py
+++ b/src/graph_manager.py
@@ -1,6 +1,6 @@
 """Template for a singleton object which will have reference to Graph object."""
 
-import config
+from src import config
 import json
 import requests
 import os
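One behavioural detail worth flagging in the `_get_exception_msg` hunk above: `logging.exception()` logs and returns `None`, so `tb` is always `None` and the follow-up `logger.error(...)` will report `Traceback ...: None`. A sketch of an alternative that keeps the traceback text as well as logging it (a suggestion, not what the patch does):

    import logging
    import traceback

    logging.basicConfig()
    logger = logging.getLogger(__name__)

    def _get_exception_msg(prefix, e):
        msg = prefix + ": " + str(e)
        logger.exception(msg)  # logs msg plus the active traceback
        return msg, traceback.format_exc()  # traceback text for callers that need it

    try:
        1 / 0
    except ZeroDivisionError as err:
        print(_get_exception_msg("import failed", err)[0])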
diff --git a/src/graph_populator.py b/src/graph_populator.py
index e44f2bbe..0488c820 100644
--- a/src/graph_populator.py
+++ b/src/graph_populator.py
@@ -5,9 +5,10 @@
 import time
 from dateutil.parser import parse as parse_datetime
 from six import string_types
-import config
-from utils import get_current_version
+from src import config
+from src.utils import get_current_version
 from datetime import datetime
+from f8a_utils.versions import get_latest_versions_for_ep
 
 logger = logging.getLogger(config.APP_NAME)
 
@@ -22,6 +23,9 @@ def construct_graph_nodes(cls, epv):
         pkg_name = epv.get('name')
         version = epv.get('version')
         source_repo = epv.get('source_repo', '')
+        latest_version = epv.get('latest_version', '')
+        if not latest_version:
+            latest_version = get_latest_versions_for_ep(ecosystem, pkg_name)
         if ecosystem and pkg_name and version:
             # Query to Create Package Node
             # TODO: refactor into the separate module
@@ -33,8 +37,9 @@ def construct_graph_nodes(cls, epv):
                 "property('{ecosystem}_pkg_count',1)).iterate();" \
                 "graph.addVertex('ecosystem', '{ecosystem}', " \
                 "'name', '{pkg_name}', 'vertex_label', 'Package');}};" \
+                "pkg.property('latest_version', '{latest_version}');" \
                 "pkg.property('last_updated', {last_updated});".format(
-                    ecosystem=ecosystem, pkg_name=pkg_name,
+                    ecosystem=ecosystem, latest_version=latest_version, pkg_name=pkg_name,
                     last_updated=str(time.time())
                 )
 
@@ -487,6 +492,7 @@ def create_query_string(cls, input_json):
             str_gremlin += str_gremlin_version
             if not prp_package:
                 # TODO: refactor into the separate module
+                latest_version = get_latest_versions_for_ep(ecosystem, pkg_name)
                 str_gremlin += "pkg = g.V().has('ecosystem','{ecosystem}')." \
                     "has('name', '{pkg_name}').tryNext().orElseGet{{" \
                     "g.V().has('vertex_label','Count').choose(has('" \
@@ -496,9 +502,10 @@ def create_query_string(cls, input_json):
                     "'{ecosystem}_pkg_count',1)).iterate();graph.addVertex(" \
                     "'ecosystem', '{ecosystem}', 'name', '{pkg_name}', " \
                     "'vertex_label', 'Package');}};" \
+                    "pkg.property('latest_version', '{latest_version}');" \
                     "pkg.property('last_updated', {last_updated});".format(
-                        ecosystem=ecosystem, pkg_name=pkg_name,
-                        last_updated=str(time.time())
+                        ecosystem=ecosystem, latest_version=latest_version,
+                        pkg_name=pkg_name, last_updated=str(time.time())
                     )
                 # TODO: refactor into the separate module
                 str_gremlin += "edge_c = g.V().has('pecosystem','{ecosystem}').has('pname'," \
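In `construct_graph_nodes` above, a `latest_version` already present on the incoming EPV dict wins, and `get_latest_versions_for_ep` is only called as a fallback; that avoids a second lookup when `_import_keys_from_s3_http` has already resolved the value. The fallback logic, isolated with an injected resolver so the sketch runs without f8a_utils:

    # In the patch, the resolver is f8a_utils.versions.get_latest_versions_for_ep.
    def resolve_latest_version(epv, resolver):
        latest = epv.get('latest_version', '')
        return latest or resolver(epv.get('ecosystem'), epv.get('name'))

    epv = {'ecosystem': 'maven', 'name': 'net.iharder:base64', 'version': '2.3.9'}
    print(resolve_latest_version(epv, lambda eco, name: '9.9.9'))  # fallback -> '9.9.9'
    epv['latest_version'] = '2.4.1'
    print(resolve_latest_version(epv, lambda eco, name: '9.9.9'))  # dict value -> '2.4.1'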
diff --git a/src/rest_api.py b/src/rest_api.py
index b57e1910..c1f29c3c 100644
--- a/src/rest_api.py
+++ b/src/rest_api.py
@@ -6,12 +6,12 @@ from flask_cors import CORS
 import json
 import sys
 
-import data_importer
-from graph_manager import BayesianGraph
-from graph_populator import GraphPopulator
-from cve import CVEPut, CVEDelete, CVEGet, CVEDBVersion
+from src import data_importer
+from src.graph_manager import BayesianGraph
+from src.graph_populator import GraphPopulator
+from src.cve import CVEPut, CVEDelete, CVEGet, CVEDBVersion
 from raven.contrib.flask import Sentry
-import config
+from src import config as config
 from werkzeug.contrib.fixers import ProxyFix
 import logging
 from flask import Blueprint, current_app
@@ -313,7 +313,7 @@ def cvedb_version_put():
 def create_app():
     """Create Flask app object."""
     new_app = Flask(config.APP_NAME)
-    new_app.config.from_object('config')
+    new_app.config.from_object('src.config')
     CORS(new_app)
     new_app.register_blueprint(api_v1)
     return new_app
diff --git a/src/utils.py b/src/utils.py
index 5e038c13..87baf57c 100644
--- a/src/utils.py
+++ b/src/utils.py
@@ -5,7 +5,7 @@
 import requests
 import json
 import logging
-import config
+from src import config
 from datetime import datetime
 
 logger = logging.getLogger(__name__)
diff --git a/test/conftest.py b/test/conftest.py
index 217393ec..74c18400 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -2,7 +2,7 @@
 
 import pytest
 
-from rest_api import create_app
+from src.rest_api import create_app
 
 
 @pytest.fixture(scope='session')
diff --git a/test/test_cve.py b/test/test_cve.py
index 4a27ffcd..ca9cbd3b 100644
--- a/test/test_cve.py
+++ b/test/test_cve.py
@@ -3,7 +3,7 @@
 import pytest
 from mock import patch
 
-from cve import (
+from src.cve import (
     CVEPut, CVEDelete, CVEGet,
     cve_node_replace_script_template,
     cve_node_delete_script_template
@@ -123,7 +123,7 @@ def test_cve_delete_prepare_payload():
     assert bindings['cve_id']
 
 
-@patch("cve.call_gremlin")
+@patch("src.cve.call_gremlin")
 def test_cve_get_e(mocker):
     """Test getting CVEs for (ecosystem)."""
     mocker.return_value = {'result': {'data': ['CVE-2018-0001']}}
@@ -139,7 +139,7 @@ def test_cve_get_e(mocker):
     assert response['cve_ids'][0] == 'CVE-2018-0001'
 
 
-@patch("cve.call_gremlin")
+@patch("src.cve.call_gremlin")
 def test_cve_get_ep(mocker):
     """Test getting CVEs for (ecosystem,name)."""
     mocker.return_value = {'result': {'data': ['CVE-2018-0001', 'CVE-2018-0002']}}
@@ -156,7 +156,7 @@ def test_cve_get_ep(mocker):
     assert response['cve_ids'][1] in ('CVE-2018-0001', 'CVE-2018-0002')
 
 
-@patch("cve.call_gremlin")
+@patch("src.cve.call_gremlin")
 def test_cve_get_epv(mocker):
     """Test getting CVEs for (ecosystem,name,version)."""
     mocker.return_value = {'result': {'data': []}}
diff --git a/test/test_data_importer.py b/test/test_data_importer.py
index 05e2a1ec..2d4e83bb 100644
--- a/test/test_data_importer.py
+++ b/test/test_data_importer.py
@@ -1,6 +1,6 @@
 """Tests for the data_importer module (to be done)."""
 
-import data_importer
+from src import data_importer
 
 
 def test_parse_int_or_none_for_integer_input():
diff --git a/test/test_graph_manager.py b/test/test_graph_manager.py
index f8ed3f0e..75c9e453 100644
--- a/test/test_graph_manager.py
+++ b/test/test_graph_manager.py
@@ -1,8 +1,8 @@
 """Tests for the graph_manager module (to be done)."""
 
-from graph_manager import BayesianGraph as g
+from src.graph_manager import BayesianGraph as g
 import logging
-import config
+from src import config
 
 logger = logging.getLogger(config.APP_NAME)
diff --git a/test/test_graph_populator.py b/test/test_graph_populator.py
index ff6ee82e..dcb2b83e 100644
--- a/test/test_graph_populator.py
+++ b/test/test_graph_populator.py
@@ -1,9 +1,9 @@
 """Tests for the graph_populator module."""
 
-from graph_populator import GraphPopulator
+from src.graph_populator import GraphPopulator
 import pytest
 import logging
-import config
+from src import config
 
 logger = logging.getLogger(config.APP_NAME)
diff --git a/test/test_init.py b/test/test_init.py
index d4ea916e..ce7db89f 100644
--- a/test/test_init.py
+++ b/test/test_init.py
@@ -1,11 +1,11 @@
 """Tests for the __init__ script."""
 
-import __init__
+from src import logger
 
 
 def test_logger():
     """Test the logger initialized in __init__."""
-    assert __init__.logger is not None
+    assert logger is not None
 
 
 if __name__ == '__main__':
diff --git a/test/test_insertion_from_minio.py b/test/test_insertion_from_minio.py
index 99c6d7bb..80f508b5 100644
--- a/test/test_insertion_from_minio.py
+++ b/test/test_insertion_from_minio.py
@@ -5,12 +5,12 @@
 """
 
 import logging
-import config
+from src import config
 import traceback
 from minio import Minio
 from minio.error import ResponseError, BucketAlreadyOwnedByYou, BucketAlreadyExists
-from data_importer import import_epv_from_s3_http
-from graph_manager import BayesianGraph
+from src.data_importer import import_epv_from_s3_http
+from src.graph_manager import BayesianGraph
 
 logging.basicConfig()
 logger = logging.getLogger(__name__)
diff --git a/test/test_postgres_handler.py b/test/test_postgres_handler.py
index ee666723..c1a42506 100644
--- a/test/test_postgres_handler.py
+++ b/test/test_postgres_handler.py
@@ -5,7 +5,7 @@
 """
 
 import logging
-from data_importer import PostgresHandler
+from src.data_importer import PostgresHandler
 
 logging.basicConfig()
 logger = logging.getLogger(__name__)
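Every `@patch` target in these tests gains a `src.` prefix for the same reason the imports do: `mock.patch` resolves its target string as an import path at call time, so once the modules are only importable as `src.cve`, `src.utils`, and so on, the old `@patch("cve.call_gremlin")` would fail with `ModuleNotFoundError`. A standalone stdlib illustration of target-by-import-path:

    import json
    from unittest.mock import patch

    # The string must name the attribute exactly as the code under test imports it.
    with patch("json.loads", return_value={"mocked": True}):
        print(json.loads("{}"))  # {'mocked': True}; the real parser never runs
    print(json.loads("{}"))      # {} again once the patch is lifted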
diff --git a/test/test_rest_api.py b/test/test_rest_api.py
index 078e8c4c..2ea341f2 100644
--- a/test/test_rest_api.py
+++ b/test/test_rest_api.py
@@ -1,7 +1,7 @@
 """Tests for the rest_api module."""
 
 import logging
-import config
+from src import config
 import json
 from flask import url_for
 from mock import patch
@@ -37,7 +37,7 @@ def test_pending(client):
     logger.info(response)
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     assert 'pending_list' in data
     assert 'all_counts' in data
@@ -50,7 +50,7 @@ def test_sync_all(client):
     logger.info(response)
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     assert 'count_imported_EPVs' in data
     assert 'epv' in data
@@ -71,7 +71,7 @@ def test_ingest_to_graph(client):
     data = response.get_data()
     logger.info("Returned data")
     logger.info(data)
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     assert 'count_imported_EPVs' in data
     assert 'epv' in data
@@ -95,7 +95,7 @@ def test_ingest_to_graph_source(client):
                            headers={'Content-Type': 'application/json'})
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     assert 'count_imported_EPVs' in data
     assert 'epv' in data
     assert 'message' in data
@@ -117,12 +117,12 @@ def test_ingest_to_graph_valid(client):
                            headers={'Content-Type': 'application/json'})
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     epv_keys = input_data[0].keys()
     assert data['message'] == 'Invalid keys found in input: ' + ','.join(epv_keys)
 
 
-@patch("rest_api.data_importer.import_epv_from_s3_http")
+@patch("src.rest_api.data_importer.import_epv_from_s3_http")
 def test_ingest_to_graph_report(mocker, client):
     """Add test for ingest to graph API when report status is Failure."""
     input_data = [
@@ -148,7 +148,7 @@ def test_selective_ingest_empty(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -158,7 +158,7 @@ def test_selective_ingest_empty(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -168,7 +168,7 @@ def test_selective_ingest_empty(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -184,7 +184,7 @@ def test_selective_ingest_nonempty(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -204,7 +204,7 @@ def test_selective_ingest_valid(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
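The repeated `.decode('utf-8')` edits in this file are a Python 3 portability fix: Flask's `response.get_data()` returns `bytes`, and `json.loads()` only began accepting `bytes` input in Python 3.6, so decoding explicitly keeps the tests working on every supported interpreter:

    # The tests' pattern, reduced to its essentials.
    import json

    payload = b'{"pending_list": [], "all_counts": 0}'  # bytes, like get_data()
    data = json.loads(payload.decode('utf-8'))           # safe on any Python 3.x
    print(data['all_counts'])  # 0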
@@ -226,7 +226,7 @@ def test_selective_ingest_valid_source(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
@@ -235,7 +235,7 @@ def test_selective_ingest_valid_source(client):
 
 def test_handle_properties_put(client, mocker):
     """Test PUT on /api/v1/<ecosystem>/<package>/<version>/properties."""
-    gremlin_mock = mocker.patch('rest_api.BayesianGraph.execute')
+    gremlin_mock = mocker.patch('src.rest_api.BayesianGraph.execute')
     gremlin_mock.return_value = (True, {})
     url = url_for('api_v1.handle_properties', ecosystem='maven',
                   package='net.iharder:base64', version='2.3.9')
@@ -262,7 +262,7 @@ def test_handle_properties_put(client, mocker):
 
 def test_handle_properties_delete(client, mocker):
     """Test DELETE on /api/v1/<ecosystem>/<package>/<version>/properties."""
-    gremlin_mock = mocker.patch('rest_api.BayesianGraph.execute')
+    gremlin_mock = mocker.patch('src.rest_api.BayesianGraph.execute')
     gremlin_mock.return_value = (True, {})
     url = url_for('api_v1.handle_properties', ecosystem='maven',
                   package='net.iharder:base64', version='2.3.9')
@@ -293,7 +293,7 @@ def test_create_blank_nodes_invalid(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -307,7 +307,7 @@ def test_create_blank_nodes_empty(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 400/Bad Request
     assert response.status_code == 400
@@ -327,14 +327,14 @@ def test_create_blank_nodes_valid(client):
     response = client.post(url,
                            data=json.dumps(input_data),
                            headers={'Content-Type': 'application/json'})
-    data = json.loads(response.get_data())
+    data = json.loads(response.get_data().decode('utf-8'))
     logger.info(data)
     # we expect that the HTTP code will be 200/OK
     assert response.status_code == 200
     assert data['epv_nodes_created'] == 1
 
 
-@patch("rest_api.data_importer.create_graph_nodes")
+@patch("src.rest_api.data_importer.create_graph_nodes")
 def test_create_blank_nodes_report_status(mocker, client):
     """Add test to create blank nodes API when report status is Failure."""
     input_data = [
@@ -353,7 +353,7 @@ def test_create_blank_nodes_report_status(mocker, client):
     assert response.status_code == 500
 
 
-@patch("rest_api.CVEPut.process")
+@patch("src.rest_api.CVEPut.process")
 def test_cves_put(mocker, client):
     """Test PUT /api/v1/cves."""
     mocker.return_value = {}
@@ -368,7 +368,7 @@ def test_cves_put(mocker, client):
     assert response.json == {}
 
 
-@patch("rest_api.CVEPut.process")
+@patch("src.rest_api.CVEPut.process")
 def test_cves_put_invalid_input(mocker, client):
     """Test PUT /api/v1/cves with invalid input."""
     mocker.return_value = {}
@@ -383,7 +383,7 @@ def test_cves_put_invalid_input(mocker, client):
     assert 'error' in response.json
 
 
-@patch("rest_api.CVEDelete.process")
+@patch("src.rest_api.CVEDelete.process")
 def test_cves_delete(mocker, client):
     """Test DELETE /api/v1/cves."""
     mocker.return_value = {}
@@ -398,7 +398,7 @@ def test_cves_delete(mocker, client):
     assert response.json == {}
 
 
-@patch("rest_api.CVEDelete.process")
+@patch("src.rest_api.CVEDelete.process")
 def test_cves_delete_invalid_input(mocker, client):
     """Test DELETE /api/v1/cves with invalid input."""
     mocker.return_value = {}
@@ -413,7 +413,7 @@ def test_cves_delete_invalid_input(mocker, client):
     assert 'error' in response.json
 
 
-@patch("rest_api.CVEGet.get")
+@patch("src.rest_api.CVEGet.get")
 def test_cves_get_e(mocker, client):
     """Test GET /api/v1/cves/<ecosystem>."""
     mocker.return_value = {'count': 1, 'cve_ids': ['CVE-2018-0001']}
@@ -425,7 +425,7 @@ def test_cves_get_e(mocker, client):
     assert response.status_code == 200
 
 
-@patch("rest_api.CVEGet.get")
+@patch("src.rest_api.CVEGet.get")
 def test_cves_get_ep(mocker, client):
     """Test GET /api/v1/cves/<ecosystem>/<name>."""
     mocker.return_value = {'count': 1, 'cve_ids': ['CVE-2018-0001']}
@@ -437,7 +437,7 @@ def test_cves_get_ep(mocker, client):
     assert response.status_code == 200
 
 
-@patch("cve.call_gremlin")
+@patch("src.cve.call_gremlin") def test_cvedb_version_get(mocker, client): """Test GET /api/v1/cvedb-version.""" mocker.return_value = {'result': {'data': ['9f4d54dd1a21584a40596c05d60ab00974953047']}} @@ -453,7 +453,7 @@ def test_cvedb_version_get(mocker, client): assert resp['version'] == '9f4d54dd1a21584a40596c05d60ab00974953047' -@patch("cve.call_gremlin") +@patch("src.cve.call_gremlin") def test_cvedb_version_put(mocker, client): """Test PUT /api/v1/cvedb-version.""" mocker.return_value = {'result': {'data': ['9f4d54dd1a21584a40596c05d60ab00974953047']}} diff --git a/test/test_s3_data_source.py b/test/test_s3_data_source.py index b1e8d8e2..314b5c25 100644 --- a/test/test_s3_data_source.py +++ b/test/test_s3_data_source.py @@ -2,8 +2,8 @@ # TODO: to be implemented -from data_source.s3_data_source import S3DataSource -import config +from src.data_source.s3_data_source import S3DataSource +from src import config import pytest diff --git a/test/test_utils.py b/test/test_utils.py index 4bfbc68c..120e768d 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -2,9 +2,9 @@ import pytest import datetime -from utils import get_current_version, execute_gremlin_dsl, get_timestamp, call_gremlin +from src.utils import get_current_version, execute_gremlin_dsl, get_timestamp, call_gremlin import logging -import config +from src import config from mock import patch from conftest import RequestsMockResponse @@ -18,7 +18,7 @@ def test_get_current_version(): assert out2 == -1 -@patch("utils.get_session_retry") +@patch("src.utils.get_session_retry") def test_execute_gremlin_dsl(mocker): """Test the function get_version_information.""" mocker.return_value = "" @@ -60,7 +60,7 @@ def status_code(self): return '404' -@patch("utils.get_session_retry") +@patch("src.utils.get_session_retry") def test_execute_gremlin_dsl2(mocker): """Test the function get_version_information.""" mocker.return_value = MockedSession() @@ -84,14 +84,14 @@ def test_get_timestamp(): assert result == timestamp -@patch("utils.requests.post") +@patch("src.utils.requests.post") def test_gremlin_call(mocker): """Test utils.call_gremlin().""" mocker.return_value = RequestsMockResponse({}, 200) assert call_gremlin({'dummy': 'payload'}) == {} -@patch("utils.requests.post") +@patch("src.utils.requests.post") def test_bad_gremlin_call(mocker): """Test utils.call_gremlin().""" mocker.return_value = RequestsMockResponse({}, 500)