diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 42bd98ee..4ec31992 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,4 +11,10 @@ repos:
 - repo: https://github.com/pre-commit/mirrors-mypy.git
   rev: v0.790
   hooks:
-  - id: mypy
+  - id: mypy
+- repo: https://github.com/pycqa/isort
+  rev: 5.7.0
+  hooks:
+  - id: isort
+    args: ["--profile", "black"]
+
diff --git a/forwarder/application_logger.py b/forwarder/application_logger.py
index 9d34a192..b398d19d 100644
--- a/forwarder/application_logger.py
+++ b/forwarder/application_logger.py
@@ -1,7 +1,8 @@
 import logging
-import graypy
 from typing import Optional
 
+import graypy
+
 logger_name = "python-forwarder"
diff --git a/forwarder/configuration_store.py b/forwarder/configuration_store.py
index e251ea53..4abdb7f6 100644
--- a/forwarder/configuration_store.py
+++ b/forwarder/configuration_store.py
@@ -1,15 +1,17 @@
 import time
 from typing import Dict
 from unittest import mock
+
 from confluent_kafka import TopicPartition
-from streaming_data_types.forwarder_config_update_rf5k import (
-    serialise_rf5k,
-    StreamInfo,
-    Protocol,
-)
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
+from streaming_data_types.forwarder_config_update_rf5k import (
+    Protocol,
+    StreamInfo,
+    serialise_rf5k,
+)
+
 from forwarder.parse_config_update import EpicsProtocol
diff --git a/forwarder/epics_to_serialisable_types.py b/forwarder/epics_to_serialisable_types.py
index f0f1fc1d..757f08dd 100644
--- a/forwarder/epics_to_serialisable_types.py
+++ b/forwarder/epics_to_serialisable_types.py
@@ -1,7 +1,7 @@
 import numpy as np
 from caproto import ChannelType
-from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
 from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
+from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
 
 # caproto can give us values of different dtypes even from the same EPICS channel,
 # for example it will use the smallest integer type it can for the particular value,
diff --git a/forwarder/handle_config_change.py b/forwarder/handle_config_change.py
index 8bbeaabb..6bf06200 100644
--- a/forwarder/handle_config_change.py
+++ b/forwarder/handle_config_change.py
@@ -1,14 +1,18 @@
-from forwarder.update_handlers.create_update_handler import create_update_handler
-from forwarder.parse_config_update import CommandType, Channel, ConfigUpdate
-from typing import Optional, Dict
+import fnmatch
 from logging import Logger
-from forwarder.status_reporter import StatusReporter
-from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
+from typing import Dict, Optional
+
 from caproto.threading.client import Context as CaContext
 from p4p.client.thread import Context as PvaContext
+
+from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
 from forwarder.kafka.kafka_producer import KafkaProducer
-from forwarder.update_handlers.create_update_handler import UpdateHandler
-import fnmatch
+from forwarder.parse_config_update import Channel, CommandType, ConfigUpdate
+from forwarder.status_reporter import StatusReporter
+from forwarder.update_handlers.create_update_handler import (
+    UpdateHandler,
+    create_update_handler,
+)
 
 
 def _subscribe_to_pv(
diff --git a/forwarder/kafka/kafka_helpers.py b/forwarder/kafka/kafka_helpers.py
index e39d7721..0ff09ba3 100644
--- a/forwarder/kafka/kafka_helpers.py
+++ b/forwarder/kafka/kafka_helpers.py
@@ -1,18 +1,21 @@
-from confluent_kafka import Consumer, Producer
-from .kafka_producer import KafkaProducer
-from streaming_data_types.logdata_f142 import serialise_f142
-from streaming_data_types.timestamps_tdct import serialise_tdct
-from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
-from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
 import uuid
+from typing import Dict, Optional, Tuple, Union
+
 import numpy as np
-from typing import Optional, Tuple, Dict, Union
+from confluent_kafka import Consumer, Producer
+from streaming_data_types.epics_connection_info_ep00 import serialise_ep00
 from streaming_data_types.fbschemas.epics_connection_info_ep00.EventType import (
     EventType as ConnectionStatusEventType,
 )
-from streaming_data_types.epics_connection_info_ep00 import serialise_ep00
+from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
+from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
+from streaming_data_types.logdata_f142 import serialise_f142
+from streaming_data_types.timestamps_tdct import serialise_tdct
+
 from forwarder.utils import Counter
 
+from .kafka_producer import KafkaProducer
+
 
 def create_producer(
     broker_address: str,
diff --git a/forwarder/kafka/kafka_producer.py b/forwarder/kafka/kafka_producer.py
index d0fa3440..ab96327b 100644
--- a/forwarder/kafka/kafka_producer.py
+++ b/forwarder/kafka/kafka_producer.py
@@ -1,7 +1,9 @@
-import confluent_kafka
 from threading import Thread
-from forwarder.application_logger import setup_logger
 from typing import Optional
+
+import confluent_kafka
+
+from forwarder.application_logger import setup_logger
 from forwarder.utils import Counter
diff --git a/forwarder/parse_commandline_args.py b/forwarder/parse_commandline_args.py
index 725b10a1..c14626f0 100644
--- a/forwarder/parse_commandline_args.py
+++ b/forwarder/parse_commandline_args.py
@@ -1,9 +1,10 @@
+import configparser
 import logging
-import configargparse
 from os import getpid
-from socket import gethostname
-import configparser
 from pathlib import Path
+from socket import gethostname
+
+import configargparse
 
 
 class VersionArgParser(configargparse.ArgumentParser):
diff --git a/forwarder/parse_config_update.py b/forwarder/parse_config_update.py
index 1f21699b..53f3b54c 100644
--- a/forwarder/parse_config_update.py
+++ b/forwarder/parse_config_update.py
@@ -1,20 +1,22 @@
-from forwarder.application_logger import get_logger
-import attr
 from enum import Enum
-from typing import Tuple, Generator, Optional, List
+from typing import Generator, List, Optional, Tuple
+
+import attr
+from flatbuffers.packer import struct as flatbuffer_struct
 from streaming_data_types.exceptions import WrongSchemaException
-from streaming_data_types.forwarder_config_update_rf5k import (
-    deserialise_rf5k,
-    StreamInfo,
+from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
+    Protocol,
 )
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
-from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
-    Protocol,
+from streaming_data_types.forwarder_config_update_rf5k import (
+    StreamInfo,
+    deserialise_rf5k,
 )
+
+from forwarder.application_logger import get_logger
 from forwarder.update_handlers.schema_publishers import schema_publishers
-from flatbuffers.packer import struct as flatbuffer_struct
 
 logger = get_logger()
diff --git a/forwarder/repeat_timer.py b/forwarder/repeat_timer.py
index 49e78e99..37251b4c 100644
--- a/forwarder/repeat_timer.py
+++ b/forwarder/repeat_timer.py
@@ -1,5 +1,5 @@
-from threading import Timer, Lock
 from datetime import datetime, timedelta
+from threading import Lock, Timer
 
 
 def milliseconds_to_seconds(time_ms: int) -> float:
diff --git a/forwarder/status_reporter.py b/forwarder/status_reporter.py
index d89ea56f..4703ce88 100644
--- a/forwarder/status_reporter.py
+++ b/forwarder/status_reporter.py
@@ -1,14 +1,16 @@
-from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
-from forwarder.kafka.kafka_producer import KafkaProducer
-from typing import Dict
-from streaming_data_types.status_x5f2 import serialise_x5f2
 import json
 import time
-from socket import gethostname
-from os import getpid
 from logging import Logger
-from forwarder.update_handlers.create_update_handler import UpdateHandler
+from os import getpid
+from socket import gethostname
+from typing import Dict
+
+from streaming_data_types.status_x5f2 import serialise_x5f2
+
+from forwarder.kafka.kafka_producer import KafkaProducer
 from forwarder.parse_config_update import Channel
+from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
+from forwarder.update_handlers.create_update_handler import UpdateHandler
 
 
 class StatusReporter:
diff --git a/forwarder/update_handlers/ca_update_handler.py b/forwarder/update_handlers/ca_update_handler.py
index ca512a8f..b5410266 100644
--- a/forwarder/update_handlers/ca_update_handler.py
+++ b/forwarder/update_handlers/ca_update_handler.py
@@ -1,23 +1,25 @@
-from forwarder.application_logger import get_logger
-from forwarder.kafka.kafka_producer import KafkaProducer
-from caproto import ReadNotifyResponse, ChannelType
-import numpy as np
+import time
 from threading import Lock
-from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
+from typing import Any, Optional, Tuple
+
+import numpy as np
+from caproto import ChannelType, ReadNotifyResponse
+from caproto.threading.client import PV
+from caproto.threading.client import Context as CAContext
+
+from forwarder.application_logger import get_logger
 from forwarder.epics_to_serialisable_types import (
-    numpy_type_from_caproto_type,
-    epics_alarm_severity_to_f142,
     ca_alarm_status_to_f142,
+    epics_alarm_severity_to_f142,
+    numpy_type_from_caproto_type,
 )
-from caproto.threading.client import Context as CAContext
-from caproto.threading.client import PV
-from typing import Optional, Tuple, Any
-from forwarder.update_handlers.schema_publishers import schema_publishers
-import time
 from forwarder.kafka.kafka_helpers import (
     publish_connection_status_message,
     seconds_to_nanoseconds,
 )
+from forwarder.kafka.kafka_producer import KafkaProducer
+from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
+from forwarder.update_handlers.schema_publishers import schema_publishers
 
 
 class CAUpdateHandler:
diff --git a/forwarder/update_handlers/create_update_handler.py b/forwarder/update_handlers/create_update_handler.py
index a2fe87e3..5db39319 100644
--- a/forwarder/update_handlers/create_update_handler.py
+++ b/forwarder/update_handlers/create_update_handler.py
@@ -3,13 +3,12 @@
 from caproto.threading.client import Context as CAContext
 from p4p.client.thread import Context as PVAContext
 
-from forwarder.parse_config_update import EpicsProtocol
-from forwarder.parse_config_update import Channel as ConfigChannel
 from forwarder.kafka.kafka_producer import KafkaProducer
+from forwarder.parse_config_update import Channel as ConfigChannel
+from forwarder.parse_config_update import EpicsProtocol
 from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
-from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
 from forwarder.update_handlers.fake_update_handler import FakeUpdateHandler
-
+from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
 
 UpdateHandler = Union[CAUpdateHandler, PVAUpdateHandler, FakeUpdateHandler]
diff --git a/forwarder/update_handlers/fake_update_handler.py b/forwarder/update_handlers/fake_update_handler.py
index ffcc4d46..97d1034c 100644
--- a/forwarder/update_handlers/fake_update_handler.py
+++ b/forwarder/update_handlers/fake_update_handler.py
@@ -1,9 +1,11 @@
-from forwarder.kafka.kafka_producer import KafkaProducer
+import time
+from random import randint
+
 import numpy as np
+
+from forwarder.kafka.kafka_producer import KafkaProducer
 from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
-import time
 from forwarder.update_handlers.schema_publishers import schema_publishers
-from random import randint
 
 
 class FakeUpdateHandler:
diff --git a/forwarder/update_handlers/pva_update_handler.py b/forwarder/update_handlers/pva_update_handler.py
index 148391cc..52e5d56b 100644
--- a/forwarder/update_handlers/pva_update_handler.py
+++ b/forwarder/update_handlers/pva_update_handler.py
@@ -1,24 +1,27 @@
-from p4p.client.thread import Context as PVAContext
+import time
+from threading import Lock
+from typing import Any, Optional, Tuple, Union
+
+import numpy as np
 from p4p import Value
-from forwarder.kafka.kafka_producer import KafkaProducer
+from p4p.client.thread import Cancelled
+from p4p.client.thread import Context as PVAContext
+from p4p.client.thread import Disconnected, RemoteError
+from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
+
 from forwarder.application_logger import get_logger
-from typing import Optional, Tuple, Union, Any
-from threading import Lock
-from forwarder.update_handlers.schema_publishers import schema_publishers
-from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
 from forwarder.epics_to_serialisable_types import (
-    numpy_type_from_p4p_type,
     epics_alarm_severity_to_f142,
+    numpy_type_from_p4p_type,
     pva_alarm_message_to_f142_alarm_status,
 )
-from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
-import numpy as np
 from forwarder.kafka.kafka_helpers import (
     publish_connection_status_message,
     seconds_to_nanoseconds,
 )
-from p4p.client.thread import Cancelled, Disconnected, RemoteError
-import time
+from forwarder.kafka.kafka_producer import KafkaProducer
+from forwarder.repeat_timer import RepeatTimer, milliseconds_to_seconds
+from forwarder.update_handlers.schema_publishers import schema_publishers
 
 
 def _get_alarm_status(response):
diff --git a/forwarder/update_handlers/schema_publishers.py b/forwarder/update_handlers/schema_publishers.py
index 7dcc0fad..96ab7ba3 100644
--- a/forwarder/update_handlers/schema_publishers.py
+++ b/forwarder/update_handlers/schema_publishers.py
@@ -1,6 +1,6 @@
-from forwarder.kafka.kafka_helpers import publish_f142_message, publish_tdct_message
-from typing import Dict, Callable
+from typing import Callable, Dict
 
+from forwarder.kafka.kafka_helpers import publish_f142_message, publish_tdct_message
 
 schema_publishers: Dict[str, Callable] = {
     "f142": publish_f142_message,
diff --git a/forwarder_launch.py b/forwarder_launch.py
index 1a71643c..ed9a7797 100644
--- a/forwarder_launch.py
+++ b/forwarder_launch.py
@@ -5,23 +5,21 @@
 from caproto.threading.client import Context as CaContext
 from p4p.client.thread import Context as PvaContext
 
+from forwarder.application_logger import setup_logger
+from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
+from forwarder.handle_config_change import handle_configuration_change
 from forwarder.kafka.kafka_helpers import (
-    create_producer,
     create_consumer,
+    create_producer,
     get_broker_and_topic_from_uri,
 )
-from forwarder.application_logger import setup_logger
-from forwarder.parse_config_update import parse_config_update
-from forwarder.status_reporter import StatusReporter
+from forwarder.parse_commandline_args import get_version, parse_args
+from forwarder.parse_config_update import Channel, parse_config_update
 from forwarder.statistics_reporter import StatisticsReporter
-from forwarder.parse_commandline_args import parse_args, get_version
-from forwarder.handle_config_change import handle_configuration_change
+from forwarder.status_reporter import StatusReporter
 from forwarder.update_handlers.create_update_handler import UpdateHandler
-from forwarder.parse_config_update import Channel
-from forwarder.configuration_store import ConfigurationStore, NullConfigurationStore
 from forwarder.utils import Counter
-
 
 if __name__ == "__main__":
     args = parse_args()
diff --git a/manual_testing/add_ca_config.py b/manual_testing/add_ca_config.py
index d2a71c7c..5b8d215b 100644
--- a/manual_testing/add_ca_config.py
+++ b/manual_testing/add_ca_config.py
@@ -1,12 +1,14 @@
-from forwarder.kafka.kafka_helpers import create_producer
-from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
-from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
-    UpdateType,
-)
+import time
+
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
-import time
+from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
+    UpdateType,
+)
+from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k
+
+from forwarder.kafka.kafka_helpers import create_producer
 
 """
 "docker-compose up" first!
diff --git a/manual_testing/add_fake_config.py b/manual_testing/add_fake_config.py
index 844f1c7c..109029cc 100644
--- a/manual_testing/add_fake_config.py
+++ b/manual_testing/add_fake_config.py
@@ -1,12 +1,14 @@
-from forwarder.kafka.kafka_helpers import create_producer
-from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
-from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
-    UpdateType,
-)
+import time
+
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
-import time
+from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
+    UpdateType,
+)
+from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k
+
+from forwarder.kafka.kafka_helpers import create_producer
 
 """
 "docker-compose up" first!
diff --git a/manual_testing/add_pva_config.py b/manual_testing/add_pva_config.py
index 94f147ec..908fc47b 100644
--- a/manual_testing/add_pva_config.py
+++ b/manual_testing/add_pva_config.py
@@ -1,12 +1,14 @@
-from forwarder.kafka.kafka_helpers import create_producer
-from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
-from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
-    UpdateType,
-)
+import time
+
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
-import time
+from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
+    UpdateType,
+)
+from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k
+
+from forwarder.kafka.kafka_helpers import create_producer
 
 """
 "docker-compose up" first!
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 9926805f..f02d59c9 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -9,6 +9,7 @@ wheel
 black==20.8b1
 flake8==3.8.4
 mypy==v0.790
+isort==5.7.0
 
 # For system tests
 docker-compose
diff --git a/system_tests/conftest.py b/system_tests/conftest.py
index c12d3c0f..2f3cae69 100644
--- a/system_tests/conftest.py
+++ b/system_tests/conftest.py
@@ -1,23 +1,23 @@
 import os.path
+import warnings
+from subprocess import Popen
+from time import sleep
+
+import docker
 import pytest
 from compose.cli.main import TopLevelCommand, project_from_options
-from confluent_kafka.admin import AdminClient
 from confluent_kafka import Producer
-import docker
-from time import sleep
-from subprocess import Popen
-import warnings
-from streaming_data_types.forwarder_config_update_rf5k import (
-    serialise_rf5k,
-    StreamInfo,
-    Protocol,
-)
+from confluent_kafka.admin import AdminClient
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
+from streaming_data_types.forwarder_config_update_rf5k import (
+    Protocol,
+    StreamInfo,
+    serialise_rf5k,
+)
 
-from .helpers.PVs import PVSTR, PVLONG
-
+from .helpers.PVs import PVLONG, PVSTR
 
 LOCAL_BUILD = "--local-build"
 WAIT_FOR_DEBUGGER_ATTACH = "--wait-to-attach-debugger"
diff --git a/system_tests/helpers/flatbuffer_helpers.py b/system_tests/helpers/flatbuffer_helpers.py
index 1df2b567..70dc8af7 100644
--- a/system_tests/helpers/flatbuffer_helpers.py
+++ b/system_tests/helpers/flatbuffer_helpers.py
@@ -1,7 +1,9 @@
 from cmath import isclose
+from typing import Any
+
 import numpy as np
 from streaming_data_types.logdata_f142 import deserialise_f142
-from typing import Any
+
 from .f142_logdata.AlarmSeverity import AlarmSeverity
 from .f142_logdata.AlarmStatus import AlarmStatus
diff --git a/system_tests/helpers/forwarderconfig.py b/system_tests/helpers/forwarderconfig.py
index 37a532c5..b38a8991 100644
--- a/system_tests/helpers/forwarderconfig.py
+++ b/system_tests/helpers/forwarderconfig.py
@@ -1,11 +1,12 @@
-from streaming_data_types.forwarder_config_update_rf5k import serialise_rf5k, StreamInfo
+from typing import List
+
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
-from typing import List
+from streaming_data_types.forwarder_config_update_rf5k import StreamInfo, serialise_rf5k
 
 
 class ForwarderConfig:
diff --git a/system_tests/helpers/kafka_helpers.py b/system_tests/helpers/kafka_helpers.py
index fffec383..57014cf3 100644
--- a/system_tests/helpers/kafka_helpers.py
+++ b/system_tests/helpers/kafka_helpers.py
@@ -1,8 +1,8 @@
-from confluent_kafka import TopicPartition
-from confluent_kafka import Consumer
 import uuid
+from typing import Optional, Tuple
+
+from confluent_kafka import Consumer, TopicPartition
 from pytictoc import TicToc
-from typing import Tuple, Optional
 
 
 class MsgErrorException(Exception):
diff --git a/system_tests/helpers/producerwrapper.py b/system_tests/helpers/producerwrapper.py
index a96c3005..4030d55f 100644
--- a/system_tests/helpers/producerwrapper.py
+++ b/system_tests/helpers/producerwrapper.py
@@ -1,11 +1,13 @@
-from .forwarderconfig import ForwarderConfig
-from confluent_kafka import Producer, Consumer, KafkaException
 import uuid
 from typing import List
+
+from confluent_kafka import Consumer, KafkaException, Producer
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
 
+from .forwarderconfig import ForwarderConfig
+
 
 class ProducerWrapper:
     """
diff --git a/system_tests/test_config_storage.py b/system_tests/test_config_storage.py
index 74ce35b4..70264bef 100644
--- a/system_tests/test_config_storage.py
+++ b/system_tests/test_config_storage.py
@@ -1,8 +1,10 @@
-from .helpers.kafka_helpers import create_consumer, poll_for_valid_message
-from .helpers.PVs import PVSTR, PVLONG
 import json
+
 from streaming_data_types.status_x5f2 import deserialise_x5f2
 
+from .helpers.kafka_helpers import create_consumer, poll_for_valid_message
+from .helpers.PVs import PVLONG, PVSTR
+
 
 def test_on_starting_stored_config_is_retrieved(docker_compose_storage):
     cons = create_consumer("latest")
diff --git a/system_tests/test_forwarding.py b/system_tests/test_forwarding.py
index 385426dd..c132679e 100644
--- a/system_tests/test_forwarding.py
+++ b/system_tests/test_forwarding.py
@@ -1,35 +1,37 @@
-from confluent_kafka import TopicPartition, Consumer
-from .helpers.producerwrapper import ProducerWrapper
+import json
+from time import sleep
+
+import numpy as np
+import pytest
+from caproto._utils import CaprotoTimeoutError
+from confluent_kafka import Consumer, TopicPartition
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
     Protocol,
 )
-from time import sleep
+from streaming_data_types.status_x5f2 import deserialise_x5f2
+
+from .helpers.epics_helpers import change_pv_value
+from .helpers.f142_logdata.AlarmSeverity import AlarmSeverity
+from .helpers.f142_logdata.AlarmStatus import AlarmStatus
 from .helpers.flatbuffer_helpers import (
+    check_expected_alarm_status,
     check_expected_value,
     check_multiple_expected_values,
-    check_expected_alarm_status,
 )
 from .helpers.kafka_helpers import (
     create_consumer,
-    poll_for_valid_message,
     get_last_available_status_message,
+    poll_for_valid_message,
 )
-from .helpers.epics_helpers import change_pv_value
+from .helpers.producerwrapper import ProducerWrapper
 from .helpers.PVs import (
     PVDOUBLE,
-    PVSTR,
-    PVLONG,
+    PVDOUBLE_WITH_ALARM_THRESHOLDS,
     PVENUM,
     PVFLOATARRAY,
-    PVDOUBLE_WITH_ALARM_THRESHOLDS,
+    PVLONG,
+    PVSTR,
 )
-import json
-import numpy as np
-from .helpers.f142_logdata.AlarmSeverity import AlarmSeverity
-from .helpers.f142_logdata.AlarmStatus import AlarmStatus
-import pytest
-from streaming_data_types.status_x5f2 import deserialise_x5f2
-from caproto._utils import CaprotoTimeoutError
 
 CONFIG_TOPIC = "TEST_forwarderConfig"
 INITIAL_STRING_VALUE = "test"
diff --git a/system_tests/test_idle_pv_updates.py b/system_tests/test_idle_pv_updates.py
index 4d5431b9..11eb26ea 100644
--- a/system_tests/test_idle_pv_updates.py
+++ b/system_tests/test_idle_pv_updates.py
@@ -1,10 +1,12 @@
+from time import sleep
+
 from streaming_data_types.logdata_f142 import deserialise_f142
-from .helpers.kafka_helpers import create_consumer, poll_for_valid_message
+
 from .helpers.f142_logdata.AlarmSeverity import AlarmSeverity
 from .helpers.f142_logdata.AlarmStatus import AlarmStatus
 from .helpers.flatbuffer_helpers import check_expected_value
+from .helpers.kafka_helpers import create_consumer, poll_for_valid_message
 from .helpers.producerwrapper import ProducerWrapper
-from time import sleep
 from .helpers.PVs import PVDOUBLE
 
 CONFIG_TOPIC = "TEST_forwarderConfig"
diff --git a/system_tests/test_longrunning.py b/system_tests/test_longrunning.py
index 09bce742..59ea1cd7 100644
--- a/system_tests/test_longrunning.py
+++ b/system_tests/test_longrunning.py
@@ -1,17 +1,18 @@
+from datetime import datetime
+from time import sleep
+
+import docker
+import pytest
+
 from .helpers.epics_helpers import change_pv_value
+from .helpers.flatbuffer_helpers import check_expected_value
 from .helpers.kafka_helpers import (
+    MsgErrorException,
     create_consumer,
     poll_for_valid_message,
-    MsgErrorException,
 )
-from .helpers.PVs import PVDOUBLE
-import pytest
-import docker
-from time import sleep
-from datetime import datetime
-from .helpers.flatbuffer_helpers import check_expected_value
 from .helpers.producerwrapper import ProducerWrapper
-
+from .helpers.PVs import PVDOUBLE
 
 CONFIG_TOPIC = "TEST_forwarderConfig"
diff --git a/tests/handle_config_change_test.py b/tests/handle_config_change_test.py
index 302f846a..11d48ab0 100644
--- a/tests/handle_config_change_test.py
+++ b/tests/handle_config_change_test.py
@@ -1,17 +1,19 @@
+import logging
+from typing import Dict, List
+from unittest import mock
+
+import pytest
+
 from forwarder.configuration_store import ConfigurationStore
 from forwarder.handle_config_change import handle_configuration_change
-from tests.kafka.fake_producer import FakeProducer
-import logging
 from forwarder.parse_config_update import (
-    ConfigUpdate,
-    CommandType,
     Channel,
+    CommandType,
+    ConfigUpdate,
     EpicsProtocol,
 )
 from forwarder.update_handlers.create_update_handler import UpdateHandler
-from typing import Dict, List
-import pytest
-from unittest import mock
+from tests.kafka.fake_producer import FakeProducer
 
 
 class StubStatusReporter:
diff --git a/tests/kafka/kafka_helpers_test.py b/tests/kafka/kafka_helpers_test.py
index eecb096a..03418c97 100644
--- a/tests/kafka/kafka_helpers_test.py
+++ b/tests/kafka/kafka_helpers_test.py
@@ -1,9 +1,12 @@
-from forwarder.kafka.kafka_helpers import get_broker_and_topic_from_uri
-from forwarder.kafka.kafka_helpers import publish_tdct_message
-from tests.kafka.fake_producer import FakeProducer
-from streaming_data_types.timestamps_tdct import deserialise_tdct
 import numpy as np
 import pytest
+from streaming_data_types.timestamps_tdct import deserialise_tdct
+
+from forwarder.kafka.kafka_helpers import (
+    get_broker_and_topic_from_uri,
+    publish_tdct_message,
+)
+from tests.kafka.fake_producer import FakeProducer
 
 
 def test_raises_exception_if_no_forward_slash_present():
diff --git a/tests/parse_config_update_test.py b/tests/parse_config_update_test.py
index 10dd3526..da5ec688 100644
--- a/tests/parse_config_update_test.py
+++ b/tests/parse_config_update_test.py
@@ -1,18 +1,19 @@
-from streaming_data_types.forwarder_config_update_rf5k import (
-    serialise_rf5k,
-    deserialise_rf5k,
-    StreamInfo,
+from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
+    Protocol,
 )
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
+from streaming_data_types.forwarder_config_update_rf5k import (
+    StreamInfo,
+    deserialise_rf5k,
+    serialise_rf5k,
+)
+
 from forwarder.parse_config_update import (
-    parse_config_update,
     CommandType,
     _parse_streams,
-)
-from streaming_data_types.fbschemas.forwarder_config_update_rf5k.Protocol import (
-    Protocol,
+    parse_config_update,
 )
diff --git a/tests/statistic_reporter_test.py b/tests/statistic_reporter_test.py
index 2ec8661b..dd8bbd80 100644
--- a/tests/statistic_reporter_test.py
+++ b/tests/statistic_reporter_test.py
@@ -1,10 +1,10 @@
 import logging
 from typing import Dict
-from unittest.mock import MagicMock, call, ANY
+from unittest.mock import ANY, MagicMock, call
 
+from forwarder.kafka.kafka_producer import KafkaProducer
 from forwarder.statistics_reporter import StatisticsReporter
 from forwarder.utils import Counter
-from forwarder.kafka.kafka_producer import KafkaProducer
 
 logger = logging.getLogger(__name__)
 logger.addHandler(logging.NullHandler())
diff --git a/tests/status_reporter_test.py b/tests/status_reporter_test.py
index 942af7a5..9adb5d38 100644
--- a/tests/status_reporter_test.py
+++ b/tests/status_reporter_test.py
@@ -1,11 +1,12 @@
-from forwarder.status_reporter import StatusReporter
-from typing import Dict
-from tests.kafka.fake_producer import FakeProducer
 import json
-from streaming_data_types.status_x5f2 import deserialise_x5f2
 import logging
-from forwarder.parse_config_update import Channel, EpicsProtocol
+from typing import Dict
+
+from streaming_data_types.status_x5f2 import deserialise_x5f2
+
+from forwarder.parse_config_update import Channel, EpicsProtocol
+from forwarder.status_reporter import StatusReporter
+from tests.kafka.fake_producer import FakeProducer
 
 logger = logging.getLogger("stub_for_use_in_tests")
 logger.addHandler(logging.NullHandler())
diff --git a/tests/store_configuration_test.py b/tests/store_configuration_test.py
index d9e74c6b..7ea16740 100644
--- a/tests/store_configuration_test.py
+++ b/tests/store_configuration_test.py
@@ -1,23 +1,24 @@
 from unittest import mock
+
 from confluent_kafka import Consumer
-from streaming_data_types.forwarder_config_update_rf5k import (
-    serialise_rf5k,
-    StreamInfo,
-    Protocol,
-)
 from streaming_data_types.fbschemas.forwarder_config_update_rf5k.UpdateType import (
     UpdateType,
 )
-from forwarder.parse_config_update import (
-    parse_config_update,
-    config_change_to_command_type,
+from streaming_data_types.forwarder_config_update_rf5k import (
+    Protocol,
+    StreamInfo,
+    serialise_rf5k,
 )
+
 from forwarder.configuration_store import ConfigurationStore
-from forwarder.parse_config_update import Channel, EpicsProtocol
+from forwarder.parse_config_update import (
+    Channel,
+    EpicsProtocol,
+    config_change_to_command_type,
+    parse_config_update,
+)
 from tests.kafka.fake_producer import FakeProducer
-
 
 DUMMY_UPDATE_HANDLER = None
 
 CHANNELS_TO_STORE = {
diff --git a/tests/test_helpers/ca_fakes.py b/tests/test_helpers/ca_fakes.py
index 59cc5d6e..37fb8dbe 100644
--- a/tests/test_helpers/ca_fakes.py
+++ b/tests/test_helpers/ca_fakes.py
@@ -1,4 +1,5 @@
-from typing import List, Callable, Optional
+from typing import Callable, List, Optional
+
 from caproto import ReadNotifyResponse
diff --git a/tests/test_helpers/p4p_fakes.py b/tests/test_helpers/p4p_fakes.py
index 169eb2d0..42313f17 100644
--- a/tests/test_helpers/p4p_fakes.py
+++ b/tests/test_helpers/p4p_fakes.py
@@ -1,4 +1,5 @@
 from typing import Callable, Union
+
 from p4p import Value
diff --git a/tests/update_handlers/ca_update_handler_test.py b/tests/update_handlers/ca_update_handler_test.py
index 43130c33..e02727a4 100644
--- a/tests/update_handlers/ca_update_handler_test.py
+++ b/tests/update_handlers/ca_update_handler_test.py
@@ -1,20 +1,22 @@
-from tests.kafka.fake_producer import FakeProducer
-from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
-from tests.test_helpers.ca_fakes import FakeContext
 from cmath import isclose
-from streaming_data_types.logdata_f142 import deserialise_f142
-from streaming_data_types.timestamps_tdct import deserialise_tdct
-from streaming_data_types.epics_connection_info_ep00 import deserialise_ep00
-import pytest
-from caproto import ReadNotifyResponse, ChannelType, TimeStamp
-import numpy as np
-from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
-from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
 from time import sleep
 from typing import List
+
+import numpy as np
+import pytest
+from caproto import ChannelType, ReadNotifyResponse, TimeStamp
+from streaming_data_types.epics_connection_info_ep00 import deserialise_ep00
 from streaming_data_types.fbschemas.epics_connection_info_ep00.EventType import (
     EventType as ConnectionEventType,
 )
+from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
+from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
+from streaming_data_types.logdata_f142 import deserialise_f142
+from streaming_data_types.timestamps_tdct import deserialise_tdct
+
+from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
+from tests.kafka.fake_producer import FakeProducer
+from tests.test_helpers.ca_fakes import FakeContext
 
 
 def test_update_handler_throws_if_schema_not_recognised():
diff --git a/tests/update_handlers/create_update_handler_test.py b/tests/update_handlers/create_update_handler_test.py
index 884fedf7..56966f56 100644
--- a/tests/update_handlers/create_update_handler_test.py
+++ b/tests/update_handlers/create_update_handler_test.py
@@ -1,13 +1,14 @@
-from forwarder.update_handlers.create_update_handler import create_update_handler
-from forwarder.parse_config_update import Channel, EpicsProtocol
-from tests.kafka.fake_producer import FakeProducer
-from tests.test_helpers.p4p_fakes import FakeContext as FakePVAContext
-from tests.test_helpers.ca_fakes import FakeContext as FakeCAContext
-from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
-from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
 import logging
+
 import pytest
+
+from forwarder.parse_config_update import Channel, EpicsProtocol
+from forwarder.update_handlers.ca_update_handler import CAUpdateHandler
+from forwarder.update_handlers.create_update_handler import create_update_handler
+from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
+from tests.kafka.fake_producer import FakeProducer
+from tests.test_helpers.ca_fakes import FakeContext as FakeCAContext
+from tests.test_helpers.p4p_fakes import FakeContext as FakePVAContext
 
 _logger = logging.getLogger("stub_for_use_in_tests")
 _logger.addHandler(logging.NullHandler())
diff --git a/tests/update_handlers/fake_update_handler_test.py b/tests/update_handlers/fake_update_handler_test.py
index e38bfb81..815bd3fb 100644
--- a/tests/update_handlers/fake_update_handler_test.py
+++ b/tests/update_handlers/fake_update_handler_test.py
@@ -1,8 +1,9 @@
-from tests.kafka.fake_producer import FakeProducer
-from forwarder.update_handlers.fake_update_handler import FakeUpdateHandler
+import pytest
 from streaming_data_types.logdata_f142 import deserialise_f142
 from streaming_data_types.timestamps_tdct import deserialise_tdct
-import pytest
+
+from forwarder.update_handlers.fake_update_handler import FakeUpdateHandler
+from tests.kafka.fake_producer import FakeProducer
 
 
 def test_update_handler_throws_if_schema_not_recognised():
diff --git a/tests/update_handlers/pva_update_handler_test.py b/tests/update_handlers/pva_update_handler_test.py
index 135306a3..f40bf290 100644
--- a/tests/update_handlers/pva_update_handler_test.py
+++ b/tests/update_handlers/pva_update_handler_test.py
@@ -1,21 +1,23 @@
-from tests.kafka.fake_producer import FakeProducer
-from tests.test_helpers.p4p_fakes import FakeContext
-from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
-from p4p.nt import NTScalar, NTEnum
-from streaming_data_types.logdata_f142 import deserialise_f142
-from streaming_data_types.timestamps_tdct import deserialise_tdct
-from streaming_data_types.epics_connection_info_ep00 import deserialise_ep00
 from cmath import isclose
-import numpy as np
-import pytest
-from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
-from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
 from time import sleep
 from typing import List
+
+import numpy as np
+import pytest
 from p4p.client.thread import Cancelled, Disconnected, RemoteError
+from p4p.nt import NTEnum, NTScalar
+from streaming_data_types.epics_connection_info_ep00 import deserialise_ep00
 from streaming_data_types.fbschemas.epics_connection_info_ep00.EventType import (
     EventType as ConnectionEventType,
 )
+from streaming_data_types.fbschemas.logdata_f142.AlarmSeverity import AlarmSeverity
+from streaming_data_types.fbschemas.logdata_f142.AlarmStatus import AlarmStatus
+from streaming_data_types.logdata_f142 import deserialise_f142
+from streaming_data_types.timestamps_tdct import deserialise_tdct
+
+from forwarder.update_handlers.pva_update_handler import PVAUpdateHandler
+from tests.kafka.fake_producer import FakeProducer
+from tests.test_helpers.p4p_fakes import FakeContext
 
 
 def test_update_handler_throws_if_schema_not_recognised():
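
Note (appended for context, not part of the patch): apart from the .pre-commit-config.yaml and requirements-dev.txt changes, every hunk above is mechanical output from the new isort hook. With isort 5.7.0 and the "--profile", "black" arguments, imports in each module are rewritten into alphabetised groups separated by blank lines: standard library first, then third-party, then first-party/local. A minimal sketch of the convention, using forwarder/application_logger.py from the hunks above:

    # Before: standard-library and third-party imports interleaved
    import logging
    import graypy
    from typing import Optional

    # After "isort --profile black": grouped and alphabetised
    import logging
    from typing import Optional

    import graypy

The same result can be reproduced locally with "isort --profile black ." from the repository root, or by running the hook on demand with "pre-commit run isort --all-files".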