diff --git a/.github/workflows/code.yml b/.github/workflows/code.yml index 43b517a5..0063f51c 100644 --- a/.github/workflows/code.yml +++ b/.github/workflows/code.yml @@ -14,8 +14,9 @@ jobs: runs-on: ubuntu-latest strategy: + max-parallel: 4 matrix: - python-version: [3.6, 3.7] + python-version: [3.7, 3.8, 3.9, 3.10.8] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml new file mode 100644 index 00000000..7c4dc2d5 --- /dev/null +++ b/.github/workflows/docker_publish.yml @@ -0,0 +1,45 @@ +on: + push: + tags: + - '.*' + workflow_run: + workflows: ["Test code"] + branches: [master, develop] + types: + - completed + +name: Docker Build +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Check Out Repo + uses: actions/checkout@v2 + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} + + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v1 + + - name: Extract tag name + id: extract_tag_name + shell: bash + run: echo "##[set-output name=tag;]$(echo ${GITHUB_REF##*/})" + + - name: Build and push + id: docker_build + uses: docker/build-push-action@v2 + with: + context: ./ + file: ./Dockerfile + push: true + tags: napalmautomation/napalm-logs:${{ steps.extract_tag_name.outputs.tag }} + + - name: Image digest + run: echo ${{ steps.docker_build.outputs.digest }} diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index ad524171..3093ae37 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -13,15 +13,16 @@ jobs: - name: Set up Python uses: actions/setup-python@v1 with: - python-version: '3.x' + python-version: '3.9' - name: Install dependencies run: | python -m pip install --upgrade pip - pip install setuptools wheel twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + pip install setuptools wheel + - name: Build run: | python setup.py sdist bdist_wheel - twine upload dist/* + - name: Publish + uses: pypa/gh-action-pypi-publish@master + with: + user: __token__ + password: ${{ secrets.PYPI_TOKEN }} diff --git a/Dockerfile b/Dockerfile index 11d7c9f3..e6287b65 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,15 @@ -FROM python:3.6-alpine +FROM python:3.9-slim-buster COPY docker/config.txt /etc/napalm/logs COPY ./ /var/cache/napalm-logs/ # Install napalm-logs and pre-requisites -RUN apk add --no-cache \ - libffi \ - libffi-dev \ - python3-dev \ - build-base \ - && pip --no-cache-dir install cffi pyzmq==19.0.2 /var/cache/napalm-logs/ \ - && rm -rf /var/cache/napalm-logs/ - -CMD napalm-logs --config-file /etc/napalm/logs +RUN apt-get update \ + && apt-get install -y dumb-init python3-dev python3-cffi libffi-dev \ + && pip --no-cache-dir install -U pip \ + && pip --no-cache-dir install /var/cache/napalm-logs/ \ + && rm -rf /var/cache/napalm-logs/ + +ENTRYPOINT ["/usr/bin/dumb-init", "--"] + +CMD ["napalm-logs", "--config-file", "/etc/napalm/logs"] diff --git a/docs/conf.py b/docs/conf.py index f301b9a4..3302c76b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,8 +24,8 @@ # Import third party libs import jinja2 -sys.path.insert(0, os.path.abspath('../')) -sys.path.insert(0, os.path.abspath('_themes')) +sys.path.insert(0, os.path.abspath("../")) +sys.path.insert(0, os.path.abspath("_themes")) import napalm_logs # noqa from 
napalm_logs.base import NapalmLogs # noqa @@ -42,30 +42,30 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.coverage', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'napalm-logs' -copyright = u'2017-2019, Mircea Ulinic' -author = u'Mircea Ulinic' +project = "napalm-logs" +copyright = "2017-2019, Mircea Ulinic" +author = "Mircea Ulinic" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -86,10 +86,10 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'flask_theme_support.FlaskyStyle' +pygments_style = "flask_theme_support.FlaskyStyle" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -99,24 +99,24 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { - 'show_powered_by': False, - 'github_user': 'napalm-automation', - 'github_repo': 'napalm-logs', - 'github_banner': True, - 'show_related': False, + "show_powered_by": False, + "github_user": "napalm-automation", + "github_repo": "napalm-logs", + "github_banner": True, + "show_related": False, } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -124,13 +124,13 @@ # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'links.html', - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - 'donate.html', + "**": [ + "about.html", + "navigation.html", + "links.html", + "relations.html", # needs 'show_related': True theme option to display + "searchbox.html", + "donate.html", ] } @@ -146,7 +146,7 @@ # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. 
-htmlhelp_basename = 'napalm-logsdoc' +htmlhelp_basename = "napalm-logsdoc" # -- Options for LaTeX output --------------------------------------------- @@ -172,10 +172,10 @@ latex_documents = [ ( master_doc, - 'napalm-logs.tex', - u'napalm-logs Documentation', - u'Mircea Ulinic', - 'manual', + "napalm-logs.tex", + "napalm-logs Documentation", + "Mircea Ulinic", + "manual", ), ] @@ -184,7 +184,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, 'napalm-logs', u'napalm-logs Documentation', [author], 1)] +man_pages = [(master_doc, "napalm-logs", "napalm-logs Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- @@ -195,15 +195,15 @@ texinfo_documents = [ ( master_doc, - 'napalm-logs', - u'napalm-logs Documentation', + "napalm-logs", + "napalm-logs Documentation", author, - 'napalm-logs', + "napalm-logs", ( - 'napalm-logs is a Python library that listens to syslog messages from network devices and returns strucuted data' - 'following the OpenConfig or IETF YANG models' + "napalm-logs is a Python library that listens to syslog messages from network devices and returns structured data " + "following the OpenConfig or IETF YANG models" ), - 'Miscellaneous', + "Miscellaneous", ), ] @@ -225,7 +225,7 @@ # epub_uid = '' # A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] +epub_exclude_files = ["search.html"] def gen_messages_rst(): @@ -250,23 +250,23 @@ def gen_messages_rst(): nl_ = NapalmLogs(publisher=[]) defined_errors = {} for os_name, os_cfg in nl_.config_dict.items(): - for message in os_cfg['messages']: - error_name = message['error'] + for message in os_cfg["messages"]: + error_name = message["error"] if error_name not in defined_errors: - defined_errors[error_name] = {'doc': '', 'os': [], 'model': ''} - if not defined_errors[error_name]['doc'] or len( - defined_errors[error_name]['doc'] - ) < len(message['__doc__']): - defined_errors[error_name]['doc'] = message['__doc__'] - if not defined_errors[error_name]['model']: - defined_errors[error_name]['model'] = message['model'] - defined_errors[error_name]['os'].append(os_name) + defined_errors[error_name] = {"doc": "", "os": [], "model": ""} + if not defined_errors[error_name]["doc"] or len( + defined_errors[error_name]["doc"] + ) < len(message["__doc__"]): + defined_errors[error_name]["doc"] = message["__doc__"] + if not defined_errors[error_name]["model"]: + defined_errors[error_name]["model"] = message["model"] + defined_errors[error_name]["os"].append(os_name) # The collect the mock data from the tests: cwd = os.path.dirname(__file__) - test_root_path = os.path.join(cwd, '..', 'tests', 'config') - env = jinja2.Environment(loader=jinja2.FileSystemLoader('.')) + test_root_path = os.path.join(cwd, "..", "tests", "config") + env = jinja2.Environment(loader=jinja2.FileSystemLoader(".")) for error_name, error_details in defined_errors.items(): - os_name = error_details['os'][0] # Picking up the first OS in the list. + os_name = error_details["os"][0] # Picking up the first OS in the list. error_path = os.path.join(test_root_path, os_name, error_name) test_cases = [ name @@ -274,47 +274,47 @@ def gen_messages_rst(): if os.path.isdir(os.path.join(error_path, name)) ] test_case_name = ( - 'default' if 'default' in test_cases else test_cases[0] + "default" if "default" in test_cases else test_cases[0] ) # Picking up a test case.
test_case_path = os.path.join(error_path, test_case_name) - raw_message_filepath = os.path.join(test_case_path, 'syslog.msg') - log.debug('Looking for %s', raw_message_filepath) + raw_message_filepath = os.path.join(test_case_path, "syslog.msg") + log.debug("Looking for %s", raw_message_filepath) assert os.path.isfile(raw_message_filepath) - with open(raw_message_filepath, 'r') as raw_message_fh: + with open(raw_message_filepath, "r") as raw_message_fh: raw_message = raw_message_fh.read() - log.debug('Read raw message:') + log.debug("Read raw message:") log.debug(raw_message) - yang_message_filepath = os.path.join(test_case_path, 'yang.json') - log.debug('Looking for %s', yang_message_filepath) + yang_message_filepath = os.path.join(test_case_path, "yang.json") + log.debug("Looking for %s", yang_message_filepath) assert os.path.isfile(yang_message_filepath) - with open(yang_message_filepath, 'r') as yang_message_fh: + with open(yang_message_filepath, "r") as yang_message_fh: yang_message = yang_message_fh.read() - log.debug('Read YANG text:') + log.debug("Read YANG text:") log.debug(yang_message) struct_yang_message = json.loads(yang_message) indented_yang_message = json.dumps( struct_yang_message, indent=4, sort_keys=True ) - log.debug('Struct YANG message:') + log.debug("Struct YANG message:") log.debug(struct_yang_message) - msg_template = env.get_template('message_template.jinja') + msg_template = env.get_template("message_template.jinja") rendered_template = msg_template.render( error_name=error_name, - error_doc=error_details['doc'], - error_yang=error_details['model'], - error_os_list=list(set(error_details['os'])), + error_doc=error_details["doc"], + error_yang=error_details["model"], + error_os_list=list(set(error_details["os"])), error_txt_example=raw_message.strip(), - error_json_example=indented_yang_message.replace('\n}', '\n }'), + error_json_example=indented_yang_message.replace("\n}", "\n }"), ) - message_rst_path = 'messages/{error_name}.rst'.format(error_name=error_name) - with open(message_rst_path, 'w') as rst_fh: + message_rst_path = "messages/{error_name}.rst".format(error_name=error_name) + with open(message_rst_path, "w") as rst_fh: rst_fh.write(rendered_template) - index_tpl_file = env.get_template('messages_index_template.jinja') + index_tpl_file = env.get_template("messages_index_template.jinja") messages_list = list(defined_errors.keys()) - messages_list.extend(['RAW', 'UNKNOWN']) + messages_list.extend(["RAW", "UNKNOWN"]) messages_list.sort() rendered_template = index_tpl_file.render(error_list=messages_list) - with open('messages/index.rst', 'w') as index_fh: + with open("messages/index.rst", "w") as index_fh: index_fh.write(rendered_template) diff --git a/docs/developers/device_profiles.rst b/docs/developers/device_profiles.rst index efbbb56c..d1fbc4c1 100644 --- a/docs/developers/device_profiles.rst +++ b/docs/developers/device_profiles.rst @@ -213,6 +213,41 @@ should be placed. There are two options, ``variables`` and ``static``. ``variables`` should be used when the value being set is taken from the message, and ``static`` should be used when the value is manually set. +``state`` +--------- + +``state`` is an optional config bit which may be useful when defining messages +that have a counterpart. For example: ``MAJOR_ALARM_SET`` +/ ``MAJOR_ALARM_CLEARED``. The idea behind this is to have pairs or groups of +notifications, each mapping out to a desired state value (e.g., the state +value for ``MAJOR_ALARM_SET`` can be 1, while the state value for +``MAJOR_ALARM_CLEARED`` can be 0). The value can be any number, not only +binary, when the group is larger than two notifications. + +It is equally important to note that when using the :ref:`publisher-prometheus` +Publisher, this field causes an additional metric to be exposed, +providing the state value alongside the usual counter. + +The metric name is derived from the base name of the notification, by stripping +the last part (after underscore) and replacing it with ``_state``. For +instance, continuing the example above, ``MAJOR_ALARM_SET`` and +``MAJOR_ALARM_CLEARED`` would both set the same Gauge metric +``napalm_logs_major_alarm_state``. If the notification groups don't have +a common base name for whatever reason, you can define individual state tags +using the ``state_tag`` option (see below). + +``state_tag`` +------------- + +This option provides a custom name for the state metric on groups of +notifications. By default, this is not necessary, as it's assumed a group has +the same base name, but that may not always be the case. + +Based on the example above, ``MAJOR_ALARM_SET`` and ``MAJOR_ALARM_CLEARED`` +would, by default, set the ``napalm_logs_major_alarm_state`` Gauge metric; +however, by providing the value ``state_tag: system_alarm_state`` (for both), the +metric becomes ``napalm_logs_system_alarm_state``.
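+
+For example, a pair of message definitions using both options could look like
+this (the tags and the ``system_alarm_state`` name below are illustrative
+only, not taken from an actual profile):
+
+.. code-block:: yaml
+
+  messages:
+
+    - error: MAJOR_ALARM_SET
+      tag: ALARMD_MAJOR_ALARM_SET
+      state: 1
+      state_tag: system_alarm_state
+      # ... values, line, model, and mapping as usual
+
+    - error: MAJOR_ALARM_CLEARED
+      tag: ALARMD_MAJOR_ALARM_CLEAR
+      state: 0
+      state_tag: system_alarm_state
+      # ... values, line, model, and mapping as usual
+
+With these definitions, both notifications update the same
+``napalm_logs_system_alarm_state`` Gauge, setting it to 1 or 0 respectively.
+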
Pure Python profiles ++++++++++++++++++++ diff --git a/docs/options/index.rst b/docs/options/index.rst index 5f982df1..ddd7f60f 100644 --- a/docs/options/index.rst +++ b/docs/options/index.rst @@ -690,3 +690,38 @@ Example: device_blacklist: - eos + +.. _configuration-options-sentry-dsn: + +``sentry_dsn`` +-------------- + +.. versionadded:: 0.11.0 + +The Sentry DSN to identify the project to send traces to. + +Example: + +.. code-block:: yaml + + sentry_dsn: https://187deccf24d5ea7dc:f3e690bd3a03fb@sentry.example.com/777 + +.. _configuration-options-sentry-opts: + +``sentry_opts`` +--------------- + +.. versionadded:: 0.11.0 + +Dictionary of Sentry options to customise the behaviour of trace reports. See +https://docs.sentry.io/platforms/python/configuration/options/ for more +details. By default, ``traces_sample_rate`` is set to 1.0 (100%). + +Example: + +.. code-block:: yaml + + sentry_opts: + traces_sample_rate: 0.5 + sample_rate: 0.1 + max_breadcrumbs: 30 diff --git a/docs/releases/0.11.0.rst b/docs/releases/0.11.0.rst new file mode 100644 index 00000000..31ca8c89 --- /dev/null +++ b/docs/releases/0.11.0.rst @@ -0,0 +1,34 @@ +.. _release-0.11.0: + +============================== +Release 0.11.0 - Codename Rasp +============================== + +Support for Python 3.6 and earlier versions has been dropped, and similarly +ZeroMQ 2 is no longer supported. Please upgrade your environment and +dependencies to continue using *napalm-logs*. + +The Docker image built and published on the Docker Hub, +https://hub.docker.com/r/napalmautomation/napalm-logs, is now based on Debian +Buster and Python 3.9. + +New features +------------ + +Added two new optional configuration bits for :ref:`device-profiles`, namely +``state`` and ``state_tag``, that are helpful for processing and appropriately +flagging messages that have a counterpart, e.g., ``MAJOR_ALARM_CLEARED`` +/ ``MAJOR_ALARM_SET``. + +Enabled support for `Sentry <https://sentry.io/>`_. Simply add the Sentry +DSN under the ``sentry_dsn`` configuration key, or as the ``SENTRY_DSN`` +environment variable, and *napalm-logs* will send error traces to your Sentry +server, as shown below. See :ref:`configuration-options-sentry-dsn` for more details.
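+
+For instance, a minimal invocation via the environment variable could look
+like this (the DSN is a placeholder):
+
+.. code-block:: console
+
+    $ SENTRY_DSN=https://examplekey@sentry.example.com/1 napalm-logs --config-file /etc/napalm/logs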
+ +New Structured Messages +----------------------- + +The following have been added for Junos: + +- :ref:`INTERFACE_UP` +- :ref:`FAILED_ALLOCATING_PACKET_BUFFER` diff --git a/docs/releases/0.6.0.rst b/docs/releases/0.6.0.rst index 9fae4504..e106c52b 100644 --- a/docs/releases/0.6.0.rst +++ b/docs/releases/0.6.0.rst @@ -16,7 +16,7 @@ The implementation is fully compliant with Prometheus scraping, so you need only point your Prometheus server to the exposed metrics to scrape them. See :ref:`metrics` for further details and configuration options, as well as -the new options: :ref:`onfiguration-options-enable-metrics`, +the new options: :ref:`configuration-options-enable-metrics`, :ref:`configuration-options-metrics-address`, :ref:`configuration-options-metrics-port`, and :ref:`configuration-options-metrics-dir`. diff --git a/docs/releases/index.rst b/docs/releases/index.rst index 6dbe21af..df4989ff 100644 --- a/docs/releases/index.rst +++ b/docs/releases/index.rst @@ -8,11 +8,12 @@ Release Notes Latest Release ^^^^^^^^^^^^^^ -- :ref:`release-0.10.0` +- :ref:`release-0.11.0` Previous Releases ^^^^^^^^^^^^^^^^^ +- :ref:`release-0.10.0` - :ref:`release-0.9.0` - :ref:`release-0.8.0` - :ref:`release-0.7.0` diff --git a/examples/client_auth.py b/examples/client_auth.py index 5ee4d4c3..c38fee26 100644 --- a/examples/client_auth.py +++ b/examples/client_auth.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -''' +""" napalm-logs client, with authentication. Listens to the napalm-logs server started using the following settings: @@ -13,24 +13,24 @@ --transport zmq This client example listens to messages published via ZeroMQ (default transport). -''' +""" import zmq import napalm_logs.utils -server_address = '127.0.0.1' # --publish-address +server_address = "127.0.0.1" # --publish-address server_port = 49017 # --publish-port -auth_address = '127.0.0.1' # --auth-address +auth_address = "127.0.0.1" # --auth-address auth_port = 49018 # --auth-port -certificate = '/var/cache/napalm-logs.crt' # --certificate +certificate = "/var/cache/napalm-logs.crt" # --certificate # Using zmq context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect( - 'tcp://{address}:{port}'.format(address=server_address, port=server_port) + "tcp://{address}:{port}".format(address=server_address, port=server_port) ) -socket.setsockopt(zmq.SUBSCRIBE, '') +socket.setsockopt(zmq.SUBSCRIBE, b"") auth = napalm_logs.utils.ClientAuth(certificate, address=auth_address, port=auth_port) diff --git a/examples/client_no_auth.py b/examples/client_no_auth.py index f17cbf48..c07aa052 100644 --- a/examples/client_no_auth.py +++ b/examples/client_no_auth.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -''' +""" napalm-logs client, without authentication. Listens to the napalm-logs server started using the following settings: @@ -10,21 +10,21 @@ --disable-security This client example listens to messages published via ZeroMQ (default transport).
-''' +""" import zmq import napalm_logs.utils import json -server_address = '127.0.0.1' # --publish-address +server_address = "127.0.0.1" # --publish-address server_port = 49017 # --publish-port # Using zmq context = zmq.Context() socket = context.socket(zmq.SUB) socket.connect( - 'tcp://{address}:{port}'.format(address=server_address, port=server_port) + "tcp://{address}:{port}".format(address=server_address, port=server_port) ) -socket.setsockopt(zmq.SUBSCRIBE, b'') +socket.setsockopt(zmq.SUBSCRIBE, b"") while True: raw_object = socket.recv() diff --git a/napalm_logs/__init__.py b/napalm_logs/__init__.py index 88096f61..16f6e207 100644 --- a/napalm_logs/__init__.py +++ b/napalm_logs/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs package init -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -13,15 +13,15 @@ # be set before start up. At startup, it doesn't matter # what the value is, and we always overwrite it to either # the poperly defined default or the user propvided value. -os.environ['prometheus_multiproc_dir'] = "/tmp/napalm_logs_metrics" +os.environ["prometheus_multiproc_dir"] = "/tmp/napalm_logs_metrics" # Import napalm-logs pkgs from napalm_logs.base import NapalmLogs # noqa: E402 try: - __version__ = pkg_resources.get_distribution('napalm-logs').version + __version__ = pkg_resources.get_distribution("napalm-logs").version except pkg_resources.DistributionNotFound: __version__ = "Not installed" -__all__ = ('NapalmLogs', '__version__') +__all__ = ("NapalmLogs", "__version__") diff --git a/napalm_logs/auth.py b/napalm_logs/auth.py index db334f54..2737ae13 100644 --- a/napalm_logs/auth.py +++ b/napalm_logs/auth.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Authenticator worker process -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -32,7 +32,7 @@ class NapalmLogsAuthProc(NapalmLogsProc): - ''' + """ Authenticator sub-process class. This process waits for the clients to request the private and signing keys. @@ -54,7 +54,7 @@ class NapalmLogsAuthProc(NapalmLogsProc): | ------- send SGN HEX -------> | | | | <------------ ACK ----------- | - ''' + """ def __init__( self, @@ -75,104 +75,104 @@ def __init__( self.socket = None def _exit_gracefully(self, signum, _): # pylint: disable=unused-argument - ''' + """ Exit gracefully. - ''' + """ self.stop() def _handshake(self, conn, addr): - ''' + """ Ensures that the client receives the AES key. 
- ''' + """ # waiting for the magic request message msg = conn.recv(len(MAGIC_REQ)) - log.debug('Received message %s from %s', msg, addr) + log.debug("Received message %s from %s", msg, addr) if msg != MAGIC_REQ: - log.warning('%s is not a valid REQ message from %s', msg, addr) + log.warning("%s is not a valid REQ message from %s", msg, addr) return - log.debug('Sending the private key') + log.debug("Sending the private key") conn.send(self.__key) # wait for explicit ACK - log.debug('Waiting for the client to confirm') + log.debug("Waiting for the client to confirm") msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return - log.debug('Sending the signature key') + log.debug("Sending the signature key") conn.send(self.__sgn) # wait for explicit ACK - log.debug('Waiting for the client to confirm') + log.debug("Waiting for the client to confirm") msg = conn.recv(len(MAGIC_ACK)) if msg != MAGIC_ACK: return - log.info('%s is now authenticated', addr) + log.info("%s is now authenticated", addr) self.keep_alive(conn) def keep_alive(self, conn): - ''' + """ Maintains auth sessions - ''' + """ while self.__up: msg = conn.recv(len(AUTH_KEEP_ALIVE)) if msg != AUTH_KEEP_ALIVE: - log.error('Received something other than %s', AUTH_KEEP_ALIVE) + log.error("Received something other than %s", AUTH_KEEP_ALIVE) conn.close() return try: conn.send(AUTH_KEEP_ALIVE_ACK) except (IOError, socket.error) as err: - log.error('Unable to send auth keep alive: %s', err) + log.error("Unable to send auth keep alive: %s", err) conn.close() return def verify_cert(self): - ''' + """ Checks that the provided cert and key are valid and usable - ''' + """ log.debug( - 'Verifying the %s certificate, keyfile: %s', self.certificate, self.keyfile + "Verifying the %s certificate, keyfile: %s", self.certificate, self.keyfile ) try: ssl.create_default_context().load_cert_chain( self.certificate, keyfile=self.keyfile ) except ssl.SSLError: - error_string = 'SSL certificate and key do not match' + error_string = "SSL certificate and key do not match" log.error(error_string) raise SSLMismatchException(error_string) except IOError: - log.error('Unable to open either certificate or key file') + log.error("Unable to open either certificate or key file") raise - log.debug('Certificate looks good.') + log.debug("Certificate looks good.") def _create_skt(self): - ''' + """ Create the authentication socket. - ''' - log.debug('Creating the auth socket') - if ':' in self.auth_address: + """ + log.debug("Creating the auth socket") + if ":" in self.auth_address: self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: self.socket.bind((self.auth_address, self.auth_port)) except socket.error as msg: - error_string = 'Unable to bind (auth) to port {} on {}: {}'.format( + error_string = "Unable to bind (auth) to port {} on {}: {}".format( self.auth_port, self.auth_address, msg ) log.error(error_string, exc_info=True) raise BindException(error_string) def start(self): - ''' + """ Listen to auth requests and send the AES key. Each client connection starts a new thread. 
- ''' + """ # Start suicide polling thread - log.debug('Starting the auth process') + log.debug("Starting the auth process") self.verify_cert() self._create_skt() log.debug( - 'The auth process can receive at most %d parallel connections', + "The auth process can receive at most %d parallel connections", AUTH_MAX_CONN, ) self.socket.listen(AUTH_MAX_CONN) @@ -192,26 +192,26 @@ def start(self): keyfile=self.keyfile, ) except ssl.SSLError: - log.exception('SSL error', exc_info=True) + log.exception("SSL error", exc_info=True) continue except socket.error as error: if self.__up is False: return else: - msg = 'Received auth socket error: {}'.format(error) + msg = "Received auth socket error: {}".format(error) log.error(msg, exc_info=True) raise NapalmLogsExit(msg) - log.info('%s connected', address) - log.debug('Starting the handshake') + log.info("%s connected", address) + log.debug("Starting the handshake") client_thread = threading.Thread( target=self._handshake, args=(wrapped_auth_skt, address) ) client_thread.start() def stop(self): - ''' + """ Stop the auth proc. - ''' - log.info('Stopping auth process') + """ + log.info("Stopping auth process") self.__up = False self.socket.close() diff --git a/napalm_logs/base.py b/napalm_logs/base.py index 7cffff29..7d36f107 100644 --- a/napalm_logs/base.py +++ b/napalm_logs/base.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs base -''' +""" from __future__ import absolute_import # Import std lib @@ -16,6 +16,12 @@ from multiprocessing import Process # Import third party libs +try: + import sentry_sdk + + HAS_SENTRY = True +except ImportError: + HAS_SENTRY = False # crypto import nacl.utils import nacl.secret @@ -45,18 +51,18 @@ class NapalmLogs: def __init__( self, - address='0.0.0.0', + address="0.0.0.0", port=514, - listener='udp', - publisher='zmq', - publish_address='0.0.0.0', + listener="udp", + publisher="zmq", + publish_address="0.0.0.0", publish_port=49017, - auth_address='0.0.0.0', + auth_address="0.0.0.0", auth_port=49018, metrics_enabled=False, - metrics_address='0.0.0.0', - metrics_port='9215', - metrics_dir='/tmp/napalm_logs_metrics', + metrics_address="0.0.0.0", + metrics_port="9215", + metrics_dir="/tmp/napalm_logs_metrics", certificate=None, keyfile=None, disable_security=False, @@ -64,17 +70,17 @@ def __init__( config_dict=None, extension_config_path=None, extension_config_dict=None, - log_level='warning', - log_format='%(asctime)s,%(msecs)03.0f [%(name)-17s][%(levelname)-8s] %(message)s', + log_level="warning", + log_format="%(asctime)s,%(msecs)03.0f [%(name)-17s][%(levelname)-8s] %(message)s", device_blacklist=[], device_whitelist=[], hwm=None, device_worker_processes=1, - serializer='msgpack', + serializer="msgpack", buffer=None, opts=None, ): - ''' + """ Init the napalm-logs engine. :param address: The address to bind the syslog client. Default: 0.0.0.0. @@ -83,7 +89,19 @@ def __init__( :param publish_address: The address to bing when publishing the OC objects. Default: 0.0.0.0. :param publish_port: Publish port. Default: 49017. 
- ''' + """ + self.opts = opts if opts else {} + sentry_dsn = self.opts.get("sentry_dsn") or os.getenv("SENTRY_DSN") + if sentry_dsn: + if HAS_SENTRY: + sentry_sdk.init( + sentry_dsn, + **self.opts.get("sentry_opts", {"traces_sample_rate": 1.0}) + ) + else: + log.warning( + "Sentry DSN provided, but the sentry_sdk library is not installed" + ) self.address = address self.port = port self.listener = listener @@ -112,7 +130,6 @@ def __init__( self.hwm = hwm self._buffer_cfg = buffer self._buffer = None - self.opts = opts if opts else {} # Setup the environment self._setup_log() self._build_config() @@ -133,20 +150,20 @@ def _exit_gracefully(self, signum, _): def __exit__(self, exc_type, exc_value, exc_traceback): self.stop_engine() if exc_type is not None: - log.error('Exiting due to unhandled exception', exc_info=True) + log.error("Exiting due to unhandled exception", exc_info=True) self.__raise_clean_exception(exc_type, exc_value, exc_traceback) def _setup_buffer(self): - ''' + """ Setup the buffer subsystem. - ''' + """ if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict): return buffer_name = list(self._buffer_cfg.keys())[0] buffer_class = napalm_logs.buffer.get_interface(buffer_name) log.debug('Setting up buffer interface "%s"', buffer_name) - if 'expire_time' not in self._buffer_cfg[buffer_name]: - self._buffer_cfg[buffer_name]['expire_time'] = CONFIG.BUFFER_EXPIRE_TIME + if "expire_time" not in self._buffer_cfg[buffer_name]: + self._buffer_cfg[buffer_name]["expire_time"] = CONFIG.BUFFER_EXPIRE_TIME self._buffer = buffer_class(**self._buffer_cfg[buffer_name]) def _setup_metrics(self): @@ -164,7 +181,7 @@ def _setup_metrics(self): path = self.metrics_dir elif path != self.metrics_dir: path = self.metrics_dir - os.environ['prometheus_multiproc_dir'] = path + os.environ["prometheus_multiproc_dir"] = path log.info("Cleaning metrics collection directory") log.debug("Metrics directory set to: {}".format(path)) files = os.listdir(path) @@ -180,89 +197,89 @@ def _setup_metrics(self): ) def _setup_log(self): - ''' + """ Setup the log object. - ''' + """ logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower()) logging.basicConfig(format=self.log_format, level=logging_level) def _post_preparation(self): - ''' + """ The steps for post-preparation (when the logs, and everything is already setup). - ''' - self.opts['hwm'] = CONFIG.ZMQ_INTERNAL_HWM if self.hwm is None else self.hwm - self.opts['_server_send_unknown'] = False + """ + self.opts["hwm"] = CONFIG.ZMQ_INTERNAL_HWM if self.hwm is None else self.hwm + self.opts["_server_send_unknown"] = False for pub in self.publisher: pub_name = list(pub.keys())[0] pub_opts = list(pub.values())[0] - error_whitelist = pub_opts.get('error_whitelist', []) - error_blacklist = pub_opts.get('error_blacklist', []) - if 'UNKNOWN' not in error_blacklist: + error_whitelist = pub_opts.get("error_whitelist", []) + error_blacklist = pub_opts.get("error_blacklist", []) + if "UNKNOWN" not in error_blacklist: # by default we should not send unknown messages - error_blacklist.append('UNKNOWN') - if 'RAW' not in error_blacklist: + error_blacklist.append("UNKNOWN") + if "RAW" not in error_blacklist: # same with RAW - error_blacklist.append('RAW') + error_blacklist.append("RAW") # This implementation is a bit sub-optimal, but more readable like # that. It is executed only at the init, so just once. 
- if 'only_unknown' in pub_opts and pub[pub_name]['only_unknown']: - pub[pub_name]['send_unknown'] = True - error_whitelist = ['UNKNOWN'] + if "only_unknown" in pub_opts and pub[pub_name]["only_unknown"]: + pub[pub_name]["send_unknown"] = True + error_whitelist = ["UNKNOWN"] error_blacklist = [] - if 'only_raw' in pub_opts and pub[pub_name]['only_raw']: - pub[pub_name]['send_raw'] = True - error_whitelist = ['RAW'] + if "only_raw" in pub_opts and pub[pub_name]["only_raw"]: + pub[pub_name]["send_raw"] = True + error_whitelist = ["RAW"] error_blacklist = [] - if 'send_unknown' in pub_opts and 'UNKNOWN' in error_blacklist: - error_blacklist.remove('UNKNOWN') - if 'send_raw' in pub_opts and 'RAW' in error_blacklist: - error_blacklist.remove('RAW') - self.opts['_server_send_unknown'] |= ( - 'UNKNOWN' in error_whitelist or 'UNKNOWN' not in error_blacklist + if "send_unknown" in pub_opts and "UNKNOWN" in error_blacklist: + error_blacklist.remove("UNKNOWN") + if "send_raw" in pub_opts and "RAW" in error_blacklist: + error_blacklist.remove("RAW") + self.opts["_server_send_unknown"] |= ( + "UNKNOWN" in error_whitelist or "UNKNOWN" not in error_blacklist ) - pub[pub_name]['error_whitelist'] = error_whitelist - pub[pub_name]['error_blacklist'] = error_blacklist + pub[pub_name]["error_whitelist"] = error_whitelist + pub[pub_name]["error_blacklist"] = error_blacklist def _whitelist_blacklist(self, os_name): - ''' + """ Determines if the OS should be ignored, depending on the whitelist-blacklist logic configured by the user. - ''' - return napalm_logs.ext.check_whitelist_blacklist( + """ + return napalm_logs.utils.check_whitelist_blacklist( os_name, whitelist=self.device_whitelist, blacklist=self.device_blacklist ) @staticmethod def _extract_yaml_docstring(stream): - ''' + """ Extract the comments at the top of the YAML file, from the stream handler. Return the extracted comment as string. - ''' + """ comment_lines = [] lines = stream.read().splitlines() for line in lines: line_strip = line.strip() if not line_strip: continue - if line_strip.startswith('#'): - comment_lines.append(line_strip.replace('#', '', 1).strip()) + if line_strip.startswith("#"): + comment_lines.append(line_strip.replace("#", "", 1).strip()) else: break - return ' '.join(comment_lines) + return " ".join(comment_lines) def _load_config(self, path): - ''' + """ Read the configuration under a specific path and return the object. - ''' + """ config = {} - log.debug('Reading configuration from %s', path) + log.debug("Reading configuration from %s", path) if not os.path.isdir(path): msg = ( - 'Unable to read from {path}: ' 'the directory does not exist!' + "Unable to read from {path}: " "the directory does not exist!" 
).format(path=path) log.error(msg) raise IOError(msg) @@ -281,83 +298,83 @@ def _load_config(self, path): # └── init.yml os_subdirs = [sdpath[0] for sdpath in os.walk(path)][1:] if not os_subdirs: - log.error('%s does not contain any OS subdirectories', path) + log.error("%s does not contain any OS subdirectories", path) for os_dir in os_subdirs: os_name = os.path.split(os_dir)[1] # the network OS name - if os_name.startswith('__'): - log.debug('Ignoring %s', os_name) + if os_name.startswith("__"): + log.debug("Ignoring %s", os_name) continue if not self._whitelist_blacklist(os_name): log.debug( - 'Not building config for %s (whitelist-blacklist logic)', os_name + "Not building config for %s (whitelist-blacklist logic)", os_name ) # Ignore devices that are not in the whitelist (if defined), # or those operating systems that are on the blacklist. # This way we can prevent starting unwanted sub-processes. continue - log.debug('Building config for %s:', os_name) - log.debug('=' * 40) + log.debug("Building config for %s:", os_name) + log.debug("=" * 40) if os_name not in config: config[os_name] = {} files = os.listdir(os_dir) # Read all files under the OS dir for file_ in files: - log.debug('Inspecting %s', file_) + log.debug("Inspecting %s", file_) file_name, file_extension = os.path.splitext(file_) - file_extension = file_extension.replace('.', '') + file_extension = file_extension.replace(".", "") filepath = os.path.join(os_dir, file_) - comment = '' - if file_extension in ('yml', 'yaml'): + comment = "" + if file_extension in ("yml", "yaml"): try: - log.debug('Loading %s as YAML', file_) - with open(filepath, 'r') as fstream: + log.debug("Loading %s as YAML", file_) + with open(filepath, "r") as fstream: cfg = yaml.load(fstream, Loader=yaml.FullLoader) # Reposition at the top and read the comments. if file_name not in CONFIG.OS_INIT_FILENAMES: # If the file name is not a profile init. fstream.seek(0) comment = self._extract_yaml_docstring(fstream) - if 'messages' in cfg: - for message in cfg['messages']: - message['__doc__'] = comment + if "messages" in cfg: + for message in cfg["messages"]: + message["__doc__"] = comment napalm_logs.utils.dictupdate(config[os_name], cfg) except yaml.YAMLError as yamlexc: - log.error('Invalid YAML file: %s', filepath, exc_info=True) + log.error("Invalid YAML file: %s", filepath, exc_info=True) if file_name in CONFIG.OS_INIT_FILENAMES: # Raise exception and break only when the init file is borked # otherwise, it will try loading best efforts. raise IOError(yamlexc) - elif file_extension == 'py': - log.debug('Lazy loading Python module %s', file_) + elif file_extension == "py": + log.debug("Lazy loading Python module %s", file_) mod_fp, mod_file, mod_data = imp.find_module(file_name, [os_dir]) mod = imp.load_module(file_name, mod_fp, mod_file, mod_data) if file_name in CONFIG.OS_INIT_FILENAMES: # Init file defined as Python module - log.debug('%s seems to be a Python profiler', filepath) + log.debug("%s seems to be a Python profiler", filepath) # Init files require to define the `extract` function. 
# Sample init file: # def extract(message): # return {'tag': 'A_TAG', 'host': 'hostname'} if hasattr(mod, CONFIG.INIT_RUN_FUN) and hasattr( - getattr(mod, CONFIG.INIT_RUN_FUN), '__call__' + getattr(mod, CONFIG.INIT_RUN_FUN), "__call__" ): # if extract is defined and is callable - if 'prefixes' not in config[os_name]: - config[os_name]['prefixes'] = [] - config[os_name]['prefixes'].append( + if "prefixes" not in config[os_name]: + config[os_name]["prefixes"] = [] + config[os_name]["prefixes"].append( { - 'values': {'tag': ''}, - 'line': '', - '__python_fun__': getattr(mod, CONFIG.INIT_RUN_FUN), - '__python_mod__': filepath, # Will be used for debugging + "values": {"tag": ""}, + "line": "", + "__python_fun__": getattr(mod, CONFIG.INIT_RUN_FUN), + "__python_mod__": filepath, # Will be used for debugging } ) log.info( - 'Adding the prefix function defined under %s to %s', + "Adding the prefix function defined under %s to %s", filepath, os_name, ) - elif file_name != '__init__': + elif file_name != "__init__": # If __init__.py does not have the extractor function, no problem. log.warning( '%s does not have the "%s" function defined. Ignoring.', @@ -366,59 +383,59 @@ def _load_config(self, path): ) else: # Other python files require the `emit` function. - if hasattr(mod, '__tag__'): - mod_tag = getattr(mod, '__tag__') + if hasattr(mod, "__tag__"): + mod_tag = getattr(mod, "__tag__") else: log.info( - '%s does not have __tag__, defaulting the tag to %s', + "%s does not have __tag__, defaulting the tag to %s", filepath, file_name, ) mod_tag = file_name - if hasattr(mod, '__error__'): - mod_err = getattr(mod, '__error__') + if hasattr(mod, "__error__"): + mod_err = getattr(mod, "__error__") else: log.info( - '%s does not have __error__, defaulting the error to %s', + "%s does not have __error__, defaulting the error to %s", filepath, file_name, ) mod_err = file_name - if hasattr(mod, '__match_on__'): - err_match = getattr(mod, '__match_on__') + if hasattr(mod, "__match_on__"): + err_match = getattr(mod, "__match_on__") else: - err_match = 'tag' + err_match = "tag" model = CONFIG.OPEN_CONFIG_NO_MODEL - if hasattr(mod, '__yang_model__'): - model = getattr(mod, '__yang_model__') - log.debug('Mathing on %s', err_match) + if hasattr(mod, "__yang_model__"): + model = getattr(mod, "__yang_model__") + log.debug("Matching on %s", err_match) if hasattr(mod, CONFIG.CONFIG_RUN_FUN) and hasattr( - getattr(mod, CONFIG.CONFIG_RUN_FUN), '__call__' + getattr(mod, CONFIG.CONFIG_RUN_FUN), "__call__" ): log.debug( - 'Adding %s with tag:%s, error:%s, matching on:%s', + "Adding %s with tag:%s, error:%s, matching on:%s", file_, mod_tag, mod_err, err_match, ) # the structure below must correspond to the VALID_CONFIG structure enforcement - if 'messages' not in config[os_name]: - config[os_name]['messages'] = [] - config[os_name]['messages'].append( + if "messages" not in config[os_name]: + config[os_name]["messages"] = [] + config[os_name]["messages"].append( { - 'tag': mod_tag, - 'error': mod_err, - 'match_on': err_match, - '__doc__': mod.__doc__, - '__python_fun__': getattr( + "tag": mod_tag, + "error": mod_err, + "match_on": err_match, + "__doc__": mod.__doc__, + "__python_fun__": getattr( mod, CONFIG.CONFIG_RUN_FUN ), - '__python_mod__': filepath, # Will be used for debugging - 'line': '', - 'model': model, - 'values': {}, - 'mapping': {'variables': {}, 'static': {}}, + "__python_mod__": filepath, # Will be used for debugging + "line": "", + "model": model, + "values": {}, + "mapping": {"variables": {}, "static": {}},
} ) else: @@ -428,17 +445,17 @@ def _load_config(self, path): CONFIG.CONFIG_RUN_FUN, ) else: - log.info('Ignoring %s (extension not allowed)', filepath) - log.debug('-' * 40) + log.info("Ignoring %s (extension not allowed)", filepath) + log.debug("-" * 40) if not config: - msg = 'Could not find proper configuration files under {path}'.format( + msg = "Could not find proper configuration files under {path}".format( path=path ) log.error(msg) raise IOError(msg) - log.debug('Complete config:') + log.debug("Complete config:") log.debug(config) - log.debug('ConfigParserg size in bytes: %d', sys.getsizeof(config)) + log.debug("Config size in bytes: %d", sys.getsizeof(config)) return config @staticmethod @@ -448,21 +465,23 @@ def _raise_config_exception(error_string): def _compare_values(self, value, config, dev_os, key_path): if ( - 'line' not in value - or 'values' not in value - or '__python_fun__' not in value + "line" not in value + or "values" not in value + or "__python_fun__" not in value ): # Check looks good when using a Python-defined profile. return - from_line = re.findall(r'\{(\w+)\}', config['line']) - if set(from_line) == set(config['values']): + from_line = re.findall(r"\{(\w+)\}", config["line"]) + if set(from_line) == set(config["values"]): return - if config.get('error'): - error = 'The "values" do not match variables in "line" for {}:{} in {}'.format( - ':'.join(key_path), config.get('error'), dev_os + if config.get("error"): + error = ( + 'The "values" do not match variables in "line" for {}:{} in {}'.format( + ":".join(key_path), config.get("error"), dev_os + ) ) else: error = 'The "values" do not match variables in "line" for {} in {}'.format( - ':'.join(key_path), dev_os + ":".join(key_path), dev_os ) self._raise_config_exception(error) @@ -470,20 +489,20 @@ def _verify_config_key(self, key, value, valid, config, dev_os, key_path): key_path.append(key) if config.get(key, False) is False: self._raise_config_exception( - 'Unable to find key "{}" for {}'.format(':'.join(key_path), dev_os) + 'Unable to find key "{}" for {}'.format(":".join(key_path), dev_os) ) if isinstance(value, type): if not isinstance(config[key], value): self._raise_config_exception( 'Key "{}" for {} should be {}'.format( - ':'.join(key_path), dev_os, value + ":".join(key_path), dev_os, value ) ) elif isinstance(value, dict): if not isinstance(config[key], dict): self._raise_config_exception( 'Key "{}" for {} should be of type '.format( - ':'.join(key_path), dev_os + ":".join(key_path), dev_os ) ) self._verify_config_dict(value, config[key], dev_os, key_path) @@ -494,7 +513,7 @@ if not isinstance(config[key], list): self._raise_config_exception( 'Key "{}" for {} should be of type '.format( - ':'.join(key_path), dev_os + ":".join(key_path), dev_os ) ) for item in config[key]: @@ -503,41 +522,41 @@ key_path.remove(key) def _verify_config_dict(self, valid, config, dev_os, key_path=None): - ''' + """ Verify if the config dict is valid.
- ''' + """ if not key_path: key_path = [] for key, value in valid.items(): self._verify_config_key(key, value, valid, config, dev_os, key_path) def _verify_config(self): - ''' + """ Verify that the config is correct - ''' + """ if not self.config_dict: - self._raise_config_exception('No config found') + self._raise_config_exception("No config found") # Check for device conifg, if there isn't anything then just log, do not raise an exception for dev_os, dev_config in self.config_dict.items(): if not dev_config: - log.warning('No config found for %s', dev_os) + log.warning("No config found for %s", dev_os) continue # Compare the valid opts with the conifg self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os) - log.debug('Read the config without error') + log.debug("Read the config without error") def _build_config(self): - ''' + """ Build the config of the napalm syslog parser. - ''' + """ if not self.config_dict: if not self.config_path: # No custom config path requested # Read the native config files self.config_path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'config' + os.path.dirname(os.path.realpath(__file__)), "config" ) - log.info('Reading the configuration from %s', self.config_path) + log.info("Reading the configuration from %s", self.config_path) self.config_dict = self._load_config(self.config_path) if ( not self.extension_config_dict @@ -548,7 +567,7 @@ def _build_config(self): # When extension config is not sent as dict # But `extension_config_path` is specified log.info( - 'Reading extension configuration from %s', self.extension_config_path + "Reading extension configuration from %s", self.extension_config_path ) self.extension_config_dict = self._load_config(self.extension_config_path) if self.extension_config_dict: @@ -557,13 +576,13 @@ def _build_config(self): ) # deep merge def _start_auth_proc(self): - ''' + """ Start the authenticator process. - ''' - log.debug('Computing the signing key hex') + """ + log.debug("Computing the signing key hex") verify_key = self.__signing_key.verify_key sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder) - log.debug('Starting the authenticator subprocess') + log.debug("Starting the authenticator subprocess") auth = NapalmLogsAuthProc( self.certificate, self.keyfile, @@ -574,15 +593,15 @@ def _start_auth_proc(self): ) proc = Process(target=auth.start) proc.start() - proc.description = 'Auth process' - log.debug('Started auth process as %s with PID %s', proc._name, proc.pid) + proc.description = "Auth process" + log.debug("Started auth process as %s with PID %s", proc._name, proc.pid) return proc def _start_lst_proc(self, listener_type, listener_opts): - ''' + """ Start the listener process. - ''' - log.debug('Starting the listener process for %s', listener_type) + """ + log.debug("Starting the listener process for %s", listener_type) listener = NapalmLogsListenerProc( self.opts, self.address, @@ -592,39 +611,38 @@ def _start_lst_proc(self, listener_type, listener_opts): ) proc = Process(target=listener.start) proc.start() - proc.description = 'Listener process' - log.debug('Started listener process as %s with PID %s', proc._name, proc.pid) + proc.description = "Listener process" + log.debug("Started listener process as %s with PID %s", proc._name, proc.pid) return proc def _start_srv_proc(self, started_os_proc): - ''' + """ Start the server process. 
- ''' - log.debug('Starting the server process') + """ + log.debug("Starting the server process") server = NapalmLogsServerProc( self.opts, self.config_dict, started_os_proc, buffer=self._buffer ) proc = Process(target=server.start) proc.start() - proc.description = 'Server process' - log.debug('Started server process as %s with PID %s', proc._name, proc.pid) + proc.description = "Server process" + log.debug("Started server process as %s with PID %s", proc._name, proc.pid) return proc def _start_pub_px_proc(self): - ''' - ''' - px = NapalmLogsPublisherProxy(self.opts['hwm']) + """ """ + px = NapalmLogsPublisherProxy(self.opts["hwm"]) proc = Process(target=px.start) proc.start() - proc.description = 'Publisher proxy process' - log.debug('Started pub proxy as %s with PID %s', proc._name, proc.pid) + proc.description = "Publisher proxy process" + log.debug("Started pub proxy as %s with PID %s", proc._name, proc.pid) return proc def _start_pub_proc(self, publisher_type, publisher_opts, pub_id): - ''' + """ Start the publisher process. - ''' - log.debug('Starting the publisher process for %s', publisher_type) + """ + log.debug("Starting the publisher process for %s", publisher_type) publisher = NapalmLogsPublisherProc( self.opts, self.publish_address, @@ -639,21 +657,21 @@ def _start_pub_proc(self, publisher_type, publisher_opts, pub_id): ) proc = Process(target=publisher.start) proc.start() - proc.description = 'Publisher process' - log.debug('Started publisher process as %s with PID %s', proc._name, proc.pid) + proc.description = "Publisher process" + log.debug("Started publisher process as %s with PID %s", proc._name, proc.pid) return proc def _start_dev_proc(self, device_os, device_config): - ''' + """ Start the device worker process. - ''' - log.info('Starting the child process for %s', device_os) + """ + log.info("Starting the child process for %s", device_os) dos = NapalmLogsDeviceProc(device_os, self.opts, device_config) os_proc = Process(target=dos.start) os_proc.start() - os_proc.description = '%s device process' % device_os + os_proc.description = "%s device process" % device_os log.debug( - 'Started process %s for %s, having PID %s', + "Started process %s for %s, having PID %s", os_proc._name, device_os, os_proc.pid, @@ -661,21 +679,21 @@ def _start_dev_proc(self, device_os, device_config): return os_proc def start_engine(self): - ''' + """ Start the child processes (one per device OS) - ''' + """ if self.disable_security is True: log.warning( - '***Not starting the authenticator process due to disable_security being set to True***' + "***Not starting the authenticator process due to disable_security being set to True***" ) else: - log.debug('Generating the private key') + log.debug("Generating the private key") self.__priv_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE) - log.debug('Generating the signing key') + log.debug("Generating the signing key") self.__signing_key = nacl.signing.SigningKey.generate() # start the keepalive thread for the auth sub-process self._processes.append(self._start_auth_proc()) - log.debug('Starting the internal proxy') + log.debug("Starting the internal proxy") proc = self._start_pub_px_proc() self._processes.append(proc) # publisher process start @@ -686,19 +704,19 @@ def start_engine(self): self._processes.append(proc) pub_id += 1 # device process start - log.info('Starting child processes for each device type') + log.info("Starting child processes for each device type") started_os_proc = [] for device_os, device_config in 
self.config_dict.items(): if not self._whitelist_blacklist(device_os): log.debug( - 'Not starting process for %s (whitelist-blacklist logic)', device_os + "Not starting process for %s (whitelist-blacklist logic)", device_os ) # Ignore devices that are not in the whitelist (if defined), # or those operating systems that are on the blacklist. # This way we can prevent starting unwanted sub-processes. continue log.debug( - 'Will start %d worker process(es) for %s', + "Will start %d worker process(es) for %s", self.device_worker_processes, device_os, ) @@ -716,22 +734,22 @@ def start_engine(self): thread.start() def _check_children(self): - ''' + """ Check all of the child processes are still running - ''' + """ while self.up: time.sleep(1) for process in self._processes: if process.is_alive() is True: continue log.debug( - '%s is dead. Stopping the napalm-logs engine.', process.description + "%s is dead. Stopping the napalm-logs engine.", process.description ) self.stop_engine() def stop_engine(self): self.up = False - log.info('Shutting down the engine') + log.info("Shutting down the engine") # Set SIGTERM to all child processes, then join them for proc in self._processes: proc.terminate() diff --git a/napalm_logs/buffer/__init__.py b/napalm_logs/buffer/__init__.py index 953cfbe6..ac6a91f9 100644 --- a/napalm_logs/buffer/__init__.py +++ b/napalm_logs/buffer/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs pluggable buffer interface. -''' +""" from __future__ import absolute_import, unicode_literals # Import python std lib @@ -18,24 +18,24 @@ log = logging.getLogger(__file__) BUFFER_LOOKUP = { - 'mem': MemoryBuffer, - 'memory': MemoryBuffer, - 'cache': MemoryBuffer, - 'redis': RedisBuffer, + "mem": MemoryBuffer, + "memory": MemoryBuffer, + "cache": MemoryBuffer, + "redis": RedisBuffer, } def get_interface(name): - ''' + """ Return the serialize function. - ''' + """ try: - log.debug('Using %s as buffer interface', name) + log.debug("Using %s as buffer interface", name) return BUFFER_LOOKUP[name] except KeyError: - msg = 'Buffer interface {} is not available'.format(name) + msg = "Buffer interface {} is not available".format(name) log.error(msg, exc_info=True) raise InvalidBufferException(msg) -__all__ = ('get_interface',) +__all__ = ("get_interface",) diff --git a/napalm_logs/buffer/memory.py b/napalm_logs/buffer/memory.py index 4d646923..38f4847f 100644 --- a/napalm_logs/buffer/memory.py +++ b/napalm_logs/buffer/memory.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" In-memory buffer interface. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -13,9 +13,9 @@ class MemoryBuffer(object): - ''' + """ Memory buffer class. 
- ''' + """ def __init__(self, expire_time, **kwargs): self.expire_time = expire_time @@ -24,7 +24,7 @@ def __init__(self, expire_time, **kwargs): self._cache = {} def __setitem__(self, key, val): - self._cache[key] = {'data': val, 'timestamp': datetime.datetime.utcnow()} + self._cache[key] = {"data": val, "timestamp": datetime.datetime.utcnow()} def __contains__(self, key): return True if key in self._cache else False @@ -34,8 +34,8 @@ def __getitem__(self, key): item = self._cache[key] except KeyError: return None - if datetime.datetime.utcnow() - item['timestamp'] < self.expire_time_delta: - return item['data'] + if datetime.datetime.utcnow() - item["timestamp"] < self.expire_time_delta: + return item["data"] else: del self._cache[key] return None diff --git a/napalm_logs/buffer/redisbuf.py b/napalm_logs/buffer/redisbuf.py index 024d1f10..ad423b4e 100644 --- a/napalm_logs/buffer/redisbuf.py +++ b/napalm_logs/buffer/redisbuf.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Redis buffer interface. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -20,20 +20,20 @@ class RedisBuffer(object): - ''' + """ Memory buffer class. - ''' + """ def __init__(self, expire_time, **kwargs): self.expire_time = expire_time # expire_time is assumed to be in seconds - self._key_prefix = kwargs.pop('key_prefix', '') - self._keys_set_name = kwargs.pop('keys_set_name', '__napalm_logs_keys_set') + self._key_prefix = kwargs.pop("key_prefix", "") + self._keys_set_name = kwargs.pop("keys_set_name", "__napalm_logs_keys_set") self._redis = redis.StrictRedis(**kwargs) self._redis_pipeline = self._redis.pipeline() def __setitem__(self, key, val): - key = '{prefix}{key}'.format(prefix=self._key_prefix, key=key) + key = "{prefix}{key}".format(prefix=self._key_prefix, key=key) self._redis_pipeline.set(key, val, ex=self.expire_time, nx=True) self._redis_pipeline.sadd(self._keys_set_name, key) self._redis_pipeline.execute() @@ -42,7 +42,7 @@ def __contains__(self, key): return True if key in self else False def __getitem__(self, key): - key = '{prefix}{key}'.format(prefix=self._key_prefix, key=key) + key = "{prefix}{key}".format(prefix=self._key_prefix, key=key) val = self._redis.get(key) if val is None: self._redis.srem(self._keys_set_name, key) diff --git a/napalm_logs/config/__init__.py b/napalm_logs/config/__init__.py index 6be10002..67ca7f2d 100644 --- a/napalm_logs/config/__init__.py +++ b/napalm_logs/config/__init__.py @@ -1,78 +1,77 @@ # -*- coding: utf-8 -*- -''' +""" Config defaults. 
-''' +""" from __future__ import absolute_import import os import tempfile import logging -import napalm_logs.ext.six as six # config -ROOT_DIR = '/' -CONFIG_FILE = os.path.join(ROOT_DIR, 'etc', 'napalm', 'logs') -ADDRESS = '0.0.0.0' +ROOT_DIR = "/" +CONFIG_FILE = os.path.join(ROOT_DIR, "etc", "napalm", "logs") +ADDRESS = "0.0.0.0" PORT = 514 -LISTENER = 'udp' +LISTENER = "udp" LOGGER = None -PUBLISHER = 'zmq' +PUBLISHER = "zmq" MAX_TCP_CLIENTS = 5 -PUBLISH_ADDRESS = '0.0.0.0' +PUBLISH_ADDRESS = "0.0.0.0" PUBLISH_PORT = 49017 -AUTH_ADDRESS = '0.0.0.0' +AUTH_ADDRESS = "0.0.0.0" AUTH_PORT = 49018 AUTH_MAX_TRY = 5 AUTH_TIMEOUT = 1 -SERIALIZER = 'msgpack' -LOG_LEVEL = 'warning' -LOG_FORMAT = '%(asctime)s,%(msecs)03.0f [%(name)-17s][%(levelname)-8s] %(message)s' -LOG_FILE = os.path.join(ROOT_DIR, 'var', 'log', 'napalm', 'logs') -LOG_FILE_CLI_OPTIONS = ('cli', 'screen') +SERIALIZER = "msgpack" +LOG_LEVEL = "warning" +LOG_FORMAT = "%(asctime)s,%(msecs)03.0f [%(name)-17s][%(levelname)-8s] %(message)s" +LOG_FILE = os.path.join(ROOT_DIR, "var", "log", "napalm", "logs") +LOG_FILE_CLI_OPTIONS = ("cli", "screen") ZMQ_INTERNAL_HWM = 1000 -METRICS_ADDRESS = '0.0.0.0' +METRICS_ADDRESS = "0.0.0.0" METRICS_PORT = 9443 METRICS_DIR = "/tmp/napalm_logs_metrics" BUFFER_EXPIRE_TIME = 5 # Allowed names for the init files. -OS_INIT_FILENAMES = ('__init__', 'init', 'index') +OS_INIT_FILENAMES = ("__init__", "init", "index") # The name of the function to be invoked when extracting the parts from the # raw syslog message. -INIT_RUN_FUN = 'extract' +INIT_RUN_FUN = "extract" # The name of the function to be invoked when the OpenConfig / IETF object # is generated. -CONFIG_RUN_FUN = 'emit' +CONFIG_RUN_FUN = "emit" -UNKNOWN_DEVICE_NAME = 'unknown' +UNKNOWN_DEVICE_NAME = "unknown" LISTENER_OPTS = {} -LOGGER_OPTS = {'send_raw': False, 'send_unknown': False} +LOGGER_OPTS = {"send_raw": False, "send_unknown": False} -PUBLISHER_OPTS = {'send_raw': False, 'send_unknown': False} +PUBLISHER_OPTS = {"send_raw": False, "send_unknown": False} LOGGING_LEVEL = { - 'debug': logging.DEBUG, - 'info': logging.INFO, - 'warning': logging.WARNING, - 'error': logging.ERROR, - 'critical': logging.CRITICAL, + "debug": logging.DEBUG, + "info": logging.INFO, + "warning": logging.WARNING, + "error": logging.ERROR, + "critical": logging.CRITICAL, } VALID_CONFIG = { - 'prefixes': [{'values': {'tag': six.string_type}, 'line': six.string_type}], - 'messages': [ + "prefixes": [{"values": {"tag": str}, "line": str}], + "messages": [ { # 'error' should be unique and vendor agnostic. # Currently we are using the JUNOS syslog message name as the canonical name. # This may change if we are able to find a more well defined naming system. 
- 'error': six.string_type, - 'tag': six.string_type, - 'values': dict, - 'line': six.string_type, - 'model': six.string_type, - 'mapping': {'variables': dict, 'static': dict}, + "error": str, + "tag": str, + "values": dict, + "line": str, + "model": str, + "mapping": {"variables": dict, "static": dict}, } ], } @@ -83,34 +82,34 @@ TIMEOUT = 60 # device -DEFAULT_DELIM = '//' +DEFAULT_DELIM = "//" # proc -PROC_DEAD_FLAGS = ('T', 'X', 'Z') +PROC_DEAD_FLAGS = ("T", "X", "Z") # zmq proxies TMP_DIR = tempfile.gettempdir() -AUTH_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-auth')) +AUTH_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-auth")) # the auth proxy is not used yet, TODO -LST_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-lst')) -SRV_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-srv')) +LST_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-lst")) +SRV_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-srv")) # the publisher IPC is used as proxy # the devices send the messages to the proxy # and the publisher subscribes to the proxy and # publishes them on the desired transport -DEV_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-dev')) +DEV_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-dev")) # the server publishes to a separate IPC per device -PUB_PX_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-pub-px')) -PUB_IPC_URL = 'ipc://{}'.format(os.path.join(TMP_DIR, 'napalm-logs-pub')) +PUB_PX_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-pub-px")) +PUB_IPC_URL = "ipc://{}".format(os.path.join(TMP_DIR, "napalm-logs-pub")) # auth -AUTH_KEEP_ALIVE = b'KEEPALIVE' -AUTH_KEEP_ALIVE_ACK = b'KEEPALIVEACK' +AUTH_KEEP_ALIVE = b"KEEPALIVE" +AUTH_KEEP_ALIVE_ACK = b"KEEPALIVEACK" AUTH_KEEP_ALIVE_INTERVAL = 10 AUTH_MAX_CONN = 5 AUTH_TIMEOUT = 5 -MAGIC_ACK = b'ACK' -MAGIC_REQ = b'INIT' -AUTH_CIPHER = 'ECDHE-RSA-AES256-GCM-SHA384' +MAGIC_ACK = b"ACK" +MAGIC_REQ = b"INIT" +AUTH_CIPHER = "ECDHE-RSA-AES256-GCM-SHA384" -OPEN_CONFIG_NO_MODEL = 'NO_MODEL' +OPEN_CONFIG_NO_MODEL = "NO_MODEL" diff --git a/napalm_logs/config/eos/MAINTENANCE_MODE.yml b/napalm_logs/config/eos/MAINTENANCE_MODE.yml new file mode 100644 index 00000000..d8e5940f --- /dev/null +++ b/napalm_logs/config/eos/MAINTENANCE_MODE.yml @@ -0,0 +1,23 @@ +messages: + + - error: MAINTENANCE_MODE_STARTED + tag: MMODE-5-MAINT_UNIT_STATE_CHANGE + state: 1 + values: {} + line: ': Maintenance unit state changed for unit MAINT-UNIT. Old State maintenanceModeEnter, New State underMaintenance' + model: ietf-notification-messages + mapping: + variables: {} + static: + notification-messages//notification-message//message-text: "maintenance started" + + - error: MAINTENANCE_MODE_ENDED + tag: MMODE-5-MAINT_UNIT_STATE_CHANGE + state: 0 + values: {} + line: ': Maintenance unit state changed for unit MAINT-UNIT. 
Old State maintenanceModeExit, New State active' + model: ietf-notification-messages + mapping: + variables: {} + static: + notification-messages//notification-message//message-text: "maintenance ended" diff --git a/napalm_logs/config/eos/PROCESS_RESTART.yml b/napalm_logs/config/eos/PROCESS_RESTART.yml new file mode 100644 index 00000000..4eb6db61 --- /dev/null +++ b/napalm_logs/config/eos/PROCESS_RESTART.yml @@ -0,0 +1,17 @@ +# This message is sent when an agent process is restarting +#Jan 24 02:50:31 HOSTNAME ProcMgr-worker: %PROCMGR-6-PROCESS_RESTART: Restarting 'Bgp' immediately (it had PID=32058) +messages: + # 'error' should be unique and vendor agnostic. + - error: PROCESS_RESTART + tag: PROCMGR-6-PROCESS_RESTART + values: + agent: ([\w-]+) + pid|int: (\d+) + line: ": Restarting '{agent}' immediately (it had PID={pid})" + model: NO_MODEL + mapping: + variables: + system//processes//process//name: agent + system//processes//process//pid: pid + static: + system//processes//process//state: restarting diff --git a/napalm_logs/config/eos/PROCESS_STARTED.yml b/napalm_logs/config/eos/PROCESS_STARTED.yml new file mode 100644 index 00000000..b04a45fe --- /dev/null +++ b/napalm_logs/config/eos/PROCESS_STARTED.yml @@ -0,0 +1,18 @@ +# This message is sent when an agent process starts +#Jan 24 02:50:31 HOSTNAME ProcMgr-worker: %PROCMGR-6-PROCESS_STARTED: 'Bgp' starting with PID=6186 (PPID=2030) -- execing '/usr/bin/Bgp' +messages: + # 'error' should be unique and vendor agnostic. + - error: PROCESS_STARTED + tag: PROCMGR-6-PROCESS_STARTED + values: + agent: ([\w-]+) + pid|int: (\d+) + line: ": '{agent}' starting with PID={pid} (PPID=2030) -- execing '/usr/bin/Bgp'" + model: NO_MODEL + mapping: + variables: + system//processes//process//name: agent + system//processes//process//pid: pid + static: + system//processes//process//state: started + system//processes//process//uptime: 0 diff --git a/napalm_logs/config/eos/PROCESS_TERMINATED.yml b/napalm_logs/config/eos/PROCESS_TERMINATED.yml new file mode 100644 index 00000000..ce7cccab --- /dev/null +++ b/napalm_logs/config/eos/PROCESS_TERMINATED.yml @@ -0,0 +1,17 @@ +# This message is sent when an agent process is terminated +#Jan 24 02:50:31 HOSTNAME ProcMgr-worker: %PROCMGR-6-PROCESS_TERMINATED: 'Bgp' (PID=32058, status=9) has terminated. +messages: + # 'error' should be unique and vendor agnostic. + - error: PROCESS_TERMINATED + tag: PROCMGR-6-PROCESS_TERMINATED + values: + agent: ([\w-]+) + pid|int: (\d+) + line: ": '{agent}' (PID={pid}, status=9) has terminated." 
+ model: NO_MODEL + mapping: + variables: + system//processes//process//name: agent + system//processes//process//pid: pid + static: + system//processes//process//state: terminated diff --git a/napalm_logs/config/eos/init.yml b/napalm_logs/config/eos/init.yml index 5c1e9b6f..6f886956 100644 --- a/napalm_logs/config/eos/init.yml +++ b/napalm_logs/config/eos/init.yml @@ -9,7 +9,7 @@ prefixes: date: (\w+ +\d+) time: (\d\d:\d\d:\d\d) host: ([^ ]+) - processName: (\w+) + processName: ([\w-]+) tag: ([\w-]+) line: '{date} {time} {host} {processName}: %{tag}' # ISO8601 date-time format @@ -17,6 +17,6 @@ prefixes: date: (\d{4}-\d{2}-\d{2}) time: (\d{2}:\d{2}:\d{2}[\.\d{3}]?[\+|-]\d{2}:\d{2}) host: ([^ ]+) - processName: (\w+) + processName: ([\w-]+) tag: ([\w-]+) line: '{date}T{time} {host} {processName}: %{tag}' diff --git a/napalm_logs/config/junos/FAILED_ALLOCATING_PACKET_BUFFER.yml b/napalm_logs/config/junos/FAILED_ALLOCATING_PACKET_BUFFER.yml new file mode 100644 index 00000000..080fcba6 --- /dev/null +++ b/napalm_logs/config/junos/FAILED_ALLOCATING_PACKET_BUFFER.yml @@ -0,0 +1,9 @@ +messages: + - error: FAILED_ALLOCATING_PACKET_BUFFER + tag: fpc0 + values: {} + line: '(buf alloc) failed allocating packet buffer' + model: NO_MODEL + mapping: + static: {} + variables: {} diff --git a/napalm_logs/config/junos/INTERFACE_UP.yml b/napalm_logs/config/junos/INTERFACE_UP.yml new file mode 100644 index 00000000..0d883201 --- /dev/null +++ b/napalm_logs/config/junos/INTERFACE_UP.yml @@ -0,0 +1,19 @@ +messages: + # 'error' should be unique and vendor agnostic. Currently we are using the JUNOS syslog message name as the canonical name. + # This may change if we are able to find a more well defined naming system. + - error: INTERFACE_UP + tag: SNMP_TRAP_LINK_UP + values: + snmpID: (\d+) + adminStatusString|upper: (\w+) + adminStatusValue: (\d) + operStatusString|upper: (\w+) + operStatusValue: (\d) + interface: ([\w\-\/\:]+) + line: 'ifIndex {snmpID}, ifAdminStatus {adminStatusString}({adminStatusValue}), ifOperStatus {operStatusString}({operStatusValue}), ifName {interface}' + model: openconfig-interfaces + mapping: + variables: + interfaces//interface//{interface}//state//admin_status: adminStatusString + interfaces//interface//{interface}//state//oper_status: operStatusString + static: {} diff --git a/napalm_logs/config/junos/init.yml b/napalm_logs/config/junos/init.yml index bb2aa24a..1b3d6300 100644 --- a/napalm_logs/config/junos/init.yml +++ b/napalm_logs/config/junos/init.yml @@ -58,6 +58,14 @@ prefixes: # Some logs have data which can be inside brackets or parenthesis additionalData: (?:(?:\[|\()(.+)(?:\]|\)))? line: '{date} {time} {hostPrefix}{host} fpc{fpcId} fpc{fpcId2} dcpfe: {tag}{additionalData}:' + - time_format: "%b %d %H:%M:%S" + values: + date: (\w+\s+\d+) + time: (\d\d:\d\d:\d\d) + hostPrefix: (re\d.)? + host: ([^ ]+) + tag: (fpc\d) + line: '{date} {time} {hostPrefix}{host} {tag}' # Same structures as above, but to match the ISO8601 date-time format. # jlaunchd doesn't have proper tags. Override the match to use jlaunchd as the tag instead. @@ -114,3 +122,11 @@ prefixes: # Some logs have data which can be inside brackets or parenthesis additionalData: (?:(?:\[|\()(.+)(?:\]|\)))? line: '{date}T{time} {hostPrefix}{host} fpc{fpcId} fpc{fpcId2} dcpfe: {tag}{additionalData}:' + - time_format: "%b %d %H:%M:%S" + values: + date: (\d{4}-\d{2}-\d{2}) + time: (\d{2}:\d{2}:\d{2}[\.\d{3}]?[\+|-]\d{2}:\d{2}) + hostPrefix: (re\d.)? 
+ host: ([^ ]+) + tag: (fpc\d) + line: '{date}T{time} {hostPrefix}{host} {tag}' diff --git a/napalm_logs/config/nxos/USER_LOGIN.py b/napalm_logs/config/nxos/USER_LOGIN.py index 17495036..11e9c959 100644 --- a/napalm_logs/config/nxos/USER_LOGIN.py +++ b/napalm_logs/config/nxos/USER_LOGIN.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -''' +""" Match messages AUTHPRIV-6-SYSTEM_MSG from NX-OS. Message example: @@ -24,7 +24,7 @@ } } } -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -34,25 +34,25 @@ import napalm_logs.utils from napalm_logs.config import OPEN_CONFIG_NO_MODEL -__tag__ = 'AUTHPRIV-6-SYSTEM_MSG' -__error__ = 'USER_LOGIN' +__tag__ = "AUTHPRIV-6-SYSTEM_MSG" +__error__ = "USER_LOGIN" __yang_model__ = OPEN_CONFIG_NO_MODEL log = logging.getLogger(__file__) -_RGX_PARTS = [('user', r'(\w+)'), ('uid', r'(\d+)'), ('sshPid', r'(\d+)')] +_RGX_PARTS = [("user", r"(\w+)"), ("uid", r"(\d+)"), ("sshPid", r"(\d+)")] _RGX_PARTS = OrderedDict(_RGX_PARTS) _RGX = ( - r'pam_unix\(dcos_sshd:session\): session opened for user ' - r'{0[user]} by \(uid={0[uid]}\) - dcos_sshd\[{0[sshPid]}\]' + r"pam_unix\(dcos_sshd:session\): session opened for user " + r"{0[user]} by \(uid={0[uid]}\) - dcos_sshd\[{0[sshPid]}\]" ).format( _RGX_PARTS ) # ATTENTION to escape the parens def emit(msg_dict): - ''' + """ Extracts the details from the syslog message and returns an object having the following structure: @@ -70,17 +70,17 @@ } } } - ''' - log.debug('Evaluating the message dict:') + """ + log.debug("Evaluating the message dict:") log.debug(msg_dict) ret = {} - extracted = napalm_logs.utils.extract(_RGX, msg_dict['message'], _RGX_PARTS) + extracted = napalm_logs.utils.extract(_RGX, msg_dict["message"], _RGX_PARTS) if not extracted: return ret - uid_key_path = 'users//user//{0[user]}//uid'.format(extracted) - uid_value = int(extracted['uid']) - log.debug('Setting %d under key path %s', uid_value, uid_key_path) + uid_key_path = "users//user//{0[user]}//uid".format(extracted) + uid_value = int(extracted["uid"]) + log.debug("Setting %d under key path %s", uid_value, uid_key_path) ret.update(napalm_logs.utils.setval(uid_key_path, uid_value, dict_=ret)) - login_key_path = 'users//user//{0[user]}//action//login'.format(extracted) + login_key_path = "users//user//{0[user]}//action//login".format(extracted) ret.update(napalm_logs.utils.setval(login_key_path, True, dict_=ret)) return ret diff --git a/napalm_logs/config/nxos/__init__.py b/napalm_logs/config/nxos/__init__.py index 8e35e33e..53a5c3a5 100644 --- a/napalm_logs/config/nxos/__init__.py +++ b/napalm_logs/config/nxos/__init__.py @@ -1,43 +1,43 @@ # -*- coding: utf-8 -*- -''' +""" Prefix profiler for Nexus devices.
This profiler matches messages having the following form: sw01.bjm01: 2017 Jul 26 14:42:46 UTC: %AUTHPRIV-6-SYSTEM_MSG: pam_unix(dcos_sshd:session): session opened for user luke by (uid=0) - dcos_sshd[12977] # noqa -''' +""" from collections import OrderedDict import napalm_logs.utils _RGX_PARTS = [ - ('pri', r'(\d+)'), - ('host', r'([^ ]+)'), - ('date', r'(\d+ \w+ +\d+)'), - ('time', r'(\d\d:\d\d:\d\d)'), - ('timeZone', r'(\w\w\w)'), - ('tag', r'([\w\d-]+)'), - ('message', r'(.*)'), + ("pri", r"(\d+)"), + ("host", r"([^ ]+)"), + ("date", r"(\d+ \w+ +\d+)"), + ("time", r"(\d\d:\d\d:\d\d)"), + ("timeZone", r"(\w\w\w)"), + ("tag", r"([\w\d-]+)"), + ("message", r"(.*)"), ] _RGX_PARTS = OrderedDict(_RGX_PARTS) -_RGX = r'\<{0[pri]}\>{0[host]}: {0[date]} {0[time]} {0[timeZone]}: %{0[tag]}: {0[message]}'.format( +_RGX = r"\<{0[pri]}\>{0[host]}: {0[date]} {0[time]} {0[timeZone]}: %{0[tag]}: {0[message]}".format( _RGX_PARTS ) _ALT_RGX_PARTS = [ - ('pri', r'(\d+)'), - ('date', r'(\d+ \w+ +\d+)'), - ('time', r'(\d\d:\d\d:\d\d)'), - ('host', r'([^ ]+)'), - ('tag', r'([\w\d-]+)'), - ('message', r'(.*)'), + ("pri", r"(\d+)"), + ("date", r"(\d+ \w+ +\d+)"), + ("time", r"(\d\d:\d\d:\d\d)"), + ("host", r"([^ ]+)"), + ("tag", r"([\w\d-]+)"), + ("message", r"(.*)"), ] _ALT_RGX_PARTS = OrderedDict(_ALT_RGX_PARTS) -_ALT_RGX = r'\<{0[pri]}\>{0[date]} {0[time]} {0[host]} %{0[tag]}: {0[message]}'.format( +_ALT_RGX = r"\<{0[pri]}\>{0[date]} {0[time]} {0[host]} %{0[tag]}: {0[message]}".format( _ALT_RGX_PARTS ) -_TIME_FORMAT = ('{date} {time} {timeZone}', '%Y %b %d %H:%M:%S %Z') -_ALT_TIME_FORMAT = ('{date} {time}', '%Y %b %d %H:%M:%S') +_TIME_FORMAT = ("{date} {time} {timeZone}", "%Y %b %d %H:%M:%S %Z") +_ALT_TIME_FORMAT = ("{date} {time}", "%Y %b %d %H:%M:%S") def extract(msg): diff --git a/napalm_logs/device.py b/napalm_logs/device.py index 9b91832b..47f02e81 100644 --- a/napalm_logs/device.py +++ b/napalm_logs/device.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Device worker process -''' +""" from __future__ import absolute_import # Import python stdlib @@ -22,7 +22,6 @@ # Import napalm-logs pkgs import napalm_logs.utils -import napalm_logs.ext.six as six from napalm_logs.proc import NapalmLogsProc from napalm_logs.config import PUB_PX_IPC_URL from napalm_logs.config import DEV_IPC_URL @@ -34,13 +33,13 @@ class NapalmLogsDeviceProc(NapalmLogsProc): - ''' + """ Device sub-process class. - ''' + """ def __init__(self, name, opts, config): self._name = name - log.debug('Starting process for %s', self._name) + log.debug("Starting process for %s", self._name) self._config = config self.opts = opts self.__up = False @@ -48,139 +47,130 @@ def __init__(self, name, opts, config): self._compile_messages() def _exit_gracefully(self, signum, _): - log.debug('Caught signal in %s device process', self._name) + log.debug("Caught signal in %s device process", self._name) self.stop() def _setup_ipc(self): - ''' + """ Subscribe to the right topic in the device IPC and publish to the publisher proxy. 
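Reviewer note: the device worker set up below subscribes over a DEALER socket whose identity is the device OS name, presumably so the server side can route messages per OS. A hedged sketch of what the simplified Python-3-only setup amounts to (values shown are the defaults, not part of the diff):

    import zmq
    from napalm_logs.config import DEV_IPC_URL

    ctx = zmq.Context()
    skt = ctx.socket(zmq.DEALER)
    skt.setsockopt(zmq.IDENTITY, b"junos")  # one identity per device OS
    skt.setsockopt(zmq.RCVHWM, 1000)        # mirrors opts["hwm"]
    skt.connect(DEV_IPC_URL)                # ipc:///tmp/napalm-logs-dev by default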
- ''' + """ self.ctx = zmq.Context() # subscribe to device IPC - log.debug('Creating the dealer IPC for %s', self._name) + log.debug("Creating the dealer IPC for %s", self._name) self.sub = self.ctx.socket(zmq.DEALER) - if six.PY2: - self.sub.setsockopt(zmq.IDENTITY, self._name) - elif six.PY3: - self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, 'utf-8')) - try: - self.sub.setsockopt(zmq.HWM, self.opts['hwm']) - # zmq 2 - except AttributeError: - # zmq 3 - self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) + self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, "utf-8")) + self.sub.setsockopt(zmq.RCVHWM, self.opts["hwm"]) # subscribe to the corresponding IPC pipe self.sub.connect(DEV_IPC_URL) # publish to the publisher IPC self.pub = self.ctx.socket(zmq.PUB) self.pub.connect(PUB_PX_IPC_URL) - try: - self.pub.setsockopt(zmq.HWM, self.opts['hwm']) - # zmq 2 - except AttributeError: - # zmq 3 - self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) + self.pub.setsockopt(zmq.SNDHWM, self.opts["hwm"]) def _compile_messages(self): - ''' + """ Create a list of all OS messages and their compiled regexes - ''' + """ self.compiled_messages = [] if not self._config: return - for message_dict in self._config.get('messages', {}): - error = message_dict['error'] - tag = message_dict['tag'] - model = message_dict['model'] - match_on = message_dict.get('match_on', 'tag') - if '__python_fun__' in message_dict: + for message_dict in self._config.get("messages", {}): + error = message_dict["error"] + tag = message_dict["tag"] + model = message_dict["model"] + match_on = message_dict.get("match_on", "tag") + if "__python_fun__" in message_dict: self.compiled_messages.append( { - 'error': error, - 'tag': tag, - 'match_on': match_on, - 'model': model, - '__python_fun__': message_dict['__python_fun__'], + "error": error, + "tag": tag, + "match_on": match_on, + "model": model, + "__python_fun__": message_dict["__python_fun__"], } ) continue - values = message_dict['values'] - line = message_dict['line'] - mapping = message_dict['mapping'] + values = message_dict["values"] + line = message_dict["line"] + mapping = message_dict["mapping"] # We will now figure out which position each value is in so we can use it with the match statement position = {} replace = {} - for key in values.keys(): - if '|' in key: - new_key, replace[new_key] = key.replace(' ', '').split('|') + for key in list(values.keys()): + if "|" in key: + new_key, replace[new_key] = key.replace(" ", "").split("|") values[new_key] = values.pop(key) key = new_key - position[line.find('{' + key + '}')] = key + position[line.find("{" + key + "}")] = key sorted_position = {} for i, elem in enumerate(sorted(position.items())): sorted_position[elem[1]] = i + 1 # Escape the line, then remove the escape for the curly brackets so they can be used when formatting - escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}') + escaped = re.escape(line).replace(r"\{", "{").replace(r"\}", "}") # Replace a whitespace with \s+ - escaped = escaped.replace(r'\ ', r'\s+') + escaped = escaped.replace(r"\ ", r"\s+") self.compiled_messages.append( { - 'error': error, - 'tag': tag, - 'match_on': match_on, - 'line': re.compile(escaped.format(**values)), - 'positions': sorted_position, - 'values': values, - 'replace': replace, - 'model': model, - 'mapping': mapping, + "error": error, + "tag": tag, + "match_on": match_on, + "line": re.compile(escaped.format(**values)), + "positions": sorted_position, + "values": values, + "replace": replace, + "model": model, + "mapping":
mapping, + "state": message_dict.get("state"), + "state_tag": message_dict.get("state_tag"), } ) - log.debug('Compiled messages:') + log.debug("Compiled messages:") log.debug(self.compiled_messages) def _parse(self, msg_dict): - ''' + """ Parse a syslog message and check what OpenConfig object should be generated. - ''' + """ error_present = False # log.debug('Matching the message:') # log.debug(msg_dict) for message in self.compiled_messages: # log.debug('Matching using:') # log.debug(message) - match_on = message['match_on'] + match_on = message["match_on"] if match_on not in msg_dict: # log.debug('%s is not a valid key in the partially parsed dict', match_on) continue - if message['tag'] != msg_dict[match_on]: + if message["tag"] != msg_dict[match_on]: continue - if '__python_fun__' in message: + if "__python_fun__" in message: return { - 'model': message['model'], - 'error': message['error'], - '__python_fun__': message['__python_fun__'], + "model": message["model"], + "error": message["error"], + "__python_fun__": message["__python_fun__"], } error_present = True - match = message['line'].search(msg_dict['message']) + match = message["line"].search(msg_dict["message"]) if not match: continue - positions = message.get('positions', {}) - values = message.get('values') + positions = message.get("positions", {}) + values = message.get("values") ret = { - 'model': message['model'], - 'mapping': message['mapping'], - 'replace': message['replace'], - 'error': message['error'], + "model": message["model"], + "mapping": message["mapping"], + "replace": message["replace"], + "error": message["error"], + "_state": message["state"], + "_state_tag": message["state_tag"], } for key in values.keys(): # Check if the value needs to be replaced - if key in message['replace']: + if key in message["replace"]: result = napalm_logs.utils.cast( - match.group(positions.get(key)), message['replace'][key] + match.group(positions.get(key)), message["replace"][key] ) else: result = match.group(positions.get(key)) @@ -188,29 +178,29 @@ def _parse(self, msg_dict): return ret if error_present is True: log.info( - 'Configured regex did not match for os: %s tag %s', + "Configured regex did not match for os: %s tag %s", self._name, - msg_dict.get('tag', ''), + msg_dict.get("tag", ""), ) else: log.info( - 'Syslog message not configured for os: %s tag %s', + "Syslog message not configured for os: %s tag %s", self._name, - msg_dict.get('tag', ''), + msg_dict.get("tag", ""), ) def _emit(self, **kwargs): - ''' + """ Emit an OpenConfig object given a certain combination of fields mapped in the config to the corresponding hierarchy. - ''' + """ oc_dict = {} - for mapping, result_key in kwargs['mapping']['variables'].items(): + for mapping, result_key in kwargs["mapping"]["variables"].items(): result = kwargs[result_key] oc_dict = napalm_logs.utils.setval( mapping.format(**kwargs), result, oc_dict ) - for mapping, result in kwargs['mapping']['static'].items(): + for mapping, result in kwargs["mapping"]["static"].items(): oc_dict = napalm_logs.utils.setval( mapping.format(**kwargs), result, oc_dict ) @@ -218,51 +208,51 @@ def _parse(self, msg_dict): return oc_dict def _publish(self, obj): - ''' + """ Publish the OC object.
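Reviewer note: to make the values/line compilation above concrete, here is a hand-rolled equivalent of the regex _compile_messages would build for the EOS PROCESS_RESTART profile added earlier in this diff; the escaping is simplified and the sample message is invented.

    import re

    # values: agent -> ([\w-]+), pid|int -> (\d+); whitespace in the line becomes \s+
    compiled = re.compile(r":\s+Restarting\s+'([\w-]+)'\s+immediately\s+\(it\s+had\s+PID=(\d+)\)")
    match = compiled.search(": Restarting 'Bgp' immediately (it had PID=32058)")
    agent = match.group(1)     # 'Bgp'
    pid = int(match.group(2))  # the '|int' suffix in the profile triggers this cast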
- ''' + """ bin_obj = umsgpack.packb(obj) self.pub.send(bin_obj) def _format_time(self, time, date, timezone, prefix_id): date_time = None if time and date: - date_time = dateparser.parse('{} {}'.format(date, time)) + date_time = dateparser.parse("{} {}".format(date, time)) if not date_time: tz = dateutil.tz.gettz(timezone) date_time = datetime.datetime.now(tz) return int(calendar.timegm(date_time.utctimetuple())) def start(self): - ''' + """ Start the worker process. - ''' + """ # metrics napalm_logs_device_messages_received = Counter( - 'napalm_logs_device_messages_received', + "napalm_logs_device_messages_received", "Count of messages received by the device process", - ['device_os'], + ["device_os"], ) napalm_logs_device_raw_published_messages = Counter( - 'napalm_logs_device_raw_published_messages', + "napalm_logs_device_raw_published_messages", "Count of raw type published messages", - ['device_os'], + ["device_os"], ) napalm_logs_device_published_messages = Counter( - 'napalm_logs_device_published_messages', + "napalm_logs_device_published_messages", "Count of published messages", - ['device_os'], + ["device_os"], ) napalm_logs_device_oc_object_failed = Counter( - 'napalm_logs_device_oc_object_failed', + "napalm_logs_device_oc_object_failed", "Counter of failed OpenConfig object generations", - ['device_os'], + ["device_os"], ) - if self.opts.get('metrics_include_attributes', True): + if self.opts.get("metrics_include_attributes", True): napalm_logs_device_published_messages_attrs = Counter( - 'napalm_logs_device_published_messages_attrs', + "napalm_logs_device_published_messages_attrs", "Counter of published messages, with more granular selection", - ['device_os', 'host', 'error'], + ["device_os", "host", "error"], ) self._setup_ipc() # Start suicide polling thread @@ -280,44 +270,44 @@ def start(self): msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: - log.info('Exiting on process shutdown [%s]', self._name) + log.info("Exiting on process shutdown [%s]", self._name) return else: raise NapalmLogsExit(error) log.debug( - '%s: dequeued %s, received from %s', self._name, msg_dict, address + "%s: dequeued %s, received from %s", self._name, msg_dict, address ) napalm_logs_device_messages_received.labels(device_os=self._name).inc() - host = msg_dict.get('host') - prefix_id = msg_dict.pop('__prefix_id__') - if 'timestamp' in msg_dict: - timestamp = msg_dict.pop('timestamp') + host = msg_dict.get("host") + prefix_id = msg_dict.pop("__prefix_id__") + if "timestamp" in msg_dict: + timestamp = msg_dict.pop("timestamp") else: timestamp = self._format_time( - msg_dict.get('time', ''), - msg_dict.get('date', ''), - msg_dict.get('timeZone', 'UTC'), + msg_dict.get("time", ""), + msg_dict.get("date", ""), + msg_dict.get("timeZone", "UTC"), prefix_id, ) - facility = msg_dict.get('facility') - severity = msg_dict.get('severity') + facility = msg_dict.get("facility") + severity = msg_dict.get("severity") kwargs = self._parse(msg_dict) if not kwargs: # Unable to identify what model to generate for the message in cause. # But publish the message when the user requested to push raw messages. 
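Reviewer note: a sketch of the timestamp fallback _format_time implements above -- parse the date and time extracted from the syslog prefix, and fall back to the current time in the message's timezone when parsing fails. Sample values are invented.

    import calendar
    import datetime
    import dateparser
    import dateutil.tz

    date_time = dateparser.parse("{} {}".format("Jan 24", "02:50:31"))
    if not date_time:  # unparsable date/time: assume "now" in the given timezone
        date_time = datetime.datetime.now(dateutil.tz.gettz("UTC"))
    timestamp = int(calendar.timegm(date_time.utctimetuple()))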
to_publish = { - 'ip': address, - 'host': host, - 'timestamp': timestamp, - 'message_details': msg_dict, - 'os': self._name, - 'error': 'RAW', - 'model_name': 'raw', - 'facility': facility, - 'severity': severity, + "ip": address, + "host": host, + "timestamp": timestamp, + "message_details": msg_dict, + "os": self._name, + "error": "RAW", + "model_name": "raw", + "facility": facility, + "severity": severity, } - log.debug('Queueing to be published:') + log.debug("Queueing to be published:") log.debug(to_publish) # self.pub_pipe.send(to_publish) self.pub.send(umsgpack.packb(to_publish)) @@ -326,53 +316,57 @@ def start(self): ).inc() continue try: - if '__python_fun__' in kwargs: + if "__python_fun__" in kwargs: log.debug( - 'Using the Python parser to determine the YANG-equivalent object' + "Using the Python parser to determine the YANG-equivalent object" ) - yang_obj = kwargs['__python_fun__'](msg_dict) + yang_obj = kwargs["__python_fun__"](msg_dict) else: yang_obj = self._emit(**kwargs) except Exception: log.exception( - 'Unexpected error when generating the OC object.', exc_info=True + "Unexpected error when generating the OC object.", exc_info=True ) napalm_logs_device_oc_object_failed.labels(device_os=self._name).inc() continue - log.debug('Generated OC object:') + log.debug("Generated OC object:") log.debug(yang_obj) - error = kwargs.get('error') - model_name = kwargs.get('model') + error = kwargs.get("error") + model_name = kwargs.get("model") to_publish = { - 'error': error, - 'host': host, - 'ip': address, - 'timestamp': timestamp, - 'yang_message': yang_obj, - 'message_details': msg_dict, - 'yang_model': model_name, - 'os': self._name, - 'facility': facility, - 'severity': severity, + "error": error, + "host": host, + "ip": address, + "timestamp": timestamp, + "yang_message": yang_obj, + "message_details": msg_dict, + "yang_model": model_name, + "os": self._name, + "facility": facility, + "severity": severity, } - log.debug('Queueing to be published:') + if kwargs.get("_state") is not None: + to_publish["state"] = kwargs["_state"] + if kwargs.get("_state_tag"): + to_publish["state_tag"] = kwargs["_state_tag"] + log.debug("Queueing to be published:") log.debug(to_publish) # self.pub_pipe.send(to_publish) self.pub.send(umsgpack.packb(to_publish)) # self._publish(to_publish) napalm_logs_device_published_messages.labels(device_os=self._name).inc() - if self.opts.get('metrics_include_attributes', True): + if self.opts.get("metrics_include_attributes", True): napalm_logs_device_published_messages_attrs.labels( device_os=self._name, - error=to_publish['error'], - host=to_publish['host'], + error=to_publish["error"], + host=to_publish["host"], ).inc() def stop(self): - ''' + """ Stop the worker process. - ''' - log.info('Stopping %s device process', self._name) + """ + log.info("Stopping %s device process", self._name) self.__up = False self.sub.close() self.pub.close() diff --git a/napalm_logs/exceptions.py b/napalm_logs/exceptions.py index 3a75d215..cf1e7aa9 100644 --- a/napalm_logs/exceptions.py +++ b/napalm_logs/exceptions.py @@ -1,145 +1,145 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs exceptions. -''' +""" from __future__ import absolute_import class NapalmLogsException(Exception): - ''' + """ Base exception class; all napalm-logs exceptions should inherit this. 
- ''' + """ - def __init__(self, msg=''): + def __init__(self, msg=""): super(NapalmLogsException, self).__init__(msg) self.strerror = msg class BindException(NapalmLogsException): - ''' + """ Exception raised when unable to bind the listener to the specified IP address / port. Either the values are not correct, or another process is already using them. - ''' + """ pass class TransportException(NapalmLogsException): - ''' + """ Exception raised when encountering an error in a transport process. - ''' + """ pass class InvalidTransportException(TransportException): - ''' + """ Raised when the user selects a transport that does not exist. - ''' + """ pass class ListenerException(NapalmLogsException): - ''' + """ Exception raised when encountering an exception in a listener process. - ''' + """ pass class InvalidListenerException(ListenerException): - ''' + """ Raised when the user selects a listener that does not exist. - ''' + """ pass class SerializerException(NapalmLogsException): - ''' + """ Raised in case of serializer-related errors. - ''' + """ pass class InvalidSerializerException(SerializerException): - ''' + """ Raised when the user selects a serializer that is not available. - ''' + """ pass class BufferException(NapalmLogsException): - ''' + """ Raised in case of buffer errors. - ''' + """ pass class InvalidBufferException(BufferException): - ''' + """ Raised when the user selects a buffer interface that is not available. - ''' + """ pass class ConfigurationException(NapalmLogsException): - ''' + """ Exception thrown when the user configuration is not correct. - ''' + """ pass class OpenConfigPathException(NapalmLogsException): - ''' + """ Unable to set the open config path specified. - ''' + """ pass class NapalmLogsExit(NapalmLogsException): - ''' + """ Raised on unexpected exit. - ''' + """ pass class CryptoException(NapalmLogsException): - ''' + """ Raised when unable to decrypt. - ''' + """ pass class BadSignatureException(NapalmLogsException): - ''' + """ Raised when the signature was forged or corrupted. - ''' + """ pass class SSLMismatchException(NapalmLogsException): - ''' + """ Raised when the SSL certificate and key do not match. - ''' + """ pass class ClientConnectException(NapalmLogsException): - ''' + """ Raised when the client is unable to connect. - ''' + """ pass diff --git a/napalm_logs/ext/__init__.py b/napalm_logs/ext/__init__.py deleted file mode 100644 index 557f4f9e..00000000 --- a/napalm_logs/ext/__init__.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -''' -External modules and functions. -''' -from __future__ import absolute_import - -import re -import fnmatch -import logging - -log = logging.getLogger(__name__) - - -def expr_match(line, expr): - ''' - Evaluate a line of text against an expression. First try a full-string - match, next try globbing, and then try to match assuming expr is a regular - expression. Originally designed to match minion IDs for - whitelists/blacklists. - ''' - if line == expr: - return True - if fnmatch.fnmatch(line, expr): - return True - try: - if re.match(r'\A{0}\Z'.format(expr), line): - return True - except re.error: - pass - return False - - -def check_whitelist_blacklist(value, whitelist=None, blacklist=None): - ''' - Check a whitelist and/or blacklist to see if the value matches it. - - value - The item to check the whitelist and/or blacklist against. - - whitelist - The list of items that are white-listed. If ``value`` is found - in the whitelist, then the function returns ``True``.
Otherwise, - it returns ``False``. - - blacklist - The list of items that are black-listed. If ``value`` is found - in the blacklist, then the function returns ``False``. Otherwise, - it returns ``True``. - - If both a whitelist and a blacklist are provided, value membership - in the blacklist will be examined first. If the value is not found - in the blacklist, then the whitelist is checked. If the value isn't - found in the whitelist, the function returns ``False``. - ''' - if blacklist is not None: - if not hasattr(blacklist, '__iter__'): - blacklist = [blacklist] - try: - for expr in blacklist: - if expr_match(value, expr): - return False - except TypeError: - log.error('Non-iterable blacklist {0}'.format(blacklist)) - - if whitelist: - if not hasattr(whitelist, '__iter__'): - whitelist = [whitelist] - try: - for expr in whitelist: - if expr_match(value, expr): - return True - except TypeError: - log.error('Non-iterable whitelist {0}'.format(whitelist)) - else: - return True - - return False diff --git a/napalm_logs/ext/six.py b/napalm_logs/ext/six.py deleted file mode 100644 index 6d35ce35..00000000 --- a/napalm_logs/ext/six.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Python 2-3 compatibility. -''' -from __future__ import absolute_import -from __future__ import unicode_literals - -import sys - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -if PY3: - string_type = str - text_type = str - binary_type = bytes -else: - string_type = basestring # noqa - text_type = unicode # noqa - binary_type = str diff --git a/napalm_logs/listener/__init__.py b/napalm_logs/listener/__init__.py index dab785f4..76cb4168 100644 --- a/napalm_logs/listener/__init__.py +++ b/napalm_logs/listener/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs pluggable listener. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -24,34 +24,34 @@ log = logging.getLogger(__file__) LISTENER_LOOKUP = { - 'tcp': TCPListener, - 'udp': UDPListener, - '*': UDPListener, # default listener + "tcp": TCPListener, + "udp": UDPListener, + "*": UDPListener, # default listener } if HAS_KAFKA: - log.info('Kafka dependency seems to be installed, making kafka listener available.') - LISTENER_LOOKUP['kafka'] = KafkaListener + log.info("Kafka dependency seems to be installed, making kafka listener available.") + LISTENER_LOOKUP["kafka"] = KafkaListener if HAS_ZMQ: - log.info('Adding ZMQ listener') - LISTENER_LOOKUP['zmq'] = ZMQListener - LISTENER_LOOKUP['zeromq'] = ZMQListener + log.info("Adding ZMQ listener") + LISTENER_LOOKUP["zmq"] = ZMQListener + LISTENER_LOOKUP["zeromq"] = ZMQListener def get_listener(name): - ''' + """ Return the listener class. - ''' + """ try: - log.debug('Using %s as listener', name) + log.debug("Using %s as listener", name) return LISTENER_LOOKUP[name] except KeyError: - msg = 'Listener {} is not available. Are the dependencies installed?'.format( + msg = "Listener {} is not available. Are the dependencies installed?".format( name ) log.error(msg, exc_info=True) raise InvalidListenerException(msg) -__all__ = ('get_listener',) +__all__ = ("get_listener",) diff --git a/napalm_logs/listener/base.py b/napalm_logs/listener/base.py index d999a332..17a67c49 100644 --- a/napalm_logs/listener/base.py +++ b/napalm_logs/listener/base.py @@ -1,32 +1,32 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs listener base. -''' +""" class ListenerBase: - ''' + """ The base class for the listener. 
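Reviewer note: the LISTENER_LOOKUP/get_listener registry above follows the same pattern as the buffer and transport plugins. An illustrative use, assuming the default UDP listener and port:

    from napalm_logs.listener import get_listener

    listener_class = get_listener("udp")   # unknown names raise InvalidListenerException
    listener = listener_class("0.0.0.0", 514)
    listener.start()
    msg, source = listener.receive()       # blocks until a datagram arrives
    listener.stop()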
- ''' + """ def __init__(self, address, port, **kwargs): pass def start(self): - ''' + """ Starts the listener. - ''' + """ pass def receive(self): - ''' + """ Return an object read from the source, and the location identification object. - ''' + """ pass def stop(self): - ''' + """ Shuts down the listener. - ''' + """ pass diff --git a/napalm_logs/listener/kafka.py b/napalm_logs/listener/kafka.py index 9a9991bb..2232933b 100644 --- a/napalm_logs/listener/kafka.py +++ b/napalm_logs/listener/kafka.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Kafka listener for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -26,27 +26,27 @@ class KafkaListener(ListenerBase): - ''' + """ Kafka listener class. - ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - address = kwargs['address'] - if kwargs.get('port'): - port = kwargs['port'] + if kwargs.get("address"): + address = kwargs["address"] + if kwargs.get("port"): + port = kwargs["port"] self.bootstrap_servers = kwargs.get( - 'bootstrap_servers', '{}:{}'.format(address, port) + "bootstrap_servers", "{}:{}".format(address, port) ) - self.group_id = kwargs.get('group_id', 'napalm-logs') - self.topic = kwargs.get('topic', 'syslog.net') + self.group_id = kwargs.get("group_id", "napalm-logs") + self.topic = kwargs.get("topic", "syslog.net") def start(self): - ''' + """ Startup the kafka consumer. - ''' + """ log.debug( - 'Creating the consumer using the bootstrap servers: %s and the group ID: %s', + "Creating the consumer using the bootstrap servers: %s and the group ID: %s", self.bootstrap_servers, self.group_id, ) @@ -57,32 +57,32 @@ def start(self): except kafka.errors.NoBrokersAvailable as err: log.error(err, exc_info=True) raise ListenerException(err) - log.debug('Subscribing to the %s topic', self.topic) + log.debug("Subscribing to the %s topic", self.topic) self.consumer.subscribe(topics=[self.topic]) def receive(self): - ''' + """ Return the message received and the address. - ''' + """ try: msg = next(self.consumer) except ValueError as error: - log.error('Received kafka error: %s', error, exc_info=True) + log.error("Received kafka error: %s", error, exc_info=True) raise ListenerException(error) log_source = msg.key try: - decoded = json.loads(msg.value.decode('utf-8')) + decoded = json.loads(msg.value.decode("utf-8")) except ValueError: - log.error('Not in json format: %s', msg.value.decode('utf-8')) - return '', '' - log_message = decoded.get('message') - log.debug('[%s] Received %s from %s', log_message, log_source, time.time()) + log.error("Not in json format: %s", msg.value.decode("utf-8")) + return "", "" + log_message = decoded.get("message") + log.debug("[%s] Received %s from %s", time.time(), log_message, log_source) return log_message, log_source def stop(self): - ''' + """ Shutdown kafka consumer. - ''' - log.info('Stopping te kafka listener class') + """ + log.info("Stopping the kafka listener class") self.consumer.unsubscribe() self.consumer.close() diff --git a/napalm_logs/listener/tcp.py b/napalm_logs/listener/tcp.py index 6c70a5de..9f2989e4 100644 --- a/napalm_logs/listener/tcp.py +++ b/napalm_logs/listener/tcp.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Syslog TCP listener for napalm-logs.
-''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -33,50 +33,50 @@ log = logging.getLogger(__name__) -OCTET_FRAMING_RGX = r'\d+\s(<\d+>)' +OCTET_FRAMING_RGX = r"\d+\s(<\d+>)" class TCPListener(ListenerBase): - ''' + """ TCP syslog listener class - ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - self.address = kwargs['address'] + if kwargs.get("address"): + self.address = kwargs["address"] else: self.address = address - if kwargs.get('port'): - self.port = kwargs['port'] + if kwargs.get("port"): + self.port = kwargs["port"] else: self.port = port - self.buffer_size = kwargs.get('buffer_size', BUFFER_SIZE) - self.reuse_port = kwargs.get('reuse_port', REUSE_PORT) - self.socket_timeout = kwargs.get('socket_timeout', TIMEOUT) - self.max_clients = kwargs.get('max_clients', MAX_TCP_CLIENTS) - self.framing = kwargs.get('framing', 'traditional') - self.frame_delimiter = kwargs.get('frame_delimiter', '\n') + self.buffer_size = kwargs.get("buffer_size", BUFFER_SIZE) + self.reuse_port = kwargs.get("reuse_port", REUSE_PORT) + self.socket_timeout = kwargs.get("socket_timeout", TIMEOUT) + self.max_clients = kwargs.get("max_clients", MAX_TCP_CLIENTS) + self.framing = kwargs.get("framing", "traditional") + self.frame_delimiter = kwargs.get("frame_delimiter", "\n") self.buffer = queue.Queue() def _client_connection(self, conn, addr): - ''' + """ Handle the connection with one client. - ''' - log.debug('Established connection with %s:%d', addr[0], addr[1]) + """ + log.debug("Established connection with %s:%d", addr[0], addr[1]) conn.settimeout(self.socket_timeout) try: - prev_msg = '' + prev_msg = "" while self.__up: msg = conn.recv(self.buffer_size) if not msg: # log.debug('Received empty message from %s', addr) # disabled ^ as it was too noisy continue - log.debug('[%s] Received %s from %s', time.time(), msg, addr) + log.debug("[%s] Received %s from %s", time.time(), msg, addr) messages = [] if isinstance(msg, bytes): - msg = msg.decode('utf-8') - if self.framing == 'traditional': + msg = msg.decode("utf-8") + if self.framing == "traditional": msg = prev_msg + msg msg_lines = msg.split(self.frame_delimiter) if len(msg_lines) > 1: @@ -85,76 +85,80 @@ prev_msg = msg_lines[-1] else: messages = [msg] - elif self.framing == 'octet-counted': + elif self.framing == "octet-counted": msg_chunks = re.split(OCTET_FRAMING_RGX, msg) messages = [ - '{}{}'.format(pri, body).strip() + "{}{}".format(pri, body).strip() for pri, body in zip(msg_chunks[1::2], msg_chunks[2::2]) ] for message in messages: - log.debug('[%s] Queueing %s', time.time(), message) - self.buffer.put((message, '{}:{}'.format(addr[0], addr[1]))) + log.debug("[%s] Queueing %s", time.time(), message) + self.buffer.put((message, "{}:{}".format(addr[0], addr[1]))) except socket.timeout: if not self.__up: return - log.info('Connection %s:%d timed out', addr[0], addr[1]) + log.info("Connection %s:%d timed out", addr[0], addr[1]) finally: - log.debug('Closing connection with %s', addr) + log.debug("Closing connection with %s", addr) conn.close() def _serve_clients(self): - ''' + """ Accept clients and serve, one separate thread per client.
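Reviewer note: an illustrative run of the octet-framing split used by the TCP listener above; the length prefixes and payloads are invented, and only the PRI kept by the capture group matters to the split.

    import re

    OCTET_FRAMING_RGX = r"\d+\s(<\d+>)"
    stream = "52 <165>Jan 24 02:50:31 r1 fpc0: first message 33 <165>Jan 24 02:50:32 r1 second"
    chunks = re.split(OCTET_FRAMING_RGX, stream)
    messages = [
        "{}{}".format(pri, body).strip()
        for pri, body in zip(chunks[1::2], chunks[2::2])
    ]
    # ['<165>Jan 24 02:50:31 r1 fpc0: first message', '<165>Jan 24 02:50:32 r1 second']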
- ''' + """ self.__up = True while self.__up: - log.debug('Waiting for a client to connect') + log.debug("Waiting for a client to connect") try: conn, addr = self.skt.accept() - log.debug('Received connection from %s:%d', addr[0], addr[1]) + log.debug("Received connection from %s:%d", addr[0], addr[1]) except socket.error as error: if not self.__up: return - msg = 'Received listener socket error: {}'.format(error) + msg = "Received listener socket error: {}".format(error) log.error(msg, exc_info=True) raise ListenerException(msg) client_thread = threading.Thread( - target=self._client_connection, args=(conn, addr,) + target=self._client_connection, + args=( + conn, + addr, + ), ) client_thread.start() def start(self): - ''' + """ Start listening for messages. - ''' - log.debug('Creating the TCP server') - if ':' in self.address: + """ + log.debug("Creating the TCP server") + if ":" in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if hasattr(socket, 'SO_REUSEPORT'): + if hasattr(socket, "SO_REUSEPORT"): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: - log.error('SO_REUSEPORT not supported') + log.error("SO_REUSEPORT not supported") try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: - error_string = 'Unable to bind to port {} on {}: {}'.format( + error_string = "Unable to bind to port {} on {}: {}".format( self.port, self.address, msg ) log.error(error_string, exc_info=True) raise BindException(error_string) - log.debug('Accepting max %d parallel connections', self.max_clients) + log.debug("Accepting max %d parallel connections", self.max_clients) self.skt.listen(self.max_clients) self.thread_serve = threading.Thread(target=self._serve_clients) self.thread_serve.start() def receive(self): - ''' + """ Return one message dequeued from the listen buffer. - ''' + """ while self.buffer.empty() and self.__up: # This sequence is skipped when the buffer is not empty. sleep_ms = random.randint(0, 1000) @@ -163,16 +167,16 @@ def receive(self): time.sleep(sleep_ms / 1000.0) if not self.buffer.empty(): return self.buffer.get(block=False) - return '', '' + return "", "" def stop(self): - ''' + """ Closing the socket. - ''' - log.info('Stopping the TCP listener') + """ + log.info("Stopping the TCP listener") self.__up = False try: self.skt.shutdown(socket.SHUT_RDWR) except socket.error: - log.error('The following error may not be critical:', exc_info=True) + log.error("The following error may not be critical:", exc_info=True) self.skt.close() diff --git a/napalm_logs/listener/udp.py b/napalm_logs/listener/udp.py index 5c16e6d7..b8e161c0 100644 --- a/napalm_logs/listener/udp.py +++ b/napalm_logs/listener/udp.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Syslog UDP listener for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -25,61 +25,61 @@ class UDPListener(ListenerBase): - ''' + """ UDP syslog listener class. 
- ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - self.address = kwargs['address'] + if kwargs.get("address"): + self.address = kwargs["address"] else: self.address = address - if kwargs.get('port'): - self.port = kwargs['port'] + if kwargs.get("port"): + self.port = kwargs["port"] else: self.port = port - self.buffer_size = kwargs.get('buffer_size', BUFFER_SIZE) - self.reuse_port = kwargs.get('reuse_port', REUSE_PORT) - log.debug('Buffer size: %d', self.buffer_size) + self.buffer_size = kwargs.get("buffer_size", BUFFER_SIZE) + self.reuse_port = kwargs.get("reuse_port", REUSE_PORT) + log.debug("Buffer size: %d", self.buffer_size) def start(self): - ''' + """ Create the UDP listener socket. - ''' - if ':' in self.address: + """ + if ":" in self.address: self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) else: self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if self.reuse_port: self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if hasattr(socket, 'SO_REUSEPORT'): + if hasattr(socket, "SO_REUSEPORT"): self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) else: - log.error('SO_REUSEPORT not supported') + log.error("SO_REUSEPORT not supported") try: self.skt.bind((self.address, int(self.port))) except socket.error as msg: - error_string = 'Unable to bind to port {} on {}: {}'.format( + error_string = "Unable to bind to port {} on {}: {}".format( self.port, self.address, msg ) log.error(error_string, exc_info=True) raise BindException(error_string) def receive(self): - ''' + """ Return the message received and the address. - ''' + """ try: msg, addr = self.skt.recvfrom(self.buffer_size) except socket.error as error: - log.error('Received listener socket error: %s', error, exc_info=True) + log.error("Received listener socket error: %s", error, exc_info=True) raise ListenerException(error) - log.debug('[%s] Received %s from %s', msg, addr, time.time()) + log.debug("[%s] Received %s from %s", time.time(), msg, addr) return msg, addr[0] def stop(self): - ''' + """ Shut down the UDP listener. - ''' - log.info('Stopping the UDP listener') + """ + log.info("Stopping the UDP listener") self.skt.close() diff --git a/napalm_logs/listener/zeromq.py b/napalm_logs/listener/zeromq.py index df1160a7..8428466a 100644 --- a/napalm_logs/listener/zeromq.py +++ b/napalm_logs/listener/zeromq.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" ZeroMQ listener for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -25,39 +25,39 @@ class ZMQListener(ListenerBase): - ''' + """ ZMQ listener class.
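Reviewer note: a quick smoke test for the UDP listener above, assuming napalm-logs is listening on the default 0.0.0.0:514; the payload reuses the junos FAILED_ALLOCATING_PACKET_BUFFER line added earlier in this diff.

    import socket

    skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    skt.sendto(
        b"<30>Jan 24 02:50:31 r1 fpc0: (buf alloc) failed allocating packet buffer",
        ("127.0.0.1", 514),
    )
    skt.close()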
- ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - address = kwargs['address'] - if kwargs.get('port'): - port = kwargs['port'] + if kwargs.get("address"): + address = kwargs["address"] + if kwargs.get("port"): + port = kwargs["port"] self.address = address self.port = port - self.hwm = kwargs.get('hwm') - self.keepalive = kwargs.get('keepalive', 1) - self.keepalive_idle = kwargs.get('keepalive_idle', 300) - self.keepalive_interval = kwargs.get('keepalive_interval', -1) - self.recvtimeout = kwargs.get('timeout') - self.protocol = kwargs.get('protocol', 'tcp') - self.type = kwargs.get('socket_type', 'PULL') + self.hwm = kwargs.get("hwm") + self.keepalive = kwargs.get("keepalive", 1) + self.keepalive_idle = kwargs.get("keepalive_idle", 300) + self.keepalive_interval = kwargs.get("keepalive_interval", -1) + self.recvtimeout = kwargs.get("timeout") + self.protocol = kwargs.get("protocol", "tcp") + self.type = kwargs.get("socket_type", "PULL") def start(self): - ''' + """ Startup the zmq consumer. - ''' + """ zmq_uri = ( - '{protocol}://{address}:{port}'.format( + "{protocol}://{address}:{port}".format( protocol=self.protocol, address=self.address, port=self.port ) if self.port - else '{protocol}://{address}'.format( # noqa + else "{protocol}://{address}".format( # noqa protocol=self.protocol, address=self.address ) ) - log.debug('ZMQ URI: %s', zmq_uri) + log.debug("ZMQ URI: %s", zmq_uri) self.ctx = zmq.Context() if hasattr(zmq, self.type): skt_type = getattr(zmq, self.type) @@ -66,42 +66,39 @@ def start(self): self.sub = self.ctx.socket(skt_type) self.sub.connect(zmq_uri) if self.hwm is not None: - try: - self.sub.setsockopt(zmq.HWM, self.hwm) - except AttributeError: - self.sub.setsockopt(zmq.RCVHWM, self.hwm) + self.sub.setsockopt(zmq.RCVHWM, self.hwm) if self.recvtimeout is not None: - log.debug('Setting RCVTIMEO to %d', self.recvtimeout) + log.debug("Setting RCVTIMEO to %d", self.recvtimeout) self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout) if self.keepalive is not None: - log.debug('Setting TCP_KEEPALIVE to %d', self.keepalive) + log.debug("Setting TCP_KEEPALIVE to %d", self.keepalive) self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive) if self.keepalive_idle is not None: - log.debug('Setting TCP_KEEPALIVE_IDLE to %d', self.keepalive_idle) + log.debug("Setting TCP_KEEPALIVE_IDLE to %d", self.keepalive_idle) self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle) if self.keepalive_interval is not None: - log.debug('Setting TCP_KEEPALIVE_INTVL to %d', self.keepalive_interval) + log.debug("Setting TCP_KEEPALIVE_INTVL to %d", self.keepalive_interval) self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval) def receive(self): - ''' + """ Return the message received. ..note:: In ZMQ we are unable to get the address where we got the message from. - ''' + """ try: msg = self.sub.recv() except zmq.Again as error: - log.error('Unable to receive messages: %s', error, exc_info=True) + log.error("Unable to receive messages: %s", error, exc_info=True) raise ListenerException(error) - log.debug('[%s] Received %s', time.time(), msg) - return msg, '' + log.debug("[%s] Received %s", time.time(), msg) + return msg, "" def stop(self): - ''' + """ Shutdown zmq listener. 
- ''' - log.info('Stopping the zmq listener class') + """ + log.info("Stopping the zmq listener class") self.sub.close() self.ctx.term() diff --git a/napalm_logs/listener_proc.py b/napalm_logs/listener_proc.py index b2289cb4..727322e0 100644 --- a/napalm_logs/listener_proc.py +++ b/napalm_logs/listener_proc.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Listener worker process -''' +""" from __future__ import absolute_import # Import python stdlib @@ -26,9 +26,9 @@ class NapalmLogsListenerProc(NapalmLogsProc): - ''' + """ Listener sub-process class. - ''' + """ def __init__( self, @@ -48,51 +48,46 @@ def __init__( self.listener_opts = listener_opts or {} def _exit_gracefully(self, signum, _): - log.debug('Caught signal in the listener process') + log.debug("Caught signal in the listener process") self.stop() def _setup_listener(self): - ''' + """ Setup the listener. - ''' + """ listener_class = get_listener(self._listener_type) - self.address = self.listener_opts.pop('address', self.address) - self.port = self.listener_opts.pop('port', self.port) + self.address = self.listener_opts.pop("address", self.address) + self.port = self.listener_opts.pop("port", self.port) self.listener = listener_class(self.address, self.port, **self.listener_opts) def _setup_ipc(self): - ''' + """ Setup the listener IPC pusher. - ''' - log.debug('Setting up the listener IPC pusher') + """ + log.debug("Setting up the listener IPC pusher") self.ctx = zmq.Context() self.pub = self.ctx.socket(zmq.PUSH) self.pub.connect(LST_IPC_URL) - log.debug('Setting HWM for the listener: %d', self.opts['hwm']) - try: - self.pub.setsockopt(zmq.HWM, self.opts['hwm']) - # zmq 2 - except AttributeError: - # zmq 3 - self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) + log.debug("Setting HWM for the listener: %d", self.opts["hwm"]) + self.pub.setsockopt(zmq.SNDHWM, self.opts["hwm"]) def start(self): - ''' + """ Listen to messages and publish them. - ''' + """ # counter metrics for messages c_logs_ingested = Counter( - 'napalm_logs_listener_logs_ingested', - 'Count of ingested log messages', - ['listener_type', 'address', 'port'], + "napalm_logs_listener_logs_ingested", + "Count of ingested log messages", + ["listener_type", "address", "port"], ) c_messages_published = Counter( - 'napalm_logs_listener_messages_published', - 'Count of published messages', - ['listener_type', 'address', 'port'], + "napalm_logs_listener_messages_published", + "Count of published messages", + ["listener_type", "address", "port"], ) self._setup_ipc() - log.debug('Using the %s listener', self._listener_type) + log.debug("Using the %s listener", self._listener_type) self._setup_listener() self.listener.start() # Start suicide polling thread @@ -107,17 +102,17 @@ def start(self): log_message, log_source = self.listener.receive() except ListenerException as lerr: if self.__up is False: - log.info('Exiting on process shutdown') + log.info("Exiting on process shutdown") return else: log.error(lerr, exc_info=True) raise NapalmLogsExit(lerr) log.debug( - 'Received %s from %s. Queueing to the server.', log_message, log_source + "Received %s from %s. Queueing to the server.", log_message, log_source ) if not log_message: log.info( - 'Empty message received from %s. Not queueing to the server.', + "Empty message received from %s.
Not queueing to the server.", log_source, ) continue @@ -130,7 +125,7 @@ def start(self): ).inc() def stop(self): - log.info('Stopping the listener process') + log.info("Stopping the listener process") self.__up = False self.pub.close() self.ctx.term() diff --git a/napalm_logs/proc.py b/napalm_logs/proc.py index 3709daff..2270a513 100644 --- a/napalm_logs/proc.py +++ b/napalm_logs/proc.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Base worker process -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -14,14 +14,14 @@ class NapalmLogsProc: - ''' + """ Sub-process base class. - ''' + """ def _suicide_when_without_parent(self, parent_pid): - ''' + """ Kill this process when the parent died. - ''' + """ while True: time.sleep(5) try: @@ -31,5 +31,5 @@ ... # Forcibly exit # Regular sys.exit raises an exception self.stop() - log.warning('The parent is not alive, exiting.') + log.warning("The parent is not alive, exiting.") os._exit(999) diff --git a/napalm_logs/pub_proxy.py b/napalm_logs/pub_proxy.py index 91913a73..8a67f392 100644 --- a/napalm_logs/pub_proxy.py +++ b/napalm_logs/pub_proxy.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Internal IPC proxy worker process -''' +""" from __future__ import absolute_import # Import python stdlib @@ -25,50 +25,40 @@ class NapalmLogsPublisherProxy(NapalmLogsProc): - ''' + """ Internal IPC proxy sub-process class. - ''' + """ def __init__(self, hwm): self.hwm = hwm self.__up = False def _exit_gracefully(self, signum, _): - log.debug('Caught signal in the internal proxy process') + log.debug("Caught signal in the internal proxy process") self.stop() def _setup_ipc(self): - ''' + """ Setup the IPC PUB and SUB sockets for the proxy. - ''' - log.debug('Setting up the internal IPC proxy') + """ + log.debug("Setting up the internal IPC proxy") self.ctx = zmq.Context() # Frontend self.sub = self.ctx.socket(zmq.SUB) self.sub.bind(PUB_PX_IPC_URL) - self.sub.setsockopt(zmq.SUBSCRIBE, b'') - log.debug('Setting HWM for the proxy frontend: %d', self.hwm) - try: - self.sub.setsockopt(zmq.HWM, self.hwm) - # zmq 2 - except AttributeError: - # zmq 3 - self.sub.setsockopt(zmq.SNDHWM, self.hwm) + self.sub.setsockopt(zmq.SUBSCRIBE, b"") + log.debug("Setting HWM for the proxy frontend: %d", self.hwm) + self.sub.setsockopt(zmq.RCVHWM, self.hwm) # Backend self.pub = self.ctx.socket(zmq.PUB) self.pub.bind(PUB_IPC_URL) - log.debug('Setting HWM for the proxy backend: %d', self.hwm) - try: - self.pub.setsockopt(zmq.HWM, self.hwm) - # zmq 2 - except AttributeError: - # zmq 3 - self.pub.setsockopt(zmq.SNDHWM, self.hwm) + log.debug("Setting HWM for the proxy backend: %d", self.hwm) + self.pub.setsockopt(zmq.SNDHWM, self.hwm) def start(self): - ''' + """ Listen to messages and publish them.
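Reviewer note: stripped of the metrics and signal handling, the proxy's start() below reduces to the classic SUB-to-PUB forwarder; the IPC URLs are the defaults from napalm_logs.config.

    import zmq
    from napalm_logs.config import PUB_PX_IPC_URL, PUB_IPC_URL

    ctx = zmq.Context()
    frontend = ctx.socket(zmq.SUB)
    frontend.bind(PUB_PX_IPC_URL)
    frontend.setsockopt(zmq.SUBSCRIBE, b"")  # forward every device message
    backend = ctx.socket(zmq.PUB)
    backend.bind(PUB_IPC_URL)
    zmq.proxy(frontend, backend)             # blocks until the context terminates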
- ''' + """ self._setup_ipc() # Start suicide polling thread thread = threading.Thread( @@ -80,14 +70,14 @@ def start(self): zmq.proxy(self.sub, self.pub) except zmq.ZMQError as error: if self.__up is False: - log.info('Exiting on process shutdown') + log.info("Exiting on process shutdown") return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) def stop(self): - log.info('Stopping the internal IPC proxy') + log.info("Stopping the internal IPC proxy") self.__up = False self.sub.close() self.pub.close() diff --git a/napalm_logs/publisher.py b/napalm_logs/publisher.py index e77ea12b..36680c1d 100644 --- a/napalm_logs/publisher.py +++ b/napalm_logs/publisher.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Listener worker process -''' +""" from __future__ import absolute_import # Import pythond stdlib @@ -32,9 +32,9 @@ class NapalmLogsPublisherProc(NapalmLogsProc): - ''' + """ publisher sub-process class. - ''' + """ def __init__( self, @@ -52,65 +52,60 @@ def __init__( self.__up = False self.opts = opts self.pub_id = pub_id - self.address = publisher_opts.pop('address', None) or address - self.port = publisher_opts.pop('port', None) or port - log.debug('Publishing to %s:%d', self.address, self.port) - self.serializer = publisher_opts.get('serializer') or serializer + self.address = publisher_opts.pop("address", None) or address + self.port = publisher_opts.pop("port", None) or port + log.debug("Publishing to %s:%d", self.address, self.port) + self.serializer = publisher_opts.get("serializer") or serializer self.default_serializer = self.serializer == SERIALIZER - self.disable_security = publisher_opts.get('disable_security', disable_security) + self.disable_security = publisher_opts.get("disable_security", disable_security) self._transport_type = transport_type self.publisher_opts = publisher_opts - self.error_whitelist = publisher_opts.get('error_whitelist', []) - self.error_blacklist = publisher_opts.get('error_blacklist', []) + self.error_whitelist = publisher_opts.get("error_whitelist", []) + self.error_blacklist = publisher_opts.get("error_blacklist", []) if not disable_security: self.__safe = nacl.secret.SecretBox(private_key) self.__signing_key = signing_key - self._strip_message_details = publisher_opts.pop('strip_message_details', False) + self._strip_message_details = publisher_opts.pop("strip_message_details", False) self._setup_transport() def _exit_gracefully(self, signum, _): - log.debug('Caught signal in publisher process') + log.debug("Caught signal in publisher process") self.stop() def _setup_ipc(self): - ''' + """ Subscribe to the pub IPC and publish the messages on the right transport. - ''' + """ self.ctx = zmq.Context() log.debug( - 'Setting up the %s publisher subscriber #%d', + "Setting up the %s publisher subscriber #%d", self._transport_type, self.pub_id, ) self.sub = self.ctx.socket(zmq.SUB) self.sub.connect(PUB_IPC_URL) - self.sub.setsockopt(zmq.SUBSCRIBE, b'') - try: - self.sub.setsockopt(zmq.HWM, self.opts['hwm']) - # zmq 2 - except AttributeError: - # zmq 3 - self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) + self.sub.setsockopt(zmq.SUBSCRIBE, b"") + self.sub.setsockopt(zmq.RCVHWM, self.opts["hwm"]) def _setup_transport(self): - ''' + """ Setup the transport. 
- ''' - if 'RAW' in self.error_whitelist: + """ + if "RAW" in self.error_whitelist: log.info( - '%s %d will publish partially parsed messages', + "%s %d will publish partially parsed messages", self._transport_type, self.pub_id, ) - if 'UNKNOWN' in self.error_whitelist: + if "UNKNOWN" in self.error_whitelist: log.info( - '%s %d will publish unknown messages', self._transport_type, self.pub_id + "%s %d will publish unknown messages", self._transport_type, self.pub_id ) transport_class = get_transport(self._transport_type) log.debug( - 'Serializing the object for %s using %s', + "Serializing the object for %s using %s", self._transport_type, self.serializer, ) @@ -118,15 +113,15 @@ def _setup_transport(self): self.transport = transport_class(self.address, self.port, **self.publisher_opts) self.__transport_encrypt = True if ( - hasattr(self.transport, 'NO_ENCRYPT') - and getattr(self.transport, 'NO_ENCRYPT') is True + hasattr(self.transport, "NO_ENCRYPT") + and getattr(self.transport, "NO_ENCRYPT") is True ): self.__transport_encrypt = False def _prepare(self, serialized_obj): - ''' + """ Prepare the object to be sent over the untrusted channel. - ''' + """ # generating a nonce nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE) # encrypting using the nonce @@ -146,24 +141,24 @@ def _serialize(self, obj, bin_obj): return self.serializer_fun(obj) def start(self): - ''' + """ Listen to messages and publish them. - ''' + """ # metrics napalm_logs_publisher_received_messages = Counter( - 'napalm_logs_publisher_received_messages', + "napalm_logs_publisher_received_messages", "Count of messages received by the publisher", - ['publisher_type', 'address', 'port'], + ["publisher_type", "address", "port"], ) napalm_logs_publisher_whitelist_blacklist_check_fail = Counter( - 'napalm_logs_publisher_whitelist_blacklist_check_fail', + "napalm_logs_publisher_whitelist_blacklist_check_fail", "Count of messages which fail the whitelist/blacklist check", - ['publisher_type', 'address', 'port'], + ["publisher_type", "address", "port"], ) napalm_logs_publisher_messages_published = Counter( - 'napalm_logs_publisher_messages_published', + "napalm_logs_publisher_messages_published", "Count of published messages", - ['publisher_type', 'address', 'port'], + ["publisher_type", "address", "port"], ) self._setup_ipc() # Start suicide polling thread @@ -179,30 +174,30 @@ def start(self): bin_obj = self.sub.recv() except zmq.ZMQError as error: if self.__up is False: - log.info('Exiting on process shutdown') + log.info("Exiting on process shutdown") return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) obj = umsgpack.unpackb(bin_obj) if self._strip_message_details: - obj.pop('message_details', None) + obj.pop("message_details", None) bin_obj = self.serializer_fun(obj) napalm_logs_publisher_received_messages.labels( publisher_type=self._transport_type, address=self.address, port=self.port, ).inc() - if not napalm_logs.ext.check_whitelist_blacklist( - obj['error'], + if not napalm_logs.utils.check_whitelist_blacklist( + obj["error"], whitelist=self.error_whitelist, blacklist=self.error_blacklist, ): # Apply the whitelist / blacklist logic # If it doesn't match, jump over. log.debug( - 'This error type is %s. Skipping for %s #%d', - obj['error'], + "This error type is %s. 
Skipping for %s #%d", + obj["error"], self._transport_type, self.pub_id, ) @@ -213,7 +208,7 @@ def start(self): ).inc() continue serialized_obj = self._serialize(obj, bin_obj) - log.debug('Publishing the OC object') + log.debug("Publishing the OC object") if not self.disable_security and self.__transport_encrypt: # Encrypt only when needed. serialized_obj = self._prepare(serialized_obj) @@ -226,7 +221,7 @@ def start(self): def stop(self): log.info( - 'Stopping publisher process %s (publisher #%d)', + "Stopping publisher process %s (publisher #%d)", self._transport_type, self.pub_id, ) diff --git a/napalm_logs/scripts/__init__.py b/napalm_logs/scripts/__init__.py index a8922ef4..f8b354a4 100644 --- a/napalm_logs/scripts/__init__.py +++ b/napalm_logs/scripts/__init__.py @@ -1,4 +1,4 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs scripts -''' +""" diff --git a/napalm_logs/scripts/cli.py b/napalm_logs/scripts/cli.py index ccfa8f9b..7a5378e0 100644 --- a/napalm_logs/scripts/cli.py +++ b/napalm_logs/scripts/cli.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -''' +""" Kick off the napalm-logs engine. -''' +""" # Import python stdlib import os @@ -17,7 +17,6 @@ # Import napalm-logs import napalm_logs import napalm_logs.config as defaults -import napalm_logs.ext.six as six log = logging.getLogger(__name__) @@ -32,27 +31,27 @@ def take_action(self, action, dest, *args, **kwargs): class OptionParser(optparse.OptionParser, object): VERSION = napalm_logs.__version__ - usage = 'napalm-logs [options]' - epilog = 'Documentation at: http://napalm-logs.readthedocs.io/en/latest/' + usage = "napalm-logs [options]" + epilog = "Documentation at: http://napalm-logs.readthedocs.io/en/latest/" description = ( - 'Process listening to syslog messages from network device' - 'from various sources, and publishing JSON serializable Python objects, ' - 'in a vendor agnostic shape. The output objects are structured following' - 'the OpenConfig or IETF YANG models.' + "Process listening to syslog messages from network device" + "from various sources, and publishing JSON serializable Python objects, " + "in a vendor agnostic shape. The output objects are structured following" + "the OpenConfig or IETF YANG models." 
) def __init__(self, *args, **kwargs): - kwargs.setdefault('version', '%prog {0}'.format(self.VERSION)) - kwargs.setdefault('usage', self.usage) - kwargs.setdefault('description', self.description) - kwargs.setdefault('epilog', self.epilog) - kwargs.setdefault('option_class', CustomOption) + kwargs.setdefault("version", "%prog {0}".format(self.VERSION)) + kwargs.setdefault("usage", self.usage) + kwargs.setdefault("description", self.description) + kwargs.setdefault("epilog", self.epilog) + kwargs.setdefault("option_class", CustomOption) optparse.OptionParser.__init__(self, *args, **kwargs) self.add_option( - '-v', - action='store_true', - dest='version', - help='Show version number and exit.', + "-v", + action="store_true", + dest="version", + help="Show version number and exit.", ) def add_option_group(self, *args, **kwargs): @@ -62,188 +61,188 @@ def add_option_group(self, *args, **kwargs): def parse_args(self, args=None, values=None): options, args = optparse.OptionParser.parse_args(self, args, values) - if 'args_stdin' in options.__dict__ and options.args_stdin is True: + if "args_stdin" in options.__dict__ and options.args_stdin is True: new_inargs = sys.stdin.readlines() - new_inargs = [arg.rstrip('\r\n') for arg in new_inargs] + new_inargs = [arg.rstrip("\r\n") for arg in new_inargs] new_options, new_args = optparse.OptionParser.parse_args(self, new_inargs) options.__dict__.update(new_options.__dict__) args.extend(new_args) self.options, self.args = options, args def print_version(self): - print('napalm-logs {0}'.format(self.VERSION)) + print("napalm-logs {0}".format(self.VERSION)) class NLOptionParser(OptionParser, object): def prepare(self): self.add_option( - '-c', - '--config-file', - dest='config_file', + "-c", + "--config-file", + dest="config_file", help=( - 'Config file absolute path. Default: {0}'.format(defaults.CONFIG_FILE) + "Config file absolute path. Default: {0}".format(defaults.CONFIG_FILE) ), ) self.add_option( - '-a', - '--address', - dest='address', - help=('Listener address. Default: {0}'.format(defaults.ADDRESS)), + "-a", + "--address", + dest="address", + help=("Listener address. Default: {0}".format(defaults.ADDRESS)), ) self.add_option( - '--config-path', dest='config_path', help=('Device config path.') + "--config-path", dest="config_path", help=("Device config path.") ) self.add_option( - '--extension-config-path', - dest='extension_config_path', - help=('Extension config path.'), + "--extension-config-path", + dest="extension_config_path", + help=("Extension config path."), ) self.add_option( - '-p', - '--port', - dest='port', + "-p", + "--port", + dest="port", type=int, - help=('Listener bind port. Default: {0}'.format(defaults.PORT)), + help=("Listener bind port. Default: {0}".format(defaults.PORT)), ) self.add_option( - '--listener', - dest='listener', - help=('Listener type. Default: {0}'.format(defaults.LISTENER)), + "--listener", + dest="listener", + help=("Listener type. Default: {0}".format(defaults.LISTENER)), ) self.add_option( - '-s', - '--serializer', - dest='serializer', - help=('Serializer type. Default: {0}'.format(defaults.SERIALIZER)), + "-s", + "--serializer", + dest="serializer", + help=("Serializer type. Default: {0}".format(defaults.SERIALIZER)), ) self.add_option( - '--publisher', - dest='publisher', - help=('Publish transport. Default: {0}'.format(defaults.PUBLISHER)), + "--publisher", + dest="publisher", + help=("Publish transport. 
Default: {0}".format(defaults.PUBLISHER)), ) self.add_option( - '--publish-address', - dest='publish_address', + "--publish-address", + dest="publish_address", help=( - 'Publisher bind address. Default: {0}'.format(defaults.PUBLISH_ADDRESS) + "Publisher bind address. Default: {0}".format(defaults.PUBLISH_ADDRESS) ), ) self.add_option( - '--publish-port', - dest='publish_port', + "--publish-port", + dest="publish_port", type=int, - help=('Publisher bind port. Default: {0}'.format(defaults.PUBLISH_PORT)), + help=("Publisher bind port. Default: {0}".format(defaults.PUBLISH_PORT)), ) self.add_option( - '--auth-address', - dest='auth_address', + "--auth-address", + dest="auth_address", help=( - 'Authenticator bind address. Default: {0}'.format(defaults.AUTH_ADDRESS) + "Authenticator bind address. Default: {0}".format(defaults.AUTH_ADDRESS) ), ) self.add_option( - '--auth-port', - dest='auth_port', + "--auth-port", + dest="auth_port", type=int, - help=('Authenticator bind port. Default: {0}'.format(defaults.AUTH_PORT)), + help=("Authenticator bind port. Default: {0}".format(defaults.AUTH_PORT)), ) self.add_option( - '--enable-metrics', - dest='metrics_enabled', + "--enable-metrics", + dest="metrics_enabled", action="store_true", default=False, - help=('Enable metrics collection and exporting (Prometheus metrics).'), + help=("Enable metrics collection and exporting (Prometheus metrics)."), ) self.add_option( - '--metrics-address', - dest='metrics_address', + "--metrics-address", + dest="metrics_address", help=( - 'Prometheus metrics HTTP server listener address. Default: {0}'.format( + "Prometheus metrics HTTP server listener address. Default: {0}".format( defaults.METRICS_ADDRESS ) ), ) self.add_option( - '--metrics-port', - dest='metrics_port', + "--metrics-port", + dest="metrics_port", type=int, help=( - 'Prometheus metrics HTTP server listener bind port. Default: {0}'.format( + "Prometheus metrics HTTP server listener bind port. Default: {0}".format( defaults.METRICS_PORT ) ), ) self.add_option( - '--metrics-dir', - dest='metrics_dir', + "--metrics-dir", + dest="metrics_dir", help=( - 'Directory to store metrics in. Must be writable by the processes. ' - 'Default: {0}'.format(defaults.METRICS_DIR) + "Directory to store metrics in. Must be writable by the processes. " + "Default: {0}".format(defaults.METRICS_DIR) ), ) self.add_option( - '--certificate', - dest='certificate', + "--certificate", + dest="certificate", help=( - 'Absolute path to the SSL certificate used for client authentication.' + "Absolute path to the SSL certificate used for client authentication." ), ) self.add_option( - '--keyfile', dest='keyfile', help=('Absolute path to the SSL keyfile') + "--keyfile", dest="keyfile", help=("Absolute path to the SSL keyfile") ) self.add_option( - '--disable-security', - dest='disable_security', + "--disable-security", + dest="disable_security", action="store_true", default=False, - help=('Disable encryption and data signing when publishing.'), + help=("Disable encryption and data signing when publishing."), ) self.add_option( - '-l', - '--log-level', - dest='log_level', - help=('Logging level. Default: {0}'.format(defaults.LOG_LEVEL)), + "-l", + "--log-level", + dest="log_level", + help=("Logging level. Default: {0}".format(defaults.LOG_LEVEL)), ) self.add_option( - '--log-file', - dest='log_file', - help=('Logging file. Default: {0}'.format(defaults.LOG_FILE)), + "--log-file", + dest="log_file", + help=("Logging file. 
Default: {0}".format(defaults.LOG_FILE)), ) self.add_option( - '--log-format', - dest='log_format', - help=('Logging format. Default: {0}'.format(defaults.LOG_FORMAT)), + "--log-format", + dest="log_format", + help=("Logging format. Default: {0}".format(defaults.LOG_FORMAT)), ) self.add_option( - '--hwm', - dest='hwm', + "--hwm", + dest="hwm", type=int, help=( - 'Internal ZeroMQ high water mark. ' - 'This option controls the length of the internal message queue,' - 'and it tunes the capacity of the napalm-logs engine. ' - 'For high performance, this number can be increased, but implies' - 'higher memory consumption. ' - 'Default: {0}'.format(defaults.ZMQ_INTERNAL_HWM) + "Internal ZeroMQ high water mark. " + "This option controls the length of the internal message queue," + "and it tunes the capacity of the napalm-logs engine. " + "For high performance, this number can be increased, but implies" + "higher memory consumption. " + "Default: {0}".format(defaults.ZMQ_INTERNAL_HWM) ), ) self.add_option( - '-w', - '--device-worker-processes', - dest='device_worker_processes', + "-w", + "--device-worker-processes", + dest="device_worker_processes", type=int, - help='Number of worker processes per device. Default: 1.', + help="Number of worker processes per device. Default: 1.", default=1, ) def convert_env_dict(self, d): for k, v in d.items(): - if isinstance(v, six.string_type): - if not v.startswith('${') or not v.endswith('}'): + if isinstance(v, str): + if not v.startswith("${") or not v.endswith("}"): continue if not os.environ.get(v[2:-1]): log.error( - 'No env variable found for %s, please check your config file', + "No env variable found for %s, please check your config file", v[2:-1], ) sys.exit(1) @@ -255,12 +254,12 @@ def convert_env_dict(self, d): def convert_env_list(self, lst): for name, value in enumerate(lst): - if isinstance(value, six.string_type): - if not value.startswith('${') or not value.endswith('}'): + if isinstance(value, str): + if not value.startswith("${") or not value.endswith("}"): continue if not os.environ.get(value[2:-1]): log.error( - 'No env variable found for %s, please check your config file', + "No env variable found for %s, please check your config file", value[2:-1], ) sys.exit(1) @@ -273,10 +272,10 @@ def convert_env_list(self, lst): def read_config_file(self, filepath): config = {} try: - with open(filepath, 'r') as fstream: + with open(filepath, "r") as fstream: config = yaml.load(fstream, Loader=yaml.FullLoader) except (IOError, yaml.YAMLError): - log.info('Unable to read from %s', filepath) + log.info("Unable to read from %s", filepath) # Convert any env variables self.convert_env_dict(config) return config @@ -290,60 +289,60 @@ def parse(self, log, screen_handler): config_file_path = self.options.config_file or defaults.CONFIG_FILE file_cfg = self.read_config_file(config_file_path) log_file = ( - self.options.log_file or file_cfg.get('log_file') or defaults.LOG_FILE + self.options.log_file or file_cfg.get("log_file") or defaults.LOG_FILE ) log_lvl = ( - self.options.log_level or file_cfg.get('log_level') or defaults.LOG_LEVEL + self.options.log_level or file_cfg.get("log_level") or defaults.LOG_LEVEL ) log_fmt = ( - self.options.log_format or file_cfg.get('log_format') or defaults.LOG_FORMAT + self.options.log_format or file_cfg.get("log_format") or defaults.LOG_FORMAT ) if log_file.lower() not in defaults.LOG_FILE_CLI_OPTIONS: log_file_dir = os.path.dirname(log_file) if not os.path.isdir(log_file_dir): - log.warning('%s does not exist, trying to 
create', log_file_dir) + log.warning("%s does not exist, trying to create", log_file_dir) try: os.mkdir(log_file_dir) except OSError: - log.error('Unable to create %s', log_file_dir, exc_info=True) + log.error("Unable to create %s", log_file_dir, exc_info=True) sys.exit(0) log.removeHandler(screen_handler) # remove printing to the screen logging.basicConfig( filename=log_file, - level=defaults.LOGGING_LEVEL.get(log_lvl.lower(), 'warning'), + level=defaults.LOGGING_LEVEL.get(log_lvl.lower(), "warning"), format=log_fmt, ) # log to filecm - cert = self.options.certificate or file_cfg.get('certificate') + cert = self.options.certificate or file_cfg.get("certificate") disable_security = self.options.disable_security or file_cfg.get( - 'disable_security', False + "disable_security", False ) metrics_enabled = self.options.metrics_enabled or file_cfg.get( - 'metrics_enabled', False + "metrics_enabled", False ) if not cert and disable_security is False: - log.error('certfile must be specified for server-side operations') - raise ValueError('Please specify a valid SSL certificate.') + log.error("certfile must be specified for server-side operations") + raise ValueError("Please specify a valid SSL certificate.") # For each module we need to merge the defaults with the # config file, but prefer the config file listener_opts = defaults.LISTENER_OPTS publisher_opts = defaults.PUBLISHER_OPTS - device_whitelist = file_cfg.get('device_whitelist', []) - device_blacklist = file_cfg.get('device_blacklist', []) - buffer_cfg = file_cfg.get('buffer', {}) + device_whitelist = file_cfg.get("device_whitelist", []) + device_blacklist = file_cfg.get("device_blacklist", []) + buffer_cfg = file_cfg.get("buffer", {}) listener = [] if self.options.listener: listener = [{self.options.listener: {}}] - if 'listener' in file_cfg: - listener_cfg = file_cfg['listener'] + if "listener" in file_cfg: + listener_cfg = file_cfg["listener"] if isinstance(listener_cfg, dict): for listener_name, listener_opts in listener_cfg.items(): listener.append({listener_name: listener_opts}) - elif isinstance(listener_cfg, six.string_type): + elif isinstance(listener_cfg, str): listener = [{listener_cfg: {}}] elif isinstance(listener_cfg, list): for lst_cfg in listener_cfg: - if isinstance(lst_cfg, six.string_type): + if isinstance(lst_cfg, str): listener.append({lst_cfg: {}}) elif isinstance(lst_cfg, dict): listener.append(lst_cfg) @@ -353,16 +352,16 @@ def parse(self, log, screen_handler): publisher = [] if self.options.publisher: publisher = [{self.options.publisher: {}}] - if 'publisher' in file_cfg: - publisher_cfg = file_cfg['publisher'] + if "publisher" in file_cfg: + publisher_cfg = file_cfg["publisher"] if isinstance(publisher_cfg, dict): for publisher_name, publisher_opts in publisher_cfg.items(): publisher.append({publisher_name: publisher_opts}) - elif isinstance(publisher_cfg, six.string_type): + elif isinstance(publisher_cfg, str): publisher = [{publisher_cfg: {}}] elif isinstance(publisher_cfg, list): for lst_cfg in publisher_cfg: - if isinstance(lst_cfg, six.string_type): + if isinstance(lst_cfg, str): publisher.append({lst_cfg: {}}) elif isinstance(lst_cfg, dict): publisher.append(lst_cfg) @@ -372,68 +371,68 @@ def parse(self, log, screen_handler): hwm = defaults.ZMQ_INTERNAL_HWM if self.options.hwm is not None: hwm = self.options.hwm - elif file_cfg.get('hwm') is not None: - hwm = file_cfg['hwm'] + elif file_cfg.get("hwm") is not None: + hwm = file_cfg["hwm"] cfg = { - 'address': self.options.address - or 
file_cfg.get('address') + "address": self.options.address + or file_cfg.get("address") or defaults.ADDRESS, - 'port': self.options.port or file_cfg.get('port') or defaults.PORT, - 'listener': listener, - 'publisher': publisher, - 'publish_address': self.options.publish_address - or file_cfg.get('publish_address') + "port": self.options.port or file_cfg.get("port") or defaults.PORT, + "listener": listener, + "publisher": publisher, + "publish_address": self.options.publish_address + or file_cfg.get("publish_address") or defaults.PUBLISH_ADDRESS, # noqa - 'publish_port': self.options.publish_port - or file_cfg.get('publish_port') + "publish_port": self.options.publish_port + or file_cfg.get("publish_port") or defaults.PUBLISH_PORT, # noqa - 'auth_address': self.options.auth_address - or file_cfg.get('auth_address') + "auth_address": self.options.auth_address + or file_cfg.get("auth_address") or defaults.AUTH_ADDRESS, # noqa - 'auth_port': self.options.auth_port - or file_cfg.get('auth_port') + "auth_port": self.options.auth_port + or file_cfg.get("auth_port") or defaults.AUTH_PORT, - 'metrics_enabled': metrics_enabled, - 'metrics_address': self.options.metrics_address - or file_cfg.get('metrics_address') + "metrics_enabled": metrics_enabled, + "metrics_address": self.options.metrics_address + or file_cfg.get("metrics_address") or defaults.METRICS_ADDRESS, - 'metrics_port': self.options.metrics_port - or file_cfg.get('metrics_port') + "metrics_port": self.options.metrics_port + or file_cfg.get("metrics_port") or defaults.METRICS_PORT, - 'metrics_dir': self.options.metrics_dir - or file_cfg.get('metrics_dir') + "metrics_dir": self.options.metrics_dir + or file_cfg.get("metrics_dir") or defaults.METRICS_DIR, - 'certificate': cert, - 'keyfile': self.options.keyfile or file_cfg.get('keyfile'), - 'disable_security': disable_security, - 'config_path': self.options.config_path or file_cfg.get('config_path'), - 'extension_config_path': self.options.extension_config_path - or file_cfg.get('extension_config_path'), - 'log_level': log_lvl, - 'log_format': log_fmt, - 'device_whitelist': device_whitelist, - 'device_blacklist': device_blacklist, - 'hwm': hwm, - 'device_worker_processes': self.options.device_worker_processes - or file_cfg.get('device_worker_processes') + "certificate": cert, + "keyfile": self.options.keyfile or file_cfg.get("keyfile"), + "disable_security": disable_security, + "config_path": self.options.config_path or file_cfg.get("config_path"), + "extension_config_path": self.options.extension_config_path + or file_cfg.get("extension_config_path"), + "log_level": log_lvl, + "log_format": log_fmt, + "device_whitelist": device_whitelist, + "device_blacklist": device_blacklist, + "hwm": hwm, + "device_worker_processes": self.options.device_worker_processes + or file_cfg.get("device_worker_processes") or 1, - 'serializer': self.options.serializer - or file_cfg.get('serializer') + "serializer": self.options.serializer + or file_cfg.get("serializer") or defaults.SERIALIZER, - 'buffer': buffer_cfg, - 'opts': {}, + "buffer": buffer_cfg, + "opts": {}, } for opt, val in file_cfg.items(): if opt not in cfg: - cfg['opts'][opt] = val + cfg["opts"][opt] = val return cfg def _exit_gracefully(signum, _): - ''' + """ Called when a signal is caught and marks exiting variable True - ''' + """ global _up _up = False @@ -442,8 +441,8 @@ def _exit_gracefully(signum, _): def napalm_logs_engine(): - if '' in sys.path: - sys.path.remove('') + if "" in sys.path: + sys.path.remove("") # Temporarily will forward 
the log entries to the screen
     # After reading the config and all good, will write into the right
     # log file.
@@ -466,5 +465,5 @@ def napalm_logs_engine():
     nl.stop_engine()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     napalm_logs_engine()
diff --git a/napalm_logs/serializer/__init__.py b/napalm_logs/serializer/__init__.py
index 3b66d32d..d39a1529 100644
--- a/napalm_logs/serializer/__init__.py
+++ b/napalm_logs/serializer/__init__.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 napalm-logs pluggable serializer.
-'''
+"""
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
@@ -22,26 +22,26 @@
 log = logging.getLogger(__file__)
 
 SERIALIZER_LOOKUP = {
-    'msgpack': umsgpack.packb,
-    'json': json.dumps,
-    'str': str,
-    'yaml': yaml.safe_dump,
-    'pprint': pprint.pformat,
-    '*': umsgpack.packb,  # default serializer
+    "msgpack": umsgpack.packb,
+    "json": json.dumps,
+    "str": str,
+    "yaml": yaml.safe_dump,
+    "pprint": pprint.pformat,
+    "*": umsgpack.packb,  # default serializer
 }
 
 
 def get_serializer(name):
-    '''
+    """
     Return the serializer function.
-    '''
+    """
     try:
-        log.debug('Using %s as serializer', name)
+        log.debug("Using %s as serializer", name)
         return SERIALIZER_LOOKUP[name]
     except KeyError:
-        msg = 'Serializer {} is not available'.format(name)
+        msg = "Serializer {} is not available".format(name)
         log.error(msg, exc_info=True)
         raise InvalidSerializerException(msg)
 
 
-__all__ = ('get_serializer',)
+__all__ = ("get_serializer",)
diff --git a/napalm_logs/server.py b/napalm_logs/server.py
index 53ed89d0..5281ae41 100644
--- a/napalm_logs/server.py
+++ b/napalm_logs/server.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 Server worker process
-'''
+"""
 from __future__ import absolute_import
 
 # Import python stdlib
@@ -19,7 +19,6 @@
 from prometheus_client import Counter
 
 # Import napalm-logs pkgs
-import napalm_logs.ext.six as six
 from napalm_logs.config import LST_IPC_URL
 from napalm_logs.config import DEV_IPC_URL
 from napalm_logs.config import PUB_PX_IPC_URL
@@ -33,9 +32,9 @@
 class NapalmLogsServerProc(NapalmLogsProc):
-    '''
+    """
     Server sub-process class.
-    '''
+    """
 
     def __init__(self, opts, config, started_os_proc, buffer=None):
         self.opts = opts
@@ -47,208 +46,195 @@ def __init__(self, opts, config, started_os_proc, buffer=None):
         self._compile_prefixes()
 
     def _exit_gracefully(self, signum, _):
-        log.debug('Caught signal in server process')
+        log.debug("Caught signal in server process")
         self.stop()
 
     def _setup_ipc(self):
-        '''
+        """
         Setup the IPC pub and sub.
         Subscribe to the listener IPC and publish to the device specific IPC.
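
The serializer registry above is just a name-to-callable mapping, so switching the output format costs a single dict lookup. A trimmed-down sketch of the same pattern, with a made-up payload dict:

    import json
    import pprint

    SERIALIZER_LOOKUP = {
        "json": json.dumps,
        "pprint": pprint.pformat,
    }

    def get_serializer(name):
        """Return the serializer callable registered under ``name``."""
        try:
            return SERIALIZER_LOOKUP[name]
        except KeyError:
            raise ValueError("Serializer {} is not available".format(name))

    serialize = get_serializer("json")
    print(serialize({"host": "edge-router1", "error": "INTERFACE_DOWN"}))
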
-        '''
-        log.debug('Setting up the server IPC puller to receive from the listener')
+        """
+        log.debug("Setting up the server IPC puller to receive from the listener")
         self.ctx = zmq.Context()
         # subscribe to listener
         self.sub = self.ctx.socket(zmq.PULL)
         self.sub.bind(LST_IPC_URL)
-        try:
-            self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
-        # zmq 2
-        except AttributeError:
-            # zmq 3
-            self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
+        self.sub.setsockopt(zmq.RCVHWM, self.opts["hwm"])
         # device publishers
-        log.debug('Creating the router IPC on the server')
+        log.debug("Creating the router IPC on the server")
         self.pub = self.ctx.socket(zmq.ROUTER)
         self.pub.bind(DEV_IPC_URL)
-        try:
-            self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
-        # zmq 2
-        except AttributeError:
-            # zmq 3
-            self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
+        self.pub.setsockopt(zmq.SNDHWM, self.opts["hwm"])
         # Pipe to the publishers
         self.publisher_pub = self.ctx.socket(zmq.PUB)
         self.publisher_pub.connect(PUB_PX_IPC_URL)
-        try:
-            self.publisher_pub.setsockopt(zmq.HWM, self.opts['hwm'])
-        # zmq 2
-        except AttributeError:
-            # zmq 3
-            self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
+        self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts["hwm"])
 
     def _cleanup_buffer(self):
-        '''
+        """
        Periodically clean up the buffer.
-        '''
+        """
         if not self._buffer:
             return
         while True:
             time.sleep(60)
-            log.debug('Cleaning up buffer')
+            log.debug("Cleaning up buffer")
             items = self._buffer.items()
             # The ``items`` function should also clean up the buffer
-            log.debug('Collected items')
+            log.debug("Collected items")
             log.debug(list(items))
 
     def _compile_prefixes(self):
-        '''
+        """
         Create a dict of all OS prefixes and their compiled regexes
-        '''
+        """
         self.compiled_prefixes = {}
         for dev_os, os_config in self.config.items():
             if not os_config:
                 continue
             self.compiled_prefixes[dev_os] = []
-            for prefix in os_config.get('prefixes', []):
-                values = prefix.get('values', {})
-                line = prefix.get('line', '')
-                if prefix.get('__python_fun__'):
+            for prefix in os_config.get("prefixes", []):
+                values = prefix.get("values", {})
+                line = prefix.get("line", "")
+                if prefix.get("__python_fun__"):
                     self.compiled_prefixes[dev_os].append(
                         {
-                            '__python_fun__': prefix['__python_fun__'],
-                            '__python_mod__': prefix['__python_mod__'],
+                            "__python_fun__": prefix["__python_fun__"],
+                            "__python_mod__": prefix["__python_mod__"],
                         }
                     )
                     continue
                 # if python profiler defined for this prefix,
                 # no need to go further, but jump to the next prefix
                 # Add 'pri' and 'message' to the line, and values
-                line = '{{pri}}{}{{message}}'.format(line)
+                line = "{{pri}}{}{{message}}".format(line)
                 # PRI https://tools.ietf.org/html/rfc5424#section-6.2.1
-                values['pri'] = r'\<(\d+)\>'
-                values['message'] = '(.*)'
+                values["pri"] = r"\<(\d+)\>"
+                values["message"] = "(.*)"
                 # We will now figure out which position each value is in so we can use it with the match statement
                 position = {}
                 for key in values.keys():
-                    position[line.find('{' + key + '}')] = key
+                    position[line.find("{" + key + "}")] = key
                 sorted_position = {}
                 for i, elem in enumerate(sorted(position.items())):
                     sorted_position[elem[1]] = i + 1
                 # Escape the line, then remove the escape for the curly braces so they can be used when formatting
-                escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
+                escaped = re.escape(line).replace(r"\{", "{").replace(r"\}", "}")
                 # Replace a whitespace with \s+
-                escaped = escaped.replace(r'\ ', r'\s+')
+                escaped = escaped.replace(r"\ ", r"\s+")
                 self.compiled_prefixes[dev_os].append(
                     {
-                        'prefix': re.compile(escaped.format(**values)),
-                        'prefix_positions': sorted_position,
-                        'raw_prefix': escaped.format(**values),
-                        'values': values,
+                        "prefix": re.compile(escaped.format(**values)),
+                        "prefix_positions": sorted_position,
+                        "raw_prefix": escaped.format(**values),
+                        "values": values,
+                        "state": prefix.get("state"),
+                        "state_tag": prefix.get("state_tag"),
                     }
                 )
         # log.debug('Compiled prefixes')
         # log.debug(self.compiled_prefixes)
 
     def _identify_prefix(self, msg, data):
-        '''
+        """
         Check the message against each OS prefix and if matched return the message dict
-        '''
+        """
         prefix_id = -1
         for prefix in data:
             msg_dict = {}
             prefix_id += 1
             match = None
-            if '__python_fun__' in prefix:
+            if "__python_fun__" in prefix:
                 log.debug(
-                    'Trying to match using the %s custom python profiler',
-                    prefix['__python_mod__'],
+                    "Trying to match using the %s custom python profiler",
+                    prefix["__python_mod__"],
                 )
                 try:
-                    match = prefix['__python_fun__'](msg)
+                    match = prefix["__python_fun__"](msg)
                 except Exception:
                     log.error(
-                        'Exception while parsing %s with the %s python profiler',
+                        "Exception while parsing %s with the %s python profiler",
                         msg,
-                        prefix['__python_mod__'],
+                        prefix["__python_mod__"],
                         exc_info=True,
                     )
             else:
-                log.debug('Matching using YAML-defined profiler:')
-                log.debug(prefix['raw_prefix'])
-                match = prefix['prefix'].search(msg)
+                log.debug("Matching using YAML-defined profiler:")
+                log.debug(prefix["raw_prefix"])
+                match = prefix["prefix"].search(msg)
             if not match:
-                log.debug('Match not found')
+                log.debug("Match not found")
                 continue
-            if '__python_fun__' in prefix:
+            if "__python_fun__" in prefix:
                 log.debug(
-                    '%s matched using the custom python profiler %s',
+                    "%s matched using the custom python profiler %s",
                     msg,
-                    prefix['__python_mod__'],
+                    prefix["__python_mod__"],
                 )
                 msg_dict = match  # the output as-is from the custom function
             else:
-                positions = prefix.get('prefix_positions', {})
-                values = prefix.get('values')
+                positions = prefix.get("prefix_positions", {})
+                values = prefix.get("values")
                 msg_dict = {}
                 for key in values.keys():
                     msg_dict[key] = match.group(positions.get(key))
             # Remove whitespace from the start or end of the message
-            msg_dict['__prefix_id__'] = prefix_id
-            msg_dict['message'] = msg_dict['message'].strip()
+            msg_dict["__prefix_id__"] = prefix_id
+            msg_dict["message"] = msg_dict["message"].strip()
             # The pri has to be an int as it is retrieved using regex '\<(\d+)\>'
-            if 'pri' in msg_dict:
-                msg_dict['facility'] = int(int(msg_dict['pri']) / 8)
-                msg_dict['severity'] = int(
-                    int(msg_dict['pri']) - (msg_dict['facility'] * 8)
+            if "pri" in msg_dict:
+                msg_dict["facility"] = int(int(msg_dict["pri"]) / 8)
+                msg_dict["severity"] = int(
+                    int(msg_dict["pri"]) - (msg_dict["facility"] * 8)
                 )
             return msg_dict
 
     def _identify_os(self, msg):
-        '''
+        """
         Using the prefix of the syslog message,
         we are able to identify the operating system
         and then continue parsing.
-        '''
+        """
         ret = []
         for dev_os, data in self.compiled_prefixes.items():
             # TODO Should we prevent attempting to determine the OS for the blacklisted?
             # [mircea] I think it's good from a logging perspective to know at least
             # that the server found the matching and that it won't be processed
             # further. Later, we could potentially add an option to control this.
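
The facility/severity arithmetic above is RFC 5424's PRI encoding: PRI = facility * 8 + severity, so decoding is one integer division and one remainder. A standalone check, with example values chosen for illustration:

    def decode_pri(pri):
        """Split a syslog PRI value into (facility, severity) per RFC 5424."""
        pri = int(pri)
        return pri // 8, pri % 8

    # <189> as sent by e.g. a router logging at local7.notice
    assert decode_pri(189) == (23, 5)
    assert decode_pri(0) == (0, 0)  # kern.emerg
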
- log.debug('Matching under %s', dev_os) + log.debug("Matching under %s", dev_os) msg_dict = self._identify_prefix(msg, data) if msg_dict: - log.debug('Adding %s to list of matched OS', dev_os) + log.debug("Adding %s to list of matched OS", dev_os) ret.append((dev_os, msg_dict)) else: - log.debug('No match found for %s', dev_os) + log.debug("No match found for %s", dev_os) if not ret: - log.debug('Not matched any OS, returning original log') - msg_dict = {'message': msg} + log.debug("Not matched any OS, returning original log") + msg_dict = {"message": msg} ret.append((None, msg_dict)) return ret def start(self): - ''' + """ Take the messages from the queue, inspect and identify the operating system, then queue the message correspondingly. - ''' + """ # metric counters napalm_logs_server_messages_received = Counter( "napalm_logs_server_messages_received", "Count of messages received from listener processes", ) napalm_logs_server_skipped_buffered_messages = Counter( - 'napalm_logs_server_skipped_buffered_messages', - 'Count of messages skipped as they were already buffered', - ['device_os'], + "napalm_logs_server_skipped_buffered_messages", + "Count of messages skipped as they were already buffered", + ["device_os"], ) napalm_logs_server_messages_with_identified_os = Counter( "napalm_logs_server_messages_with_identified_os", "Count of messages with positive os identification", - ['device_os'], + ["device_os"], ) napalm_logs_server_messages_without_identified_os = Counter( "napalm_logs_server_messages_without_identified_os", @@ -257,22 +243,22 @@ def start(self): napalm_logs_server_messages_failed_device_queuing = Counter( "napalm_logs_server_messages_failed_device_queuing", "Count of messages per device os that fail to be queued to a device process", - ['device_os'], + ["device_os"], ) napalm_logs_server_messages_device_queued = Counter( "napalm_logs_server_messages_device_queued", "Count of messages queued to device processes", - ['device_os'], + ["device_os"], ) napalm_logs_server_messages_unknown_queued = Counter( "napalm_logs_server_messages_unknown_queued", "Count of messages queued as unknown", ) - if self.opts.get('metrics_include_attributes', True): + if self.opts.get("metrics_include_attributes", True): napalm_logs_server_messages_attrs = Counter( "napalm_logs_server_messages_attrs", "Count of messages from the server process with their details", - ['device_os', 'host', 'tag'], + ["device_os", "host", "tag"], ) self._setup_ipc() # Start suicide polling thread @@ -291,17 +277,14 @@ def start(self): msg, address = umsgpack.unpackb(bin_obj, use_list=False) except zmq.ZMQError as error: if self.__up is False: - log.info('Exiting on process shutdown') + log.info("Exiting on process shutdown") return else: log.error(error, exc_info=True) raise NapalmLogsExit(error) if isinstance(msg, bytes): - if six.PY3: - msg = str(msg, 'utf-8') - else: - msg = msg.encode('utf-8') - log.debug('[%s] Dequeued message from %s: %s', address, msg, time.time()) + msg = str(msg, "utf-8") + log.debug("[%s] Dequeued message from %s: %s", address, msg, time.time()) napalm_logs_server_messages_received.inc() os_list = self._identify_os(msg) @@ -309,36 +292,30 @@ def start(self): if dev_os and dev_os in self.started_os_proc: # Identified the OS and the corresponding process is started. 
# Then send the message in the right queue - log.debug('Identified OS: %s', dev_os) - log.debug('Queueing message to %s', dev_os) - if six.PY3: - dev_os = bytes(dev_os, 'utf-8') + log.debug("Identified OS: %s", dev_os) + log.debug("Queueing message to %s", dev_os) + dev_os = bytes(dev_os, "utf-8") napalm_logs_server_messages_with_identified_os.labels( device_os=dev_os.decode() ).inc() if self._buffer: - message = '{dev_os}/{host}/{msg}'.format( + message = "{dev_os}/{host}/{msg}".format( dev_os=dev_os.decode(), - host=msg_dict['host'], - msg=msg_dict['message'], + host=msg_dict["host"], + msg=msg_dict["message"], ) - if six.PY3: - message_key = base64.b64encode( - bytes(message, 'utf-8') - ).decode() - else: - message_key = base64.b64encode(message) + message_key = base64.b64encode(bytes(message, "utf-8")).decode() if self._buffer[message_key]: log.info( '"%s" seems to be already buffered, skipping', - msg_dict['message'], + msg_dict["message"], ) napalm_logs_server_skipped_buffered_messages.labels( device_os=dev_os.decode() ).inc() continue log.debug( - '"%s" is not buffered yet, added', msg_dict['message'] + '"%s" is not buffered yet, added', msg_dict["message"] ) self._buffer[message_key] = 1 self.pub.send_multipart( @@ -347,17 +324,17 @@ def start(self): napalm_logs_server_messages_device_queued.labels( device_os=dev_os.decode() ).inc() - if self.opts.get('metrics_server_include_attributes', True): + if self.opts.get("metrics_server_include_attributes", True): napalm_logs_server_messages_attrs.labels( device_os=dev_os.decode(), - host=msg_dict['host'], - tag=msg_dict['tag'], + host=msg_dict["host"], + tag=msg_dict["tag"], ).inc() elif dev_os and dev_os not in self.started_os_proc: # Identified the OS, but the corresponding process does not seem to be started. log.info( - 'Unable to queue the message to %s. Is the sub-process started?', + "Unable to queue the message to %s. Is the sub-process started?", dev_os, ) napalm_logs_server_messages_with_identified_os.labels( @@ -367,26 +344,26 @@ def start(self): device_os=dev_os.decode() ).inc() - elif not dev_os and self.opts['_server_send_unknown']: + elif not dev_os and self.opts["_server_send_unknown"]: # OS not identified, but the user requested to publish the message as-is log.debug( - 'Unable to identify the OS, sending directly to the publishers' + "Unable to identify the OS, sending directly to the publishers" ) to_publish = { - 'ip': address, - 'host': 'unknown', - 'timestamp': int(time.time()), - 'message_details': msg_dict, - 'os': UNKNOWN_DEVICE_NAME, - 'error': 'UNKNOWN', - 'model_name': 'unknown', + "ip": address, + "host": "unknown", + "timestamp": int(time.time()), + "message_details": msg_dict, + "os": UNKNOWN_DEVICE_NAME, + "error": "UNKNOWN", + "model_name": "unknown", } self.publisher_pub.send(umsgpack.packb(to_publish)) napalm_logs_server_messages_unknown_queued.inc() napalm_logs_server_messages_without_identified_os.inc() def stop(self): - log.info('Stopping server process') + log.info("Stopping server process") self.__up = False self.sub.close() self.pub.close() diff --git a/napalm_logs/transport/__init__.py b/napalm_logs/transport/__init__.py index 84fd25cf..60828481 100644 --- a/napalm_logs/transport/__init__.py +++ b/napalm_logs/transport/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs pluggable publisher. 
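
The buffering branch above deduplicates by deriving a key from '<os>/<host>/<message>', base64-encoding it, and skipping the message when the key is already present. The same idea in isolation, with a plain dict standing in for a real napalm-logs buffer backend (a real backend would also expire old entries):

    import base64

    seen = {}  # stand-in for a buffer backend with expiry

    def already_buffered(dev_os, host, message):
        """Return True when an identical message was recently queued."""
        key_src = "{dev_os}/{host}/{msg}".format(dev_os=dev_os, host=host, msg=message)
        message_key = base64.b64encode(bytes(key_src, "utf-8")).decode()
        if seen.get(message_key):
            return True
        seen[message_key] = 1
        return False

    assert not already_buffered("junos", "r1", "xe-0/0/0 down")
    assert already_buffered("junos", "r1", "xe-0/0/0 down")
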
-''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -37,44 +37,44 @@ log = logging.getLogger(__file__) TRANSPORT_LOOKUP = { - 'zeromq': ZMQTransport, - 'zmq': ZMQTransport, - 'cli': CLITransport, - 'print': CLITransport, - 'console': CLITransport, - 'log': LogTransport, - 'prometheus': PrometheusTransport, + "zeromq": ZMQTransport, + "zmq": ZMQTransport, + "cli": CLITransport, + "print": CLITransport, + "console": CLITransport, + "log": LogTransport, + "prometheus": PrometheusTransport, # 'rmq': RabbitMQransport, # 'rabbitmq': RabbitMQransport, - '*': ZMQTransport, + "*": ZMQTransport, } if HAS_KAFKA: log.info( - 'Kafka dependency seems to be installed, making kafka transport available.' + "Kafka dependency seems to be installed, making kafka transport available." ) - TRANSPORT_LOOKUP['kafka'] = KafkaTransport + TRANSPORT_LOOKUP["kafka"] = KafkaTransport if HAS_REQUESTS or HAS_TORNADO: - TRANSPORT_LOOKUP['http'] = HTTPTransport + TRANSPORT_LOOKUP["http"] = HTTPTransport if HAS_REQUESTS or HAS_TORNADO: - TRANSPORT_LOOKUP['alerta'] = AlertaTransport + TRANSPORT_LOOKUP["alerta"] = AlertaTransport def get_transport(name): - ''' + """ Return the transport class. - ''' + """ try: - log.debug('Using %s as transport', name) + log.debug("Using %s as transport", name) return TRANSPORT_LOOKUP[name] except KeyError: - msg = 'Transport {} is not available. Are the dependencies installed?'.format( + msg = "Transport {} is not available. Are the dependencies installed?".format( name ) log.error(msg, exc_info=True) raise InvalidTransportException(msg) -__all__ = ('get_transport',) +__all__ = ("get_transport",) diff --git a/napalm_logs/transport/alerta.py b/napalm_logs/transport/alerta.py index eac30f9e..02ddbedd 100644 --- a/napalm_logs/transport/alerta.py +++ b/napalm_logs/transport/alerta.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Alerta publisher for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -11,65 +11,65 @@ # As defined in https://docs.alerta.io/en/latest/api/alert.html#severity-table ALERTA_SEVERITY = { - 0: 'security', - 1: 'critical', - 2: 'major', - 3: 'minor', - 4: 'warning', - 5: 'informational', - 6: 'debug', - 7: 'trace', - 8: 'indeterminate', - 9: 'normal', - 10: 'unknown', + 0: "security", + 1: "critical", + 2: "major", + 3: "minor", + 4: "warning", + 5: "informational", + 6: "debug", + 7: "trace", + 8: "indeterminate", + 9: "normal", + 10: "unknown", } class AlertaTransport(HTTPTransport): - ''' + """ Alerta publisher class. 
- ''' + """ def __init__(self, address, port, **kwargs): super().__init__(address, port, **kwargs) - if not self.address.endswith('/alert') and not self.address.endswith('/alert/'): - self.address = '{}/alert'.format(self.address) - self.method = 'POST' - self.headers['Content-type'] = 'application/json' - key = kwargs.get('key') - if key and 'Authorization' not in self.headers: - self.headers.update({'Authorization': 'Key {}'.format(key)}) - token = kwargs.get('token') - if token and 'Authorization' not in self.headers: - self.headers.update({'Authorization': 'Bearer {}'.format(token)}) - self.environment = kwargs.get('environment') - self.pairs = kwargs.get('pairs') + if not self.address.endswith("/alert") and not self.address.endswith("/alert/"): + self.address = "{}/alert".format(self.address) + self.method = "POST" + self.headers["Content-type"] = "application/json" + key = kwargs.get("key") + if key and "Authorization" not in self.headers: + self.headers.update({"Authorization": "Key {}".format(key)}) + token = kwargs.get("token") + if token and "Authorization" not in self.headers: + self.headers.update({"Authorization": "Bearer {}".format(token)}) + self.environment = kwargs.get("environment") + self.pairs = kwargs.get("pairs") if not self.pairs: self.pairs = { - 'INTERFACE_UP': 'INTERFACE_DOWN', - 'OSPF_NEIGHBOR_UP': 'OSPF_NEIGHBOR_DOWN', - 'ISIS_NEIGHBOR_UP': 'ISIS_NEIGHBOR_DOWN', + "INTERFACE_UP": "INTERFACE_DOWN", + "OSPF_NEIGHBOR_UP": "OSPF_NEIGHBOR_DOWN", + "ISIS_NEIGHBOR_UP": "ISIS_NEIGHBOR_DOWN", } def publish(self, obj): data = napalm_logs.utils.unserialize(obj) - error = data['error'] - status = 'open' + error = data["error"] + status = "open" if error in self.pairs: error = self.pairs[error] - status = 'closed' + status = "closed" alerta_data = { - 'resource': '{host}::{msg}'.format(host=data['host'], msg=error), - 'event': data['error'], - 'service': ['napalm-logs'], - 'text': data['message_details']['message'].strip(), - 'attributes': data, - 'status': status, + "resource": "{host}::{msg}".format(host=data["host"], msg=error), + "event": data["error"], + "service": ["napalm-logs"], + "text": data["message_details"]["message"].strip(), + "attributes": data, + "status": status, } if self.environment: - alerta_data['environment'] = self.environment - alerta_data['severity'] = ALERTA_SEVERITY.get(data['severity'], 'unknown') - if self.backend == 'tornado': + alerta_data["environment"] = self.environment + alerta_data["severity"] = ALERTA_SEVERITY.get(data["severity"], "unknown") + if self.backend == "tornado": self.tornado_client.fetch( self.address, callback=self._handle_tornado_response, @@ -82,6 +82,6 @@ def publish(self, obj): allow_nonstandard_methods=True, decompress_response=False, ) - elif self.backend == 'requests': + elif self.backend == "requests": # Queue the publish object async self._publish_queue.put_nowait(alerta_data) diff --git a/napalm_logs/transport/base.py b/napalm_logs/transport/base.py index 80bcd6e1..5e147899 100644 --- a/napalm_logs/transport/base.py +++ b/napalm_logs/transport/base.py @@ -1,13 +1,13 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs transport base. -''' +""" class TransportBase: - ''' + """ The base class for the transport. 
- ''' + """ def __init__(self, address, port, **kwargs): pass diff --git a/napalm_logs/transport/cli.py b/napalm_logs/transport/cli.py index f6d45808..06689043 100644 --- a/napalm_logs/transport/cli.py +++ b/napalm_logs/transport/cli.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -''' +""" CLI transport for napalm-logs. Useful for debug only, publishes (prints) on the CLI. -''' +""" from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals @@ -12,9 +12,9 @@ class CLITransport(TransportBase): - ''' + """ CLI transport class. - ''' + """ NO_ENCRYPT = True # This tells the publisher to not encrypt the messages diff --git a/napalm_logs/transport/http.py b/napalm_logs/transport/http.py index f87489f4..a263fdb6 100644 --- a/napalm_logs/transport/http.py +++ b/napalm_logs/transport/http.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" HTTP(s) transport for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -40,59 +40,59 @@ class HTTPTransport(TransportBase): - ''' + """ HTTP transport class. - ''' + """ NO_ENCRYPT = True # This tells the publisher to not encrypt the messages # published over this channel. def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - self.address = kwargs['address'] + if kwargs.get("address"): + self.address = kwargs["address"] else: self.address = address - self.method = kwargs.get('method', 'POST') - log.debug('Publishing to %s using method %s', self.address, self.method) - self.auth = kwargs.get('auth') - self.username = kwargs.get('username') - self.password = kwargs.get('password') + self.method = kwargs.get("method", "POST") + log.debug("Publishing to %s using method %s", self.address, self.method) + self.auth = kwargs.get("auth") + self.username = kwargs.get("username") + self.password = kwargs.get("password") if not self.auth: if self.username and self.password: self.auth = (self.username, self.password) - self.headers = kwargs.get('headers', {}) - self.verify_ssl = kwargs.get('verify_ssl', True) - self.params = kwargs.get('params') - self.max_clients = kwargs.get('max_clients', 10) - self.backend = kwargs.get('backend') + self.headers = kwargs.get("headers", {}) + self.verify_ssl = kwargs.get("verify_ssl", True) + self.params = kwargs.get("params") + self.max_clients = kwargs.get("max_clients", 10) + self.backend = kwargs.get("backend") if not self.backend: - log.info('No explicit backend requested') + log.info("No explicit backend requested") if HAS_TORNADO: - self.backend = 'tornado' - log.info('Tornado seems to be installed, so will use') + self.backend = "tornado" + log.info("Tornado seems to be installed, so will use") elif HAS_REQUESTS: - self.backend = 'requests' - log.info('Requests seems to be installed, so will use') + self.backend = "requests" + log.info("Requests seems to be installed, so will use") def start(self): # Throw errors if backend it not properly configured - if self.backend not in ('requests', 'tornado'): - raise TransportException('Invalid HTTP backend: %s', self.backend) - if self.backend == 'requests' and not HAS_REQUESTS: + if self.backend not in ("requests", "tornado"): + raise TransportException("Invalid HTTP backend: %s", self.backend) + if self.backend == "requests" and not HAS_REQUESTS: raise TransportException( - 'Trying to use Requests as backend, but it is not installed' + "Trying to use Requests as backend, but it is not installed" ) - if self.backend == 'tornado' and not HAS_TORNADO: + if 
self.backend == "tornado" and not HAS_TORNADO: raise TransportException( - 'Trying to use Tornado as backend, but it is not installed' + "Trying to use Tornado as backend, but it is not installed" ) # Prepare the tornado backend - if self.backend == 'tornado': + if self.backend == "tornado": self.tornado_client = tornado.httpclient.AsyncHTTPClient( max_clients=self.max_clients ) - elif self.backend == 'requests': + elif self.backend == "requests": # When using requests, we start a threaded pool # with the size specified using max_clients. # Tornado already has this feature built-in. @@ -112,7 +112,7 @@ def start(self): def publish(self, obj): data = napalm_logs.utils.unserialize(obj) - if self.backend == 'tornado': + if self.backend == "tornado": self.tornado_client.fetch( self.address, callback=self._handle_tornado_response, @@ -125,7 +125,7 @@ def publish(self, obj): allow_nonstandard_methods=True, decompress_response=False, ) - elif self.backend == 'requests': + elif self.backend == "requests": # Queue the publish object async self._publish_queue.put_nowait(data) @@ -151,8 +151,8 @@ def _publish_requests(self): self.method, self.address, params=self.params, data=json.dumps(data) ) if not result.ok: - log.error('Unable to publish to %s', self.address) - log.error('Status code: %d', result.status_code) + log.error("Unable to publish to %s", self.address) + log.error("Status code: %d", result.status_code) log.error(result.text) else: log.debug(result.text) @@ -163,14 +163,14 @@ def _publish_requests(self): def _handle_tornado_response(self, response): if response.error: - log.error('Unable to publish to %s', self.address) + log.error("Unable to publish to %s", self.address) log.error(response.error) else: log.debug(response.body) def stop(self): - if self.backend == 'tornado': + if self.backend == "tornado": self.tornado_client.close() - elif self.backend == 'requests': + elif self.backend == "requests": for thread in self._pool: thread.join() diff --git a/napalm_logs/transport/kafka.py b/napalm_logs/transport/kafka.py index f0fa34ba..8969b548 100644 --- a/napalm_logs/transport/kafka.py +++ b/napalm_logs/transport/kafka.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Kafka transport for napalm-logs. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -24,22 +24,22 @@ class KafkaTransport(TransportBase): - ''' + """ Kafka transport class. 
- ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - address = kwargs['address'] - if kwargs.get('port'): - address = kwargs['port'] - if kwargs.get('no_encrypt'): - self.NO_ENCRYPT = kwargs['no_encrypt'] - if kwargs.get('bootstrap_servers'): - self.bootstrap_servers = kwargs['bootstrap_servers'] + if kwargs.get("address"): + address = kwargs["address"] + if kwargs.get("port"): + address = kwargs["port"] + if kwargs.get("no_encrypt"): + self.NO_ENCRYPT = kwargs["no_encrypt"] + if kwargs.get("bootstrap_servers"): + self.bootstrap_servers = kwargs["bootstrap_servers"] else: - self.bootstrap_servers = '{}:{}'.format(address, port) - self.kafka_topic = kwargs.get('topic', 'napalm-logs') + self.bootstrap_servers = "{}:{}".format(address, port) + self.kafka_topic = kwargs.get("topic", "napalm-logs") def start(self): try: @@ -54,5 +54,5 @@ def publish(self, obj): self.producer.send(self.kafka_topic, obj) def stop(self): - if hasattr(self, 'producer'): + if hasattr(self, "producer"): self.producer.close() diff --git a/napalm_logs/transport/log.py b/napalm_logs/transport/log.py index 786ef460..41f02255 100644 --- a/napalm_logs/transport/log.py +++ b/napalm_logs/transport/log.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- -''' +""" Log transport for napalm-logs. Send logging events across the network. -''' +""" from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals @@ -16,25 +16,25 @@ class LogTransport(TransportBase): - ''' + """ Log transport class. - ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - self.address = kwargs['address'] + if kwargs.get("address"): + self.address = kwargs["address"] else: self.address = address - if kwargs.get('port'): - self.port = kwargs['port'] + if kwargs.get("port"): + self.port = kwargs["port"] else: self.port = port def start(self): - self.logger = logging.getLogger('napalm-logs') + self.logger = logging.getLogger("napalm-logs") self.logger.setLevel(logging.INFO) handler = logging.handlers.SocketHandler(self.address, self.port) - formatter = logging.Formatter('%(asctime)s: %(message)s') + formatter = logging.Formatter("%(asctime)s: %(message)s") handler.setFormatter(formatter) self.logger.addHandler(handler) diff --git a/napalm_logs/transport/prometheus.py b/napalm_logs/transport/prometheus.py index 71719a8b..816ce0c6 100644 --- a/napalm_logs/transport/prometheus.py +++ b/napalm_logs/transport/prometheus.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Export napalm-logs notifications as Prometheus metrics. -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -19,498 +19,512 @@ class PrometheusTransport(TransportBase): - ''' + """ Prom transport class. - ''' + """ def __init__(self, address, port, **kwargs): self.metrics = {} def __parse_without_details(self, msg): - ''' + """ Helper to generate Counter metrics that only provide the host label from the structured message. 
- ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host"], ) - self.metrics[error].labels(host=msg['host']).inc() + self.metrics[error].labels(host=msg["host"]).inc() + + if msg.get("state") is not None: + base = error.split("_")[:-1] + metric = msg.get("state_tag", "_".join(base + ["state"]).lower()) + if metric not in self.metrics: + self.metrics[metric] = Gauge( + "napalm_logs_{}".format(metric), + "State for {} type notifications".format("_".join(base)), + ["host"], + ) + self.metrics[metric].labels(host=msg["host"]).set(msg["state"]) def __parse_user_action(self, msg): - ''' + """ Helper to generate Counter metrics that provide the host label, together with the username under a YANG structure users > user > [USER]. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'user'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "user"], ) self.metrics[error].labels( - host=msg['host'], user=list(msg['yang_message']['users']['user'].keys())[0] + host=msg["host"], user=list(msg["yang_message"]["users"]["user"].keys())[0] ).inc() def __parse_interface_basic(self, msg): - ''' + """ Helper to generate Counter metrics for interface notifications. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'interface'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "interface"], ) - if 'interface_state' not in self.metrics: - self.metrics['interface_state'] = Gauge( - 'napalm_logs_interface_state', - 'State of this interface. 0=DOWN, 1=UP', - ['host', 'interface'], + if "interface_state" not in self.metrics: + self.metrics["interface_state"] = Gauge( + "napalm_logs_interface_state", + "State of this interface. 0=DOWN, 1=UP", + ["host", "interface"], ) labels = { - 'host': msg['host'], - 'interface': list(msg['yang_message']['interfaces']['interface'].keys())[0], + "host": msg["host"], + "interface": list(msg["yang_message"]["interfaces"]["interface"].keys())[0], } self.metrics[error].labels(**labels).inc() - state = 1 if error == 'INTERFACE_UP' else 0 - self.metrics['interface_state'].labels(**labels).set(state) + state = 1 if error == "INTERFACE_UP" else 0 + self.metrics["interface_state"].labels(**labels).set(state) def __parse_lacp(self, msg): - ''' + """ Helper to generate Counter metrics for LACP notifications. 
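+        The counter is labelled by host, interface and member, taken from
+        the LACP interfaces tree of the structured message.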
- ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'interface', 'member'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "interface", "member"], ) - lacp_dict = msg['yang_message']['lacp']['interfaces']['interface'] + lacp_dict = msg["yang_message"]["lacp"]["interfaces"]["interface"] if_name = list(lacp_dict.keys())[0] - members_dict = lacp_dict[if_name]['members']['member'] + members_dict = lacp_dict[if_name]["members"]["member"] member_name = list(members_dict.keys())[0] self.metrics[error].labels( - host=msg['host'], interface=if_name, member=member_name + host=msg["host"], interface=if_name, member=member_name ).inc() def __parse_bgp_basic(self, msg): - ''' + """ Helper to generate Counter metrics for simple BGP notifications, providing the neighbor address and peer AS number. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'neighbor', 'peer_as'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "neighbor", "peer_as"], ) - neigh_dict = msg['yang_message']['bgp']['neighbors']['neighbor'] + neigh_dict = msg["yang_message"]["bgp"]["neighbors"]["neighbor"] neighbor = list(neigh_dict.keys())[0] self.metrics[error].labels( - host=msg['host'], + host=msg["host"], neighbor=neighbor, - peer_as=neigh_dict[neighbor]['state']['peer_as'], + peer_as=neigh_dict[neighbor]["state"]["peer_as"], ).inc() def __parse_network_instance_bgp(self, msg): - ''' + """ Helper to generate Counter metrics for simple BGP notifications, nested under the network-instance OpenConfig model, providing the neighbor address, instance name, and peer AS number. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'routing_instance', 'neighbor', 'peer_as'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "routing_instance", "neighbor", "peer_as"], ) instance_name = list( - msg['yang_message']['network-instances']['network-instance'].keys() + msg["yang_message"]["network-instances"]["network-instance"].keys() )[0] - instance_dict = msg['yang_message']['network-instances']['network-instance'][ + instance_dict = msg["yang_message"]["network-instances"]["network-instance"][ instance_name ] - neigh_dict = instance_dict['protocols']['protocol']['bgp']['neighbors'][ - 'neighbor' + neigh_dict = instance_dict["protocols"]["protocol"]["bgp"]["neighbors"][ + "neighbor" ] neighbor = list(neigh_dict.keys())[0] self.metrics[error].labels( - host=msg['host'], + host=msg["host"], routing_instance=instance_name, neighbor=neighbor, - peer_as=neigh_dict[neighbor]['state']['peer_as'], + peer_as=neigh_dict[neighbor]["state"]["peer_as"], ).inc() def __parse_bgp_no_asn(self, msg): - ''' + """ Helper to generate Counter metrics for simple BGP notifications - but without the AS number, so only the neighbor IP address. 
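+        e.g. BGP_MD5_INCORRECT is counted per host and neighbor only.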
- ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'neighbor'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "neighbor"], ) - neigh_dict = msg['yang_message']['bgp']['neighbors']['neighbor'] + neigh_dict = msg["yang_message"]["bgp"]["neighbors"]["neighbor"] neighbor = list(neigh_dict.keys())[0] - self.metrics[error].labels(host=msg['host'], neighbor=neighbor,).inc() + self.metrics[error].labels( + host=msg["host"], + neighbor=neighbor, + ).inc() def __parse_ospf_neighbor(self, msg): - error = msg['error'] + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'area', 'neighbor', 'interface'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "area", "neighbor", "interface"], ) - if 'ospf_neighbor' not in self.metrics: - self.metrics['ospf_neighbor'] = Gauge( - 'napalm_logs_ospf_neighbor_state', - 'State of the OSPF neighbor. 0=DOWN, 1=UP', - ['host', 'area', 'neighbor', 'interface'], + if "ospf_neighbor" not in self.metrics: + self.metrics["ospf_neighbor"] = Gauge( + "napalm_logs_ospf_neighbor_state", + "State of the OSPF neighbor. 0=DOWN, 1=UP", + ["host", "area", "neighbor", "interface"], ) - area_dict = msg['yang_message']['network-instances']['network-instance'][ - 'global' - ]['protocols']['protocol']['ospf']['ospfv2']['areas']['area'] + area_dict = msg["yang_message"]["network-instances"]["network-instance"][ + "global" + ]["protocols"]["protocol"]["ospf"]["ospfv2"]["areas"]["area"] area_id = list(area_dict.keys())[0] - iface_dict = area_dict[area_id]['interfaces']['interface'] + iface_dict = area_dict[area_id]["interfaces"]["interface"] iface_name = list(iface_dict.keys())[0] - neighbor = list(iface_dict[iface_name]['neighbors']['neighbor'].keys())[0] + neighbor = list(iface_dict[iface_name]["neighbors"]["neighbor"].keys())[0] labels = { - 'host': msg['host'], - 'area': area_id, - 'neighbor': neighbor, - 'interface': iface_name, + "host": msg["host"], + "area": area_id, + "neighbor": neighbor, + "interface": iface_name, } self.metrics[error].labels(**labels).inc() - state = 1 if error == 'OSPF_NEIGHBOR_UP' else 0 - self.metrics['ospf_neighbor'].labels(**labels).set(state) + state = 1 if error == "OSPF_NEIGHBOR_UP" else 0 + self.metrics["ospf_neighbor"].labels(**labels).set(state) def __parse_isis_neighbor(self, msg): - error = msg['error'] + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'interface', 'level', 'neighbor'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "interface", "level", "neighbor"], ) - if 'isis_neighbor' not in self.metrics: - self.metrics['isis_neighbor'] = Gauge( - 'napalm_logs_isis_neighbor_state', - 'State of the ISIS neighbor. 0=DOWN, 1=UP', - ['host', 'interface', 'level', 'neighbor'], + if "isis_neighbor" not in self.metrics: + self.metrics["isis_neighbor"] = Gauge( + "napalm_logs_isis_neighbor_state", + "State of the ISIS neighbor. 
0=DOWN, 1=UP", + ["host", "interface", "level", "neighbor"], ) - iface_dict = msg['yang_message']['network-instances']['network-instance'][ - 'global' - ]['protocols']['protocol']['isis']['interfaces']['interface'] + iface_dict = msg["yang_message"]["network-instances"]["network-instance"][ + "global" + ]["protocols"]["protocol"]["isis"]["interfaces"]["interface"] iface_name = list(iface_dict.keys())[0] - level_dict = iface_dict[iface_name]['levels']['level'] + level_dict = iface_dict[iface_name]["levels"]["level"] level = list(level_dict.keys())[0] - neighbor = list(level_dict[level]['adjacencies']['adjacency'].keys())[0] + neighbor = list(level_dict[level]["adjacencies"]["adjacency"].keys())[0] labels = { - 'host': msg['host'], - 'interface': iface_name, - 'level': level, - 'neighbor': neighbor, + "host": msg["host"], + "interface": iface_name, + "level": level, + "neighbor": neighbor, } self.metrics[error].labels(**labels).inc() - state = 1 if error == 'ISIS_NEIGHBOR_UP' else 0 - self.metrics['isis_neighbor'].labels(**labels).set(state) + state = 1 if error == "ISIS_NEIGHBOR_UP" else 0 + self.metrics["isis_neighbor"].labels(**labels).set(state) def __parse_nat_session(self, msg): - error = msg['error'] + error = msg["error"] labels = [ - 'service_name', - 'source_address', - 'source_port', - 'destination_address', - 'destination_port', - 'nat_destination_address', - 'nat_destination_port', - 'nat_source_address', - 'nat_source_port', + "service_name", + "source_address", + "source_port", + "destination_address", + "destination_port", + "nat_destination_address", + "nat_destination_port", + "nat_source_address", + "nat_source_port", ] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host'] + labels, + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host"] + labels, ) - event = list(msg['yang_message']['security']['flow'].keys())[0] - label_values = {'host': msg['host']} + event = list(msg["yang_message"]["security"]["flow"].keys())[0] + label_values = {"host": msg["host"]} for label in labels: - label_values[label] = msg['yang_message']['security']['flow'][event][label] + label_values[label] = msg["yang_message"]["security"]["flow"][event][label] self.metrics[error].labels(**label_values).inc() def _parse_interface_down(self, msg): - ''' + """ Build metrics from INTERFACE_DOWN notifications. - ''' + """ self.__parse_interface_basic(msg) def _parse_interface_up(self, msg): - ''' + """ Build metrics from INTERFACE_UP notifications. - ''' + """ self.__parse_interface_basic(msg) def _parse_interface_duplex_mode(self, msg): - ''' + """ Build metrics from INTERFACE_DUPLEX_MODE notifications. 
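+        The counter is labelled by host, interface and the reported
+        duplex_mode value.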
-        '''
-        if 'INTERFACE_DUPLEX_MODE' not in self.metrics:
-            self.metrics['INTERFACE_DUPLEX_MODE'] = Counter(
-                'napalm_logs_interface_duplex_mode',
-                'Counter for INTERFACE_DUPLEX_MODE notifications',
-                ['host', 'interface', 'duplex_mode'],
+        """
+        if "INTERFACE_DUPLEX_MODE" not in self.metrics:
+            self.metrics["INTERFACE_DUPLEX_MODE"] = Counter(
+                "napalm_logs_interface_duplex_mode",
+                "Counter for INTERFACE_DUPLEX_MODE notifications",
+                ["host", "interface", "duplex_mode"],
             )
-        iface_dict = msg['yang_message']['interfaces']['interface']
+        iface_dict = msg["yang_message"]["interfaces"]["interface"]
         iface_name = list(iface_dict.keys())[0]
-        self.metrics['INTERFACE_DUPLEX_MODE'].labels(
-            host=msg['host'],
+        self.metrics["INTERFACE_DUPLEX_MODE"].labels(
+            host=msg["host"],
             interface=iface_name,
-            duplex_mode=iface_dict[iface_name]['ethernet']['state']['duplex_mode'],
-        )
+            duplex_mode=iface_dict[iface_name]["ethernet"]["state"]["duplex_mode"],
+        ).inc()
 
     def _parse_interface_mac_limit_reached(self, msg):
-        '''
+        """
         Build metrics from INTERFACE_MAC_LIMIT_REACHED notifications.
-        '''
-        if 'INTERFACE_MAC_LIMIT_REACHED' not in self.metrics:
-            self.metrics['INTERFACE_MAC_LIMIT_REACHED'] = Gauge(
-                'napalm_logs_interface_mac_limit_reached',
-                'Counter for INTERFACE_MAC_LIMIT_REACHED notifications',
-                ['host', 'interface'],
+        """
+        if "INTERFACE_MAC_LIMIT_REACHED" not in self.metrics:
+            self.metrics["INTERFACE_MAC_LIMIT_REACHED"] = Gauge(
+                "napalm_logs_interface_mac_limit_reached",
+                "Counter for INTERFACE_MAC_LIMIT_REACHED notifications",
+                ["host", "interface"],
             )
-        iface_dict = msg['yang_message']['interfaces']['interface']
+        iface_dict = msg["yang_message"]["interfaces"]["interface"]
         iface_name = list(iface_dict.keys())[0]
-        self.metrics['INTERFACE_MAC_LIMIT_REACHED'].labels(
-            host=msg['host'], interface=iface_name
-        ).set(iface_dict[iface_name]['ethernet']['state']['learned-mac-addresses'])
+        self.metrics["INTERFACE_MAC_LIMIT_REACHED"].labels(
+            host=msg["host"], interface=iface_name
+        ).set(iface_dict[iface_name]["ethernet"]["state"]["learned-mac-addresses"])
 
     def _parse_lacp_interface_down(self, msg):
-        '''
+        """
         Build metrics for LACP_INTERFACE_DOWN messages.
-        '''
+        """
         self.__parse_lacp(msg)
 
     def _parse_bfd_state_change(self, msg):
-        '''
+        """
         Build metrics from BFD_STATE_CHANGE.
-        '''
-        if 'BFD_STATE_CHANGE' not in self.metrics:
-            self.metrics['BFD_STATE_CHANGE'] = Counter(
-                'napalm_logs_bfd_state_change',
-                'Counter for BFD_STATE_CHANGE notifications',
-                ['host', 'interface', 'session_state'],
+        """
+        if "BFD_STATE_CHANGE" not in self.metrics:
+            self.metrics["BFD_STATE_CHANGE"] = Counter(
+                "napalm_logs_bfd_state_change",
+                "Counter for BFD_STATE_CHANGE notifications",
+                ["host", "interface", "session_state"],
             )
-        iface_dict = msg['yang_message']['bfd']['interfaces']['interface']
-        self.metrics['BFD_STATE_CHANGE'].labels(
-            host=msg['host'],
-            interface=iface_dict['id'],
-            session_state=iface_dict['peers']['peer']['state']['session-state'],
+        iface_dict = msg["yang_message"]["bfd"]["interfaces"]["interface"]
+        self.metrics["BFD_STATE_CHANGE"].labels(
+            host=msg["host"],
+            interface=iface_dict["id"],
+            session_state=iface_dict["peers"]["peer"]["state"]["session-state"],
         ).inc()
 
     def _parse_ntp_server_unreachable(self, msg):
-        '''
+        """
         Build metrics from NTP_SERVER_UNREACHABLE notifications.
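+        The counter is labelled by host and the unreachable ntp_server.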
-        '''
-        if 'NTP_SERVER_UNREACHABLE' not in self.metrics:
-            self.metrics['NTP_SERVER_UNREACHABLE'] = Counter(
-                'napalm_logs_ntp_server_unreachable',
-                'Counter for NTP_SERVER_UNREACHABLE notifications',
-                ['host', 'ntp_server'],
+        """
+        if "NTP_SERVER_UNREACHABLE" not in self.metrics:
+            self.metrics["NTP_SERVER_UNREACHABLE"] = Counter(
+                "napalm_logs_ntp_server_unreachable",
+                "Counter for NTP_SERVER_UNREACHABLE notifications",
+                ["host", "ntp_server"],
             )
-        self.metrics['NTP_SERVER_UNREACHABLE'].labels(
-            host=msg['host'],
+        self.metrics["NTP_SERVER_UNREACHABLE"].labels(
+            host=msg["host"],
             ntp_server=list(
-                msg['yang_message']['system']['ntp']['servers']['server'].keys()
+                msg["yang_message"]["system"]["ntp"]["servers"]["server"].keys()
             )[0],
         ).inc()
 
     def _parse_bgp_prefix_limit_exceeded(self, msg):
-        '''
-        Build metrics form BGP_PREFIX_LIMIT_EXCEEDED notifications.
-        '''
+        """
+        Build metrics from BGP_PREFIX_LIMIT_EXCEEDED notifications.
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_prefix_thresh_exceeded(self, msg):
-        '''
+        """
         Build metrics from BGP_PREFIX_THRESH_EXCEEDED notifications.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_peer_not_configured(self, msg):
-        '''
+        """
         Build metrics from BGP_PEER_NOT_CONFIGURED notifications.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_session_not_configured(self, msg):
-        '''
+        """
         Build metrics from BGP_SESSION_NOT_CONFIGURED notifications.
-        '''
+        """
         self.__parse_bgp_no_asn(msg)
 
     def _parse_bgp_connection_rejected(self, msg):
-        '''
+        """
         Build metrics from BGP_CONNECTION_REJECTED notifications.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_connection_reset(self, msg):
-        '''
+        """
         Build metrics from BGP_CONNECTION_RESET notifications.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_incorrect_as_number(self, msg):
-        '''
+        """
         Build metrics from BGP_INCORRECT_AS_NUMBER notifications.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_neighbor_state_changed(self, msg):
-        '''
+        """
         Build metrics from BGP_NEIGHBOR_STATE_CHANGED.
-        '''
+        """
         self.__parse_bgp_basic(msg)
 
     def _parse_bgp_md5_incorrect(self, msg):
-        '''
+        """
         Build metrics from BGP_MD5_INCORRECT.
-        '''
+        """
         self.__parse_bgp_no_asn(msg)
 
     def _parse_bgp_cease_prefix_limit_exceeded(self, msg):
-        '''
+        """
         Build metrics for BGP_CEASE_PREFIX_LIMIT_EXCEEDED.
-        '''
+        """
         self.__parse_network_instance_bgp(msg)
 
     def _parse_user_enter_config_mode(self, msg):
-        '''
+        """
         Build metrics for USER_ENTER_CONFIG_MODE.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_user_exit_config_mode(self, msg):
-        '''
+        """
         Build metrics for USER_EXIT_CONFIG_MODE.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_user_write_config(self, msg):
-        '''
+        """
         Build metrics for USER_WRITE_CONFIG.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_user_login(self, msg):
-        '''
+        """
         Build metrics for USER_LOGIN.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_user_logout(self, msg):
-        '''
+        """
         Build metrics for USER_LOGOUT.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_configuration_commit_requested(self, msg):
-        '''
+        """
         Build metrics for CONFIGURATION_COMMIT_REQUESTED.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_configuration_rollback(self, msg):
-        '''
+        """
         Build metrics for CONFIGURATION_ROLLBACK.
-        '''
+        """
         self.__parse_user_action(msg)
 
     def _parse_system_alarm(self, msg):
-        '''
+        """
         Build metrics for SYSTEM_ALARM.
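+        Also sets the system_alarm_state Gauge: 1 while the alarm is set,
+        0 once SYSTEM_ALARM_CLEARED is received.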
- ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), [ - 'host', - 'component_name', - 'component_class', - 'alarm_state', - 'alarm_reason', + "host", + "component_name", + "component_class", + "alarm_state", + "alarm_reason", ], ) - if 'system_alarm_state' not in self.metrics: - self.metrics['system_alarm_state'] = Gauge( - 'napalm_logs_system_alarm_state', - 'State of the system alarm. 1=SET, 0=CLEARED', + if "system_alarm_state" not in self.metrics: + self.metrics["system_alarm_state"] = Gauge( + "napalm_logs_system_alarm_state", + "State of the system alarm. 1=SET, 0=CLEARED", [ - 'host', - 'component_name', - 'component_class', - 'alarm_state', - 'alarm_reason', + "host", + "component_name", + "component_class", + "alarm_state", + "alarm_reason", ], ) - component = msg['yang_message']['hardware-state']['component'] + component = msg["yang_message"]["hardware-state"]["component"] component_name = list(component.keys())[0] labels = { - 'host': msg['host'], - 'component_name': component_name, - 'component_class': component[component_name]['class'], - 'alarm_state': component[component_name]['state']['alarm-state'], - 'alarm_reason': component[component_name]['state']['alarm-reason'], + "host": msg["host"], + "component_name": component_name, + "component_class": component[component_name]["class"], + "alarm_state": component[component_name]["state"]["alarm-state"], + "alarm_reason": component[component_name]["state"]["alarm-reason"], } self.metrics[error].labels(**labels).inc() - state = 1 if error == 'SYSTEM_ALARM' else 0 - self.metrics['system_alarm_state'].labels(**labels).set(state) + state = 1 if error == "SYSTEM_ALARM" else 0 + self.metrics["system_alarm_state"].labels(**labels).set(state) def _parse_system_alarm_cleared(self, msg): return self._parse_system_alarm(msg) def __parse_minor_major_alarm(self, msg): - ''' + """ Build metrics for MINOR_ALARM_* and MAJOR_ALARM_* notifications. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'alarm_reason'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "alarm_reason"], ) - severity = error.split('_')[0].lower() - alarm_state_metric = '{}_alarm_state'.format(severity) + severity = error.split("_")[0].lower() + alarm_state_metric = "{}_alarm_state".format(severity) if alarm_state_metric not in self.metrics: self.metrics[alarm_state_metric] = Gauge( - 'napalm_logs_{}'.format(alarm_state_metric), - 'State of the {} system alarm. 1=SET, 0=CLEARED'.format(severity), - ['host', 'alarm_reason'], + "napalm_logs_{}".format(alarm_state_metric), + "State of the {} system alarm. 
1=SET, 0=CLEARED".format(severity), + ["host", "alarm_reason"], ) labels = { - 'host': msg['host'], - 'alarm_reason': msg['yang_message']['alarms']['alarm']['additional-text'], + "host": msg["host"], + "alarm_reason": msg["yang_message"]["alarms"]["alarm"]["additional-text"], } self.metrics[error].labels(**labels).inc() - state = 1 if error == '{}_ALARM_SET'.format(severity.upper()) else 0 + state = 1 if error == "{}_ALARM_SET".format(severity.upper()) else 0 self.metrics[alarm_state_metric].labels(**labels).set(state) def _parse_major_alarm_set(self, msg): @@ -526,69 +540,69 @@ def _parse_minor_alarm_cleared(self, msg): return self.__parse_minor_major_alarm(msg) def _parse_ospf_neighbor_up(self, msg): - ''' + """ Build metrics for OSPF_NEIGHBOR_UP. - ''' + """ self.__parse_ospf_neighbor(msg) def _parse_ospf_neighbor_down(self, msg): - ''' + """ Build metrics for OSPF_NEIGHBOR_DOWN. - ''' + """ self.__parse_ospf_neighbor(msg) def _parse_isis_neighbor_up(self, msg): - ''' + """ Build metrics for ISIS_NEIGHBOR_UP. - ''' + """ self.__parse_isis_neighbor(msg) def _parse_isis_neighbor_down(self, msg): - ''' + """ Build metrics for ISIS_NEIGHBOR_DOWN. - ''' + """ self.__parse_isis_neighbor(msg) def _parse_nat_session_created(self, msg): - ''' + """ Build metrics for NAT_SESSION_CREATED. - ''' + """ self.__parse_nat_session(msg) def _parse_nat_session_closed(self, msg): - ''' + """ Build metrics for NAT_SESSION_CLOSED. - ''' + """ self.__parse_nat_session(msg) def _parse_ddos_protocol_violation_set(self, msg): - ''' + """ Build metrics for DDOS_PROTOCOL_VIOLATION_SET messages. - ''' - error = msg['error'] + """ + error = msg["error"] if error not in self.metrics: self.metrics[error] = Counter( - 'napalm_logs_{error}'.format(error=error.lower()), - 'Counter for {error} notifications'.format(error=error), - ['host', 'event_type', 'entity_type', 'additional_text'], + "napalm_logs_{error}".format(error=error.lower()), + "Counter for {error} notifications".format(error=error), + ["host", "event_type", "entity_type", "additional_text"], ) - alarm_dict = msg['yang_message']['alarms']['alarm'] + alarm_dict = msg["yang_message"]["alarms"]["alarm"] labels = { - 'host': msg['host'], - 'event_type': alarm_dict['event-type'], - 'entity_type': alarm_dict['entity-type'], - 'additional_text': alarm_dict['additional-text'], + "host": msg["host"], + "event_type": alarm_dict["event-type"], + "entity_type": alarm_dict["entity-type"], + "additional_text": alarm_dict["additional-text"], } self.metrics[error].labels(**labels).inc() def start(self): - log.debug('Starting the Prometheus publisher') + log.debug("Starting the Prometheus publisher") def publish(self, obj): data = napalm_logs.utils.unserialize(obj) - if data['error'] in ('RAW', 'UNKNOWN'): + if data["error"] in ("RAW", "UNKNOWN"): return - fun_name = '_parse_{}'.format(data['error'].lower()) + fun_name = "_parse_{}".format(data["error"].lower()) if hasattr(self, fun_name): try: getattr(self, fun_name)(data) @@ -603,4 +617,4 @@ def publish(self, obj): self.__parse_without_details(data) def stop(self): - log.debug('Stopping the Prometheus publisher') + log.debug("Stopping the Prometheus publisher") diff --git a/napalm_logs/transport/zeromq.py b/napalm_logs/transport/zeromq.py index 15faaa7f..af73c4a1 100644 --- a/napalm_logs/transport/zeromq.py +++ b/napalm_logs/transport/zeromq.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" ZeroMQ transport for napalm-logs. 
-''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -19,30 +19,30 @@ class ZMQTransport(TransportBase): - ''' + """ ZMQ transport class. - ''' + """ def __init__(self, address, port, **kwargs): - if kwargs.get('address'): - self.address = kwargs['address'] + if kwargs.get("address"): + self.address = kwargs["address"] else: self.address = address - if kwargs.get('port'): - self.port = kwargs['port'] + if kwargs.get("port"): + self.port = kwargs["port"] else: self.port = port - if kwargs.get('no_encrypt'): - self.NO_ENCRYPT = kwargs['no_encrypt'] + if kwargs.get("no_encrypt"): + self.NO_ENCRYPT = kwargs["no_encrypt"] def start(self): self.context = zmq.Context() self.socket = self.context.socket(zmq.PUB) - if ':' in self.address: + if ":" in self.address: self.socket.ipv6 = True try: self.socket.bind( - 'tcp://{addr}:{port}'.format(addr=self.address, port=self.port) + "tcp://{addr}:{port}".format(addr=self.address, port=self.port) ) except zmq.error.ZMQError as err: log.error(err, exc_info=True) @@ -52,7 +52,7 @@ def publish(self, obj): self.socket.send(obj) def stop(self): - if hasattr(self, 'socket'): + if hasattr(self, "socket"): self.socket.close() - if hasattr(self, 'context'): + if hasattr(self, "context"): self.context.term() diff --git a/napalm_logs/utils/__init__.py b/napalm_logs/utils/__init__.py index fd9ce594..1cb361b4 100644 --- a/napalm_logs/utils/__init__.py +++ b/napalm_logs/utils/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" napalm-logs utilities -''' +""" from __future__ import absolute_import from __future__ import unicode_literals @@ -11,9 +11,10 @@ import copy import time import socket +import fnmatch import logging import threading -import collections +import collections.abc from pydoc import locate from datetime import datetime @@ -27,7 +28,6 @@ # Import napalm-logs pkgs import napalm_logs.config as defaults -import napalm_logs.ext.six as six from napalm_logs.exceptions import ClientConnectException from napalm_logs.exceptions import CryptoException from napalm_logs.exceptions import BadSignatureException @@ -36,9 +36,9 @@ class ClientAuth: - ''' + """ Client auth class. - ''' + """ def __init__( self, @@ -62,69 +62,69 @@ def __init__( self._start_keep_alive() def _start_keep_alive(self): - ''' + """ Start the keep alive thread as a daemon - ''' + """ keep_alive_thread = threading.Thread(target=self.keep_alive) keep_alive_thread.daemon = True keep_alive_thread.start() def keep_alive(self): - ''' + """ Send a keep alive request periodically to make sure that the server is still alive. If not then try to reconnect. 
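+        Runs in the daemon thread started by _start_keep_alive, sending
+        AUTH_KEEP_ALIVE and expecting AUTH_KEEP_ALIVE_ACK back every
+        AUTH_KEEP_ALIVE_INTERVAL seconds.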
-        '''
+        """
         self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL)
         while self.__up:
             try:
-                log.debug('Sending keep-alive message to the server')
+                log.debug("Sending keep-alive message to the server")
                 self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
             except socket.error:
-                log.error('Unable to send keep-alive message to the server.')
-                log.error('Re-init the SSL socket.')
+                log.error("Unable to send keep-alive message to the server.")
+                log.error("Re-init the SSL socket.")
                 self.reconnect()
-                log.debug('Trying to re-send the keep-alive message to the server.')
+                log.debug("Trying to re-send the keep-alive message to the server.")
                 self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
             msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK))
-            log.debug('Received %s from the keep-alive server', msg)
+            log.debug("Received %s from the keep-alive server", msg)
             if msg != defaults.AUTH_KEEP_ALIVE_ACK:
                 log.error(
-                    'Received %s instead of %s form the auth keep-alive server',
+                    "Received %s instead of %s from the auth keep-alive server",
                     msg,
                     defaults.AUTH_KEEP_ALIVE_ACK,
                 )
-                log.error('Re-init the SSL socket.')
+                log.error("Re-init the SSL socket.")
                 self.reconnect()
             time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL)
 
     def reconnect(self):
-        '''
+        """
         Try to reconnect and re-authenticate with the server.
-        '''
-        log.debug('Closing the SSH socket.')
+        """
+        log.debug("Closing the SSL socket.")
         try:
             self.ssl_skt.close()
         except socket.error:
-            log.error('The socket seems to be closed already.')
-        log.debug('Re-opening the SSL socket.')
+            log.error("The socket seems to be closed already.")
+        log.debug("Re-opening the SSL socket.")
         self.authenticate()
 
     def authenticate(self):
-        '''
+        """
         Authenticate the client and return the private
         and signature keys.
 
         Establish a connection through a secured socket,
         then do the handshake using the napalm-logs
         auth algorithm.
-        '''
+        """
         log.debug(
-            'Authenticate to %s:%d, using the certificate %s',
+            "Authenticate to %s:%d, using the certificate %s",
             self.address,
             self.port,
             self.certificate,
         )
-        if ':' in self.address:
+        if ":" in self.address:
             skt_ver = socket.AF_INET6
         else:
             skt_ver = socket.AF_INET
@@ -136,14 +136,14 @@ def authenticate(self):
             self.ssl_skt.connect((self.address, self.port))
             self.auth_try_id = 0
         except socket.error as err:
-            log.error('Unable to open the SSL socket.')
+            log.error("Unable to open the SSL socket.")
             self.auth_try_id += 1
             if not self.max_try or self.auth_try_id < self.max_try:
-                log.error('Trying to authenticate again in %d seconds', self.timeout)
+                log.error("Trying to authenticate again in %d seconds", self.timeout)
                 time.sleep(self.timeout)
                 self.authenticate()
             log.critical(
-                'Giving up, unable to authenticate to %s:%d using the certificate %s',
+                "Giving up, unable to authenticate to %s:%d using the certificate %s",
                 self.address,
                 self.port,
                 self.certificate,
@@ -166,54 +166,54 @@ def authenticate(self):
             )
 
     def decrypt(self, binary):
-        '''
+        """
         Decrypt and unpack the original OpenConfig object,
         serialized using MessagePack.
         Raise BadSignatureException when the signature
         was forged or corrupted.
-        '''
+        """
         try:
             encrypted = self.verify_key.verify(binary)
         except BadSignatureError:
-            log.error('Signature was forged or corrupt', exc_info=True)
-            raise BadSignatureException('Signature was forged or corrupt')
+            log.error("Signature was forged or corrupt", exc_info=True)
+            raise BadSignatureException("Signature was forged or corrupt")
         try:
             packed = self.priv_key.decrypt(encrypted)
         except CryptoError:
-            log.error('Unable to decrypt', exc_info=True)
-            raise CryptoException('Unable to decrypt')
+            log.error("Unable to decrypt", exc_info=True)
+            raise CryptoException("Unable to decrypt")
         return umsgpack.unpackb(packed)
 
     def stop(self):
-        '''
+        """
         Stop the client.
-        '''
+        """
         self.__up = False
         self.ssl_skt.close()
 
 
 def cast(var, function):
-    # If the function is a build in function
-    if locate(function) and hasattr(locate(function), '__call__'):
+    # If the function is a built-in function
+    if locate(function) and hasattr(locate(function), "__call__"):
         try:
             return locate(function)(var)
         except ValueError:
             log.error(
-                'Unable to use function %s on value %s', function, var, exc_info=True
+                "Unable to use function %s on value %s", function, var, exc_info=True
             )
-    # If the function is str function
-    if hasattr(str, function) and hasattr(getattr(str, function), '__call__'):
+    # If the function is a str function
+    if hasattr(str, function) and hasattr(getattr(str, function), "__call__"):
         return getattr(str, function)(var)
     glob = globals()
     # If the function is defined in this module
-    if function in glob and hasattr(glob[function], '__call__'):
+    if function in glob and hasattr(glob[function], "__call__"):
         return glob[function](var)
     # If none of the above, just return the original var
     return var
 
 
 def color_to_severity(var):
-    colour_dict = {'RED': 3, 'YELLOW': 4}
+    colour_dict = {"RED": 3, "YELLOW": 4}
    return colour_dict.get(var, var)
 
 
@@ -222,10 +222,10 @@ def bgp_state_convert(state):
     Given a matched BGP state, map it to a vendor agnostic version.
     """
     state_dict = {
-        'OpenSent': 'OPEN_SENT',
-        'OpenConfirm': 'OPEN_CONFIRM',
-        'Up': 'ESTABLISHED',
-        'Down': 'ACTIVE',
+        "OpenSent": "OPEN_SENT",
+        "OpenConfirm": "OPEN_CONFIRM",
+        "Up": "ESTABLISHED",
+        "Down": "ACTIVE",
     }
     return state_dict.get(state, state.upper())
 
 
@@ -234,16 +234,16 @@ def bfd_state_convert(state):
     """
     Given a matched BFD state, map it to a vendor agnostic version.
     """
-    state_dict = {'AdminDown': 'ADMIN_DOWN'}
+    state_dict = {"AdminDown": "ADMIN_DOWN"}
     return state_dict.get(state, state.upper())
 
 
 def unserialize(binary):
-    '''
+    """
     Unpack the original OpenConfig object, serialized using MessagePack.
     This is to be used when disable_security is set.
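+    For instance, unserialize(b'\x81\xa4host\xa3vmx') returns
+    {'host': 'vmx'} (illustrative MessagePack payload).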
-    '''
+    """
     return umsgpack.unpackb(binary)
 
 
@@ -252,7 +252,7 @@ def extract(rgx, msg, mapping, time_format=None):
     log.debug('Matching regex "%s" on "%s"', rgx, msg)
     matched = re.search(rgx, msg, re.I)
     if not matched:
-        log.info('The regex didnt match')
+        log.info("The regex didn't match")
         return None
     else:
         group_index = 0
@@ -260,7 +260,7 @@ def extract(rgx, msg, mapping, time_format=None):
             group_name = list(mapping.keys())[group_index]
             ret[group_name] = group_value
             group_index += 1
-    log.debug('Regex matched')
+    log.debug("Regex matched")
     log.debug(ret)
     if time_format:
         try:
@@ -268,13 +268,14 @@ def extract(rgx, msg, mapping, time_format=None):
                 time_format[0].format(**ret), time_format[1]
             )
         except ValueError as error:
-            log.error('Unable to convert date and time into a timestamp: %s', error)
-        ret['timestamp'] = int((parsed_time - datetime(1970, 1, 1)).total_seconds())
+            log.error("Unable to convert date and time into a timestamp: %s", error)
+            # parsed_time is undefined when parsing fails: skip the timestamp
+            return ret
+        ret["timestamp"] = int((parsed_time - datetime(1970, 1, 1)).total_seconds())
     return ret
 
 
 def setval(key, val, dict_=None, delim=defaults.DEFAULT_DELIM):
-    '''
+    """
     Set a value under the dictionary hierarchy identified
     under the key. The target 'foo/bar/baz' returns the
     dictionary hierarchy {'foo': {'bar': {'baz': {}}}}.
@@ -284,13 +284,13 @@ def setval(key, val, dict_=None, delim=defaults.DEFAULT_DELIM):
     Currently this doesn't work with integers, i.e.
     cannot build lists dynamically.
     TODO
-    '''
+    """
     if not dict_:
         dict_ = {}
     prev_hier = dict_
     dict_hier = key.split(delim)
     for each in dict_hier[:-1]:
-        if isinstance(each, six.string_type):
+        if isinstance(each, str):
             if each not in prev_hier:
                 prev_hier[each] = {}
             prev_hier = prev_hier[each]
@@ -302,17 +302,17 @@
 
 
 def traverse(data, key, delim=defaults.DEFAULT_DELIM):
-    '''
+    """
     Traverse a dict or list using a slash delimiter target string.
     The target 'foo/bar/0' will return data['foo']['bar'][0] if
     this value exists, otherwise will return empty dict.
     Return None when not found.
     This can be used to verify if a certain key exists under
     dictionary hierarchy.
-    '''
+    """
     for each in key.split(delim):
         if isinstance(data, list):
-            if isinstance(each, six.string_type):
+            if isinstance(each, str):
                 embed_match = False
-                # Index was not numeric, lets look at any embedded dicts
+                # Index was not numeric, let's look at any embedded dicts
                 for embedded in (x for x in data if isinstance(x, dict)):
@@ -339,15 +339,15 @@
 
 
 def dictupdate(dest, upd):
-    '''
+    """
     Recursive version of the default dict.update
 
     Merges upd recursively into dest.
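+
+    For example, dictupdate({'a': {'b': 1}}, {'a': {'c': 2}}) returns
+    {'a': {'b': 1, 'c': 2}}.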
- ''' + """ recursive_update = True - if (not isinstance(dest, collections.Mapping)) or ( - not isinstance(upd, collections.Mapping) + if (not isinstance(dest, collections.abc.Mapping)) or ( + not isinstance(upd, collections.abc.Mapping) ): - raise TypeError('Cannot update using non-dict types in dictupdate.update()') + raise TypeError("Cannot update using non-dict types in dictupdate.update()") updkeys = list(upd.keys()) if not set(list(dest.keys())) & set(updkeys): recursive_update = False @@ -358,8 +358,8 @@ def dictupdate(dest, upd): dest_subkey = dest.get(key, None) except AttributeError: dest_subkey = None - if isinstance(dest_subkey, collections.Mapping) and isinstance( - val, collections.Mapping + if isinstance(dest_subkey, collections.abc.Mapping) and isinstance( + val, collections.abc.Mapping ): ret = dictupdate(dest_subkey, val) dest[key] = ret @@ -379,3 +379,69 @@ def dictupdate(dest, upd): for k in upd: dest[k] = upd[k] return dest + + +def expr_match(line, expr): + """ + Evaluate a line of text against an expression. First try a full-string + match, next try globbing, and then try to match assuming expr is a regular + expression. Originally designed to match minion IDs for + whitelists/blacklists. + """ + if line == expr: + return True + if fnmatch.fnmatch(line, expr): + return True + try: + if re.match(r"\A{0}\Z".format(expr), line): + return True + except re.error: + pass + return False + + +def check_whitelist_blacklist(value, whitelist=None, blacklist=None): + """ + Check a whitelist and/or blacklist to see if the value matches it. + + value + The item to check the whitelist and/or blacklist against. + + whitelist + The list of items that are white-listed. If ``value`` is found + in the whitelist, then the function returns ``True``. Otherwise, + it returns ``False``. + + blacklist + The list of items that are black-listed. If ``value`` is found + in the blacklist, then the function returns ``False``. Otherwise, + it returns ``True``. + + If both a whitelist and a blacklist are provided, value membership + in the blacklist will be examined first. If the value is not found + in the blacklist, then the whitelist is checked. If the value isn't + found in the whitelist, the function returns ``False``. 
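+
+    Example (hypothetical names): check_whitelist_blacklist("edge-router1",
+    whitelist=["edge-*"], blacklist=["*-lab"]) returns True.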
+ """ + if blacklist is not None: + if not hasattr(blacklist, "__iter__"): + blacklist = [blacklist] + try: + for expr in blacklist: + if expr_match(value, expr): + return False + except TypeError: + log.error("Non-iterable blacklist {0}".format(blacklist)) + + if whitelist: + if not hasattr(whitelist, "__iter__"): + whitelist = [whitelist] + try: + for expr in whitelist: + if expr_match(value, expr): + return True + except TypeError: + log.error("Non-iterable whitelist {0}".format(whitelist)) + else: + return True + + return False diff --git a/requirements-dev.txt b/requirements-dev.txt index ec240323..7ddf5d36 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,10 +1,9 @@ -r requirements.txt -tox==3.21.4 -black==19.10b0 -pytest==6.2.2 -pylama==7.7.1 -jinja2==2.11.3 -pytest-cov==2.11.1 +tox==3.27.0 +black==22.10.0 +pytest==7.2.0 +pylama==8.4.1 +jinja2==3.1.2 +pytest-cov==4.0.0 pytest-json==0.4.0 -pytest-pythonpath==0.7.3 flake8-import-order==0.18.1 diff --git a/requirements.txt b/requirements.txt index 8458cd5e..6df24e00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,5 +2,6 @@ pyzmq pyyaml pynacl dateparser +sentry_sdk u-msgpack-python prometheus_client diff --git a/setup.py b/setup.py index 44afd241..6ea47bfb 100644 --- a/setup.py +++ b/setup.py @@ -1,48 +1,48 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -''' +""" The setup script for napalm-logs -''' +""" import codecs from setuptools import setup, find_packages -__author__ = 'Mircea Ulinic ' +__author__ = "Mircea Ulinic " -with codecs.open('README.rst', 'r', encoding='utf8') as file: +with codecs.open("README.rst", "r", encoding="utf8") as file: long_description = file.read() with open("requirements.txt", "r") as fs: reqs = [r for r in fs.read().splitlines() if (len(r) > 0 and not r.startswith("#"))] setup( - name='napalm-logs', - version='0.10.0', + name="napalm-logs", + version="0.11.0", packages=find_packages(), - author='Mircea Ulinic', - author_email='mircea.ulinic@gmail.com', - description='Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser', + author="Mircea Ulinic", + author_email="ping@mirceaulinic.net", + description="Network Automation and Programmability Abstraction Layer with Multivendor support: syslog parser", long_description=long_description, classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Topic :: Utilities', - 'Topic :: System :: Networking', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Operating System :: POSIX :: Linux', - 'Operating System :: MacOS', - 'Intended Audience :: Developers', + "Development Status :: 5 - Production/Stable", + "Topic :: Utilities", + "Topic :: System :: Networking", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS", + "Intended Audience :: Developers", ], - url='https://github.com/napalm-automation/napalm-logs', + url="https://github.com/napalm-automation/napalm-logs", license="Apache License 2.0", - keywords=('napalm', 'syslog', 'zeromq', 
'engine'), + keywords=("napalm", "syslog", "zeromq", "engine"), include_package_data=True, install_requires=reqs, entry_points={ - 'console_scripts': ['napalm-logs=napalm_logs.scripts.cli:napalm_logs_engine'], + "console_scripts": ["napalm-logs=napalm_logs.scripts.cli:napalm_logs_engine"], }, ) diff --git a/tests/config/eos/MAINTENANCE_MODE_ENDED/default/syslog.msg b/tests/config/eos/MAINTENANCE_MODE_ENDED/default/syslog.msg new file mode 100644 index 00000000..c97dd75c --- /dev/null +++ b/tests/config/eos/MAINTENANCE_MODE_ENDED/default/syslog.msg @@ -0,0 +1 @@ +<100>Feb 28 17:54:21 some-switch MaintenanceMode: %MMODE-5-MAINT_UNIT_STATE_CHANGE: Maintenance unit state changed for unit MAINT-UNIT. Old State maintenanceModeExit, New State active diff --git a/tests/config/eos/MAINTENANCE_MODE_ENDED/default/yang.json b/tests/config/eos/MAINTENANCE_MODE_ENDED/default/yang.json new file mode 100644 index 00000000..e44ec810 --- /dev/null +++ b/tests/config/eos/MAINTENANCE_MODE_ENDED/default/yang.json @@ -0,0 +1,29 @@ +{ + "error": "MAINTENANCE_MODE_ENDED", + "host": "some-switch", + "ip": "127.0.0.1", + "timestamp": 1646070861, + "yang_message": { + "notification-messages": { + "notification-message": { + "message-text": "maintenance ended" + } + } + }, + "message_details": { + "date": "Feb 28", + "time": "17:54:21", + "host": "some-switch", + "processName": "MaintenanceMode", + "tag": "MMODE-5-MAINT_UNIT_STATE_CHANGE", + "pri": "100", + "message": ": Maintenance unit state changed for unit MAINT-UNIT. Old State maintenanceModeExit, New State active", + "facility": 12, + "severity": 4 + }, + "yang_model": "ietf-notification-messages", + "os": "eos", + "facility": 12, + "severity": 4, + "state": 0 +} diff --git a/tests/config/eos/MAINTENANCE_MODE_STARTED/default/syslog.msg b/tests/config/eos/MAINTENANCE_MODE_STARTED/default/syslog.msg new file mode 100644 index 00000000..e03bde2b --- /dev/null +++ b/tests/config/eos/MAINTENANCE_MODE_STARTED/default/syslog.msg @@ -0,0 +1 @@ +<100>Feb 28 17:54:21 some-switch MaintenanceMode: %MMODE-5-MAINT_UNIT_STATE_CHANGE: Maintenance unit state changed for unit MAINT-UNIT. Old State maintenanceModeEnter, New State underMaintenance diff --git a/tests/config/eos/MAINTENANCE_MODE_STARTED/default/yang.json b/tests/config/eos/MAINTENANCE_MODE_STARTED/default/yang.json new file mode 100644 index 00000000..bf8d4f9f --- /dev/null +++ b/tests/config/eos/MAINTENANCE_MODE_STARTED/default/yang.json @@ -0,0 +1,29 @@ +{ + "error": "MAINTENANCE_MODE_STARTED", + "host": "some-switch", + "ip": "127.0.0.1", + "timestamp": 1646070861, + "yang_message": { + "notification-messages": { + "notification-message": { + "message-text": "maintenance started" + } + } + }, + "message_details": { + "date": "Feb 28", + "time": "17:54:21", + "host": "some-switch", + "processName": "MaintenanceMode", + "tag": "MMODE-5-MAINT_UNIT_STATE_CHANGE", + "pri": "100", + "message": ": Maintenance unit state changed for unit MAINT-UNIT. 
Old State maintenanceModeEnter, New State underMaintenance", + "facility": 12, + "severity": 4 + }, + "yang_model": "ietf-notification-messages", + "os": "eos", + "facility": 12, + "severity": 4, + "state": 1 +} diff --git a/tests/config/eos/PROCESS_RESTART/default/syslog.msg b/tests/config/eos/PROCESS_RESTART/default/syslog.msg new file mode 100644 index 00000000..d152f8c9 --- /dev/null +++ b/tests/config/eos/PROCESS_RESTART/default/syslog.msg @@ -0,0 +1 @@ +<166>Jan 24 02:50:31 device01 ProcMgr-worker: %PROCMGR-6-PROCESS_RESTART: Restarting 'Bgp' immediately (it had PID=32058) diff --git a/tests/config/eos/PROCESS_RESTART/default/yang.json b/tests/config/eos/PROCESS_RESTART/default/yang.json new file mode 100644 index 00000000..1e44ef0b --- /dev/null +++ b/tests/config/eos/PROCESS_RESTART/default/yang.json @@ -0,0 +1,32 @@ +{ +"yang_message": { + "system": { + "processes": { + "process": { + "name": "Bgp", + "state": "restarting", + "pid": 32058 + } + } + } +}, +"message_details": { + "severity": 6, + "facility": 20, + "pri": "166", + "processName": "ProcMgr-worker", + "host": "device01", + "tag": "PROCMGR-6-PROCESS_RESTART", + "time": "02:50:31", + "date": "Jan 24", + "message": ": Restarting 'Bgp' immediately (it had PID=32058)" +}, +"facility": 20, +"ip": "127.0.0.1", +"error": "PROCESS_RESTART", +"host": "device01", +"yang_model": "NO_MODEL", +"timestamp": 1642992631, +"os": "eos", +"severity": 6 +} diff --git a/tests/config/eos/PROCESS_STARTED/default/syslog.msg b/tests/config/eos/PROCESS_STARTED/default/syslog.msg new file mode 100644 index 00000000..38f4e1f7 --- /dev/null +++ b/tests/config/eos/PROCESS_STARTED/default/syslog.msg @@ -0,0 +1 @@ +<166>Jan 24 02:50:31 device01 ProcMgr-worker: %PROCMGR-6-PROCESS_STARTED: 'Bgp' starting with PID=6186 (PPID=2030) -- execing '/usr/bin/Bgp' diff --git a/tests/config/eos/PROCESS_STARTED/default/yang.json b/tests/config/eos/PROCESS_STARTED/default/yang.json new file mode 100644 index 00000000..97ab99bb --- /dev/null +++ b/tests/config/eos/PROCESS_STARTED/default/yang.json @@ -0,0 +1,33 @@ +{ +"yang_message": { + "system": { + "processes": { + "process": { + "name": "Bgp", + "state": "started", + "pid": 6186, + "uptime": 0 + } + } + } +}, +"message_details": { + "severity": 6, + "facility": 20, + "pri": "166", + "processName": "ProcMgr-worker", + "host": "device01", + "tag": "PROCMGR-6-PROCESS_STARTED", + "time": "02:50:31", + "date": "Jan 24", + "message": ": 'Bgp' starting with PID=6186 (PPID=2030) -- execing '/usr/bin/Bgp'" +}, +"facility": 20, +"ip": "127.0.0.1", +"error": "PROCESS_STARTED", +"host": "device01", +"yang_model": "NO_MODEL", +"timestamp": 1642992631, +"os": "eos", +"severity": 6 +} diff --git a/tests/config/eos/PROCESS_TERMINATED/default/syslog.msg b/tests/config/eos/PROCESS_TERMINATED/default/syslog.msg new file mode 100644 index 00000000..6aec3735 --- /dev/null +++ b/tests/config/eos/PROCESS_TERMINATED/default/syslog.msg @@ -0,0 +1 @@ +<166>Jan 24 02:50:31 device01 ProcMgr-worker: %PROCMGR-6-PROCESS_TERMINATED: 'Tmp75-system' (PID=6170, status=9) has terminated. 
diff --git a/tests/config/eos/PROCESS_TERMINATED/default/yang.json b/tests/config/eos/PROCESS_TERMINATED/default/yang.json new file mode 100644 index 00000000..af0df090 --- /dev/null +++ b/tests/config/eos/PROCESS_TERMINATED/default/yang.json @@ -0,0 +1,32 @@ +{ +"yang_message": { + "system": { + "processes": { + "process": { + "name": "Tmp75-system", + "state": "terminated", + "pid": 6170 + } + } + } +}, +"message_details": { + "severity": 6, + "facility": 20, + "pri": "166", + "processName": "ProcMgr-worker", + "host": "device01", + "tag": "PROCMGR-6-PROCESS_TERMINATED", + "time": "02:50:31", + "date": "Jan 24", + "message": ": 'Tmp75-system' (PID=6170, status=9) has terminated." +}, +"facility": 20, +"ip": "127.0.0.1", +"error": "PROCESS_TERMINATED", +"host": "device01", +"yang_model": "NO_MODEL", +"timestamp": 1642992631, +"os": "eos", +"severity": 6 +} diff --git a/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/syslog.msg b/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/syslog.msg new file mode 100644 index 00000000..d5c08d2e --- /dev/null +++ b/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/syslog.msg @@ -0,0 +1 @@ +<7>Dec 10 00:00:01 some-switch fpc0 (buf alloc) failed allocating packet buffer diff --git a/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/yang.json b/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/yang.json new file mode 100644 index 00000000..a08e8e12 --- /dev/null +++ b/tests/config/junos/FAILED_ALLOCATING_PACKET_BUFFER/default/yang.json @@ -0,0 +1,22 @@ +{ + "error": "FAILED_ALLOCATING_PACKET_BUFFER", + "host": "some-switch", + "ip": "127.0.0.1", + "timestamp": 1639094401, + "yang_message": {}, + "message_details": { + "date": "Dec 10", + "time": "00:00:01", + "hostPrefix": null, + "host": "some-switch", + "tag": "fpc0", + "pri": "7", + "message": "(buf alloc) failed allocating packet buffer", + "facility": 0, + "severity": 7 + }, + "yang_model": "NO_MODEL", + "os": "junos", + "facility": 0, + "severity": 7 +} diff --git a/tests/config/junos/INTERFACE_UP/default/syslog.msg b/tests/config/junos/INTERFACE_UP/default/syslog.msg new file mode 100644 index 00000000..4afe1c08 --- /dev/null +++ b/tests/config/junos/INTERFACE_UP/default/syslog.msg @@ -0,0 +1 @@ +<28>Jul 20 21:45:59 vmx01 mib2d[2424]: SNMP_TRAP_LINK_UP: ifIndex 502, ifAdminStatus up(1), ifOperStatus up(1), ifName xe-0/0/0 diff --git a/tests/config/junos/INTERFACE_UP/default/yang.json b/tests/config/junos/INTERFACE_UP/default/yang.json new file mode 100644 index 00000000..f4d9edb5 --- /dev/null +++ b/tests/config/junos/INTERFACE_UP/default/yang.json @@ -0,0 +1,35 @@ +{ + "yang_message": { + "interfaces": { + "interface": { + "xe-0/0/0": { + "state": { + "oper_status": "UP", + "admin_status": "UP" + } + } + } + } + }, + "message_details": { + "processId": "2424", + "severity": 4, + "facility": 3, + "hostPrefix": null, + "pri": "28", + "processName": "mib2d", + "host": "vmx01", + "tag": "SNMP_TRAP_LINK_UP", + "time": "21:45:59", + "date": "Jul 20", + "message": "ifIndex 502, ifAdminStatus up(1), ifOperStatus up(1), ifName xe-0/0/0" + }, + "timestamp": 1500587159, + "facility": 3, + "ip": "127.0.0.1", + "host": "vmx01", + "yang_model": "openconfig-interfaces", + "error": "INTERFACE_UP", + "os": "junos", + "severity": 4 +} diff --git a/tests/test_base.py b/tests/test_base.py index 1f5516e6..600ec759 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -''' +""" Test the napalm-logs base class. 
-''' +""" from __future__ import absolute_import from __future__ import unicode_literals diff --git a/tests/test_config.py b/tests/test_config.py index 2667a529..7f957692 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -''' +""" Test fixtures for the napalm-logs profiles. -''' +""" from __future__ import absolute_import # Import python std lib @@ -27,29 +27,29 @@ TEST_SKT = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) TEST_CLIENT = None -NAPALM_LOGS_TEST_LOG_LEVEL = os.getenv('NAPALM_LOGS_TEST_LOG_LEVEL', default='warning') -NAPALM_LOGS_TEST_ADDR = os.getenv('NAPALM_LOGS_TEST_ADDR', default='0.0.0.0') -NAPALM_LOGS_TEST_PORT = os.getenv('NAPALM_LOGS_TEST_PORT', default=17191) -NAPALM_LOGS_TEST_PUB_ADDR = os.getenv('NAPALM_LOGS_TEST_PUB_ADDR', default='0.0.0.0') -NAPALM_LOGS_TEST_PUB_PORT = os.getenv('NAPALM_LOGS_TEST_PUB_PORT', default=17193) +NAPALM_LOGS_TEST_LOG_LEVEL = os.getenv("NAPALM_LOGS_TEST_LOG_LEVEL", default="warning") +NAPALM_LOGS_TEST_ADDR = os.getenv("NAPALM_LOGS_TEST_ADDR", default="0.0.0.0") +NAPALM_LOGS_TEST_PORT = os.getenv("NAPALM_LOGS_TEST_PORT", default=17191) +NAPALM_LOGS_TEST_PUB_ADDR = os.getenv("NAPALM_LOGS_TEST_PUB_ADDR", default="0.0.0.0") +NAPALM_LOGS_TEST_PUB_PORT = os.getenv("NAPALM_LOGS_TEST_PUB_PORT", default=17193) logging_level = napalm_logs.config.LOGGING_LEVEL.get(NAPALM_LOGS_TEST_LOG_LEVEL.lower()) logging.basicConfig(level=logging_level, format=napalm_logs.config.LOG_FORMAT) def startup_proc(): - ''' + """ Startup the napalm-logs process. - ''' + """ global NL_BASE global NL_PROC - log.debug('Starting up the napalm-logs process') + log.debug("Starting up the napalm-logs process") NL_BASE = NapalmLogs( disable_security=True, address=NAPALM_LOGS_TEST_ADDR, port=NAPALM_LOGS_TEST_PORT, - publisher=[{'zmq': {'send_unknown': True}}], - listener=[{'udp': {}}], + publisher=[{"zmq": {"send_unknown": True}}], + listener=[{"udp": {}}], publish_address=NAPALM_LOGS_TEST_PUB_ADDR, publish_port=NAPALM_LOGS_TEST_PUB_PORT, log_level=NAPALM_LOGS_TEST_LOG_LEVEL, @@ -63,19 +63,19 @@ def startup_proc(): def startup_local_client(): - ''' + """ Startup a local ZMQ client to receive the published messages. - ''' + """ time.sleep(2) global TEST_CLIENT context = zmq.Context() TEST_CLIENT = context.socket(zmq.SUB) TEST_CLIENT.connect( - 'tcp://{addr}:{port}'.format( + "tcp://{addr}:{port}".format( addr=NAPALM_LOGS_TEST_PUB_ADDR, port=NAPALM_LOGS_TEST_PUB_PORT ) ) - TEST_CLIENT.setsockopt(zmq.SUBSCRIBE, b'') + TEST_CLIENT.setsockopt(zmq.SUBSCRIBE, b"") # Startup the local ZMQ client. @@ -83,17 +83,17 @@ def startup_local_client(): def generate_tests(): - ''' + """ Generate the list of tests. 
- ''' + """ expected_os_errors = {} for os_name, os_cfg in NL_BASE.config_dict.items(): expected_os_errors[os_name] = [] - for message in os_cfg['messages']: - expected_os_errors[os_name].append(message['error']) + for message in os_cfg["messages"]: + expected_os_errors[os_name].append(message["error"]) test_cases = [] cwd = os.path.dirname(__file__) - test_path = os.path.join(cwd, 'config') + test_path = os.path.join(cwd, "config") os_dir_list = [ name for name in os.listdir(test_path) @@ -103,7 +103,7 @@ def generate_tests(): tested_oss = set(os_dir_list) missing_oss = expected_oss - tested_oss for missing_os in missing_oss: - test_cases.append(('__missing__{}'.format(missing_os), '', '')) + test_cases.append(("__missing__{}".format(missing_os), "", "")) for os_name in os_dir_list: # Subdir is the OS name os_path = os.path.join(test_path, os_name) @@ -116,7 +116,7 @@ def generate_tests(): defined_errors = set(errors) missing_errors = expected_errors - defined_errors for mising_err in missing_errors: - test_cases.append((os_name, '__missing__{}'.format(mising_err), '')) + test_cases.append((os_name, "__missing__{}".format(mising_err), "")) for error_name in errors: error_path = os.path.join(os_path, error_name) cases = [ @@ -125,7 +125,7 @@ def generate_tests(): if os.path.isdir(os.path.join(error_path, name)) ] if not cases: - test_cases.append((os_name, error_name, '__missing__')) + test_cases.append((os_name, error_name, "__missing__")) for test_case in cases: test_cases.append((os_name, error_name, test_case)) return test_cases @@ -137,15 +137,15 @@ def generate_tests(): @pytest.mark.parametrize("os_name,error_name,test_case", tests) def test_config(os_name, error_name, test_case): - assert not os_name.startswith('__missing__'), 'No tests defined for {}'.format( - os_name.replace('__missing__', '') + assert not os_name.startswith("__missing__"), "No tests defined for {}".format( + os_name.replace("__missing__", "") ) assert not error_name.startswith( - '__missing__' - ), 'No tests defined for {}, under {}'.format( - error_name.replace('__missing__', ''), os_name + "__missing__" + ), "No tests defined for {}, under {}".format( + error_name.replace("__missing__", ""), os_name ) - assert test_case != '__missing__', 'No test cases defined for {}, under {}'.format( + assert test_case != "__missing__", "No test cases defined for {}, under {}".format( error_name, os_name ) print( @@ -154,37 +154,37 @@ def test_config(os_name, error_name, test_case): ) ) cwd = os.path.dirname(__file__) - test_path = os.path.join(cwd, 'config', os_name, error_name, test_case) - raw_message_filepath = os.path.join(test_path, 'syslog.msg') - log.debug('Looking for %s', raw_message_filepath) + test_path = os.path.join(cwd, "config", os_name, error_name, test_case) + raw_message_filepath = os.path.join(test_path, "syslog.msg") + log.debug("Looking for %s", raw_message_filepath) assert os.path.isfile(raw_message_filepath) - with open(raw_message_filepath, 'r') as raw_message_fh: + with open(raw_message_filepath, "r") as raw_message_fh: raw_message = raw_message_fh.read() - log.debug('Read raw message:') + log.debug("Read raw message:") log.debug(raw_message) - yang_message_filepath = os.path.join(test_path, 'yang.json') - log.debug('Looking for %s', yang_message_filepath) + yang_message_filepath = os.path.join(test_path, "yang.json") + log.debug("Looking for %s", yang_message_filepath) try: - with open(yang_message_filepath, 'r') as yang_message_fh: + with open(yang_message_filepath, "r") as yang_message_fh: 
             yang_message = yang_message_fh.read()
     except IOError:
-        yang_message = ''
-    log.debug('Read YANG text:')
+        yang_message = ""
+    log.debug("Read YANG text:")
     log.debug(yang_message)
     if yang_message:
         struct_yang_message = json.loads(yang_message)
     else:
         struct_yang_message = {}
-    log.debug('Struct YANG message:')
+    log.debug("Struct YANG message:")
     log.debug(struct_yang_message)
-    log.debug('Sending the raw message to the napalm-logs daemon')
+    log.debug("Sending the raw message to the napalm-logs daemon")
     TEST_SKT.sendto(
-        raw_message.strip().encode('utf-8'),
+        raw_message.strip().encode("utf-8"),
         (NAPALM_LOGS_TEST_ADDR, NAPALM_LOGS_TEST_PORT),
     )
     zmq_msg = TEST_CLIENT.recv()
     deserialised_zmq_msg = napalm_logs.utils.unserialize(zmq_msg)
-    log.debug('Received from the napalm-logs daemon:')
+    log.debug("Received from the napalm-logs daemon:")
     log.debug(deserialised_zmq_msg)
     returned_yang = json.loads(json.dumps(deserialised_zmq_msg))
     if not struct_yang_message:
@@ -197,22 +197,22 @@ def test_config(os_name, error_name, test_case):
     # check the timestamp.
     # We still expect both to contain a timestamp though.
     assert struct_yang_message.pop(
-        'timestamp', False
-    ), 'Yang test file does not contain a timestamp key for {} under {}'.format(
+        "timestamp", False
+    ), "Yang test file does not contain a timestamp key for {} under {}".format(
         error_name, os_name
     )
     assert returned_yang.pop(
-        'timestamp', False
-    ), 'The returned yang does not contain a timestamp key for {} under {}'.format(
+        "timestamp", False
+    ), "The returned yang does not contain a timestamp key for {} under {}".format(
         error_name, os_name
     )
     assert struct_yang_message == returned_yang


 def test_napalm_logs_shut():
-    '''
+    """
     Shutdown the napalm-logs engine.
-    '''
+    """
     NL_BASE.stop_engine()
     assert NL_PROC.is_alive()
     NL_PROC.terminate()
diff --git a/tests/test_device.py b/tests/test_device.py
index 2016e1a0..dfd55854 100644
--- a/tests/test_device.py
+++ b/tests/test_device.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 Test the device process.
-'''
+"""
 from __future__ import absolute_import
 from __future__ import unicode_literals
diff --git a/tests/test_publisher.py b/tests/test_publisher.py
index 40663e6c..ca54c471 100644
--- a/tests/test_publisher.py
+++ b/tests/test_publisher.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 Test the publisher process.
-'''
+"""
 from __future__ import absolute_import
 from __future__ import unicode_literals
diff --git a/tests/test_server.py b/tests/test_server.py
index c0fb2e1a..67b5a631 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 Test the server process.
-'''
+"""
 from __future__ import absolute_import
 from __future__ import unicode_literals
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 989722ab..47bd50ef 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
-'''
+"""
 Test the napalm-logs base class.
-'''
+"""
 from __future__ import absolute_import
 from __future__ import unicode_literals

@@ -12,8 +12,8 @@
 def test_bgp_state_convert_in_dict():
     """
     Test bgp_state_convert returns values from its internal dict
     """
-    assert bgp_state_convert('OpenSent') == 'OPEN_SENT'
-    assert bgp_state_convert('OpenConfirm') == 'OPEN_CONFIRM'
+    assert bgp_state_convert("OpenSent") == "OPEN_SENT"
+    assert bgp_state_convert("OpenConfirm") == "OPEN_CONFIRM"


 def test_bgp_state_convert_not_dict():
@@ -21,14 +21,14 @@ def test_bgp_state_convert_not_dict():
     Test bgp_state_convert returns upper values for items not in
     its internal dict
     """
-    assert bgp_state_convert('Connect') == 'CONNECT'
+    assert bgp_state_convert("Connect") == "CONNECT"


 def test_bfd_state_convert_in_dict():
     """
     Test bfd_state_convert returns values from its internal dict
     """
-    assert bfd_state_convert('AdminDown') == 'ADMIN_DOWN'
+    assert bfd_state_convert("AdminDown") == "ADMIN_DOWN"


 def test_bfd_state_convert_not_dict():
@@ -36,4 +36,4 @@ def test_bfd_state_convert_not_dict():
     Test bfd_state_convert returns upper values for items not in
     its internal dict
     """
-    assert bfd_state_convert('Up') == 'UP'
+    assert bfd_state_convert("Up") == "UP"
diff --git a/tox.ini b/tox.ini
index a37d8b82..3729c853 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py3{6,7},black,pylama
+envlist = py3{7,8,9,10},black,pylama
 skip_missing_interpreters = true

 [testenv]
@@ -13,14 +13,14 @@ commands=
 deps =
     -rrequirements-dev.txt
-basepython = python3.6
+basepython = python3.9
 commands =
-    black --check --skip-string-normalization .
+    black --check .

 [testenv:pylama]
 deps =
     -rrequirements-dev.txt
-basepython = python3.6
+basepython = python3.9
 commands =
     pylama napalm_logs/
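
A note on the round trip exercised by tests/test_config.py above: the harness sends a raw
syslog line over UDP to the napalm-logs listener and reads the structured, YANG-modelled
message back from the ZMQ publisher. The sketch below reproduces that flow as a standalone
script. It is a minimal illustration, not part of this change set: the 127.0.0.1 endpoints and
ports 17191/17193 are assumptions mirroring the test defaults, and the syslog line is a
hypothetical placeholder.

    import socket
    import time

    import zmq

    import napalm_logs.utils

    # Assumed endpoints, mirroring the test defaults; adjust to your config.
    LISTENER = ("127.0.0.1", 17191)
    PUBLISHER = "tcp://127.0.0.1:17193"

    # Subscribe to everything the daemon publishes.
    context = zmq.Context()
    sub = context.socket(zmq.SUB)
    sub.connect(PUBLISHER)
    sub.setsockopt(zmq.SUBSCRIBE, b"")
    # Give the SUB socket time to finish connecting, so the reply is not
    # missed (the tests sleep for the same reason before subscribing).
    time.sleep(1)

    # Send one raw syslog line to the UDP listener (placeholder message).
    skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    raw_message = "<28>Jul  4 13:20:40 device1 rpd[2902]: hypothetical syslog payload"
    skt.sendto(raw_message.strip().encode("utf-8"), LISTENER)

    # Block until the daemon publishes the corresponding structured message,
    # then decode it with the same helper the tests use.
    zmq_msg = sub.recv()
    print(napalm_logs.utils.unserialize(zmq_msg))

Because the tests start the engine with disable_security=True, no authentication step is
needed here; a secured deployment would additionally authenticate the client before calling
unserialize.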