From 6bdf46111b667805d83d540752fe2da16c2140ca Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:16:01 +0200 Subject: [PATCH 01/11] replace config files with pyproject.toml --- .codespellrc | 5 - .pylintrc | 519 ----------------------------------------------- MANIFEST.in | 3 - pyproject.toml | 195 ++++++++++++++++++ pytest.ini | 5 - requirements.txt | 1 - setup.py | 46 ----- 7 files changed, 195 insertions(+), 579 deletions(-) delete mode 100644 .codespellrc delete mode 100644 .pylintrc delete mode 100644 MANIFEST.in create mode 100644 pyproject.toml delete mode 100644 pytest.ini delete mode 100644 requirements.txt delete mode 100644 setup.py diff --git a/.codespellrc b/.codespellrc deleted file mode 100644 index 44f905f..0000000 --- a/.codespellrc +++ /dev/null @@ -1,5 +0,0 @@ -[codespell] -skip = *.po,*.ts,./docs/_build,./docs/_static,./.git -count = -quiet-level = 3 -ignore-words-list = spindel diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 653cb92..0000000 --- a/.pylintrc +++ /dev/null @@ -1,519 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist= - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=0 - -# Control the amount of potential inferred values when inferring a single -# object. 
This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - consider-using-f-string - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). 
See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. 
-bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _, - pyLSV2 - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. 
Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=120 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. 
-notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members=struct.* - -# Tells whether missing members accessed in mixin class should be ignored. 
A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. 
-additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). 
-max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". 
-overgeneral-exceptions=BaseException, - Exception diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 3d818fc..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include *.md -recursive-include scripts *.py -recursive-include pyLSV2 *.mo \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..79a1419 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,195 @@ +[build-system] +requires = ["setuptools", "wheel", "setuptools_scm[toml]>=6.0"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] + +[project] +name = "pyLSV2" +authors = [ + {name = "drunsinn", email = "dr.unsinn@googlemail.com"}, +] +description = "A pure Python3 implementation of the LSV2 protocol" +requires-python = ">=3.6" +keywords = ["LSV2", "CNC", "PLC"] +dynamic = ["version", "readme", ] + +#[project.scripts] +#lsv2_demo = "scripts.lsv2_demo" + +[tool.setuptools] +packages = [ + "pyLSV2", + "pyLSV2.locales.en.LC_MESSAGES", + "pyLSV2.locales.de.LC_MESSAGES" + ] + +#[tool.setuptools.package-data] +#pyLSV2 = ["locales/*/LC_MESSAGES/*.mo", ] + +#[tool.setuptools.exclude-package-data] +#pyLSV2 = ["locales/*/LC_MESSAGES/*.po", ] + +[tool.setuptools.dynamic] +version = {attr = "pyLSV2.__version__"} +readme = {file = ["README.md", ]} + +[tool.black] +line-length = 140 +target-version = ["py36",] +include = "\\.pyi?$" +verbose = true + +[tool.pytest.ini_options] +log_cli = true +log_cli_level = "INFO" +log_format = "%(asctime)s %(levelname)s %(message)s" +log_date_format = "%Y-%m-%d %H:%M:%S" + +[tool.codespell] +skip = "*.po,*.ts,./docs/_build,./docs/_static,./.git" +count = "" +quiet-level = 3 +ignore-words-list = "spindel" + +[tool.pylint.main] +fail-under = 10 +ignore = ["CVS"] +ignore-patterns = ["^\\.#"] +jobs = 0 +limit-inference-results = 100 +persistent = true +py-version = "3.6" +suggestion-mode = true + +[tool.pylint.basic] +argument-naming-style = "snake_case" +attr-naming-style = "snake_case" +bad-names = 
["foo", "bar", "baz", "toto", "tutu", "tata"] +class-attribute-naming-style = "any" +class-const-naming-style = "UPPER_CASE" +class-naming-style = "PascalCase" +const-naming-style = "UPPER_CASE" +docstring-min-length = -1 +function-naming-style = "snake_case" +good-names = ["i", "j", "k", "ex", "Run", "_", "pyLSV2"] +inlinevar-naming-style = "any" +method-naming-style = "snake_case" +module-naming-style = "snake_case" +no-docstring-rgx = "^_" +property-classes = ["abc.abstractproperty"] +variable-naming-style = "snake_case" + +[tool.pylint.classes] +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +valid-classmethod-first-arg = ["cls"] +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pylint.design] +max-args = 5 +max-attributes = 7 +max-bool-expr = 5 +max-branches = 12 +max-locals = 15 +max-parents = 7 +max-public-methods = 20 +max-returns = 6 +max-statements = 50 +min-public-methods = 2 + +[tool.pylint.exceptions] +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] + +[tool.pylint.format] +ignore-long-lines = "^\\s*(# )??$" +indent-after-paren = 4 +indent-string = " " +max-line-length = 120 +max-module-lines = 1000 + +[tool.pylint.imports] +known-third-party = ["enchant"] + +[tool.pylint.logging] +logging-format-style = "old" +logging-modules = ["logging"] + +[tool.pylint."messages control"] +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +disable = [ + "raw-checker-failed", + "bad-inline-option", + "locally-disabled", + "file-ignored", + "suppressed-message", + "useless-suppression", + "deprecated-pragma", + "use-symbolic-message-instead", + "use-implicit-booleaness-not-comparison-to-string", + "use-implicit-booleaness-not-comparison-to-zero", + "consider-using-f-string" + ] + +[tool.pylint.method_args] + +timeout-methods = [ + "requests.api.delete", + 
"requests.api.get", + "requests.api.head", + "requests.api.options", + "requests.api.patch", + "requests.api.post", + "requests.api.put", + "requests.api.request" + ] + +[tool.pylint.miscellaneous] +notes = ["FIXME", "XXX", "TODO", "ToDo"] + +[tool.pylint.refactoring] +max-nested-blocks = 5 +never-returning-functions = ["sys.exit", "argparse.parse_error"] + +[tool.pylint.reports] +evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" +score = true + +[tool.pylint.similarities] +ignore-comments = true +ignore-docstrings = true +ignore-imports = true +ignore-signatures = true +min-similarity-lines = 4 + +[tool.pylint.spelling] +max-spelling-suggestions = 4 +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" + +[tool.pylint.typecheck] +contextmanager-decorators = ["contextlib.contextmanager"] +ignore-none = true +ignore-on-opaque-inference = true +ignored-checks-for-mixins = [ + "no-member", + "not-async-context-manager", + "not-context-manager", + "attribute-defined-outside-init" + ] +ignored-classes = [ + "optparse.Values", + "thread._local", + "_thread._local", + "argparse.Namespace" + ] +missing-member-hint = true +missing-member-hint-distance = 1 +missing-member-max-choices = 1 +mixin-class-rgx = ".*[Mm]ixin" + +[tool.pylint.variables] +allow-global-unused-variables = true +callbacks = ["cb_", "_cb"] +dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" +ignored-argument-names = "_.*|^ignored_|^unused_" +redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index bcd0752..0000000 --- a/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -log_cli = True -log_cli_level = INFO -log_format = %(asctime)s %(levelname)s %(message)s -log_date_format = %Y-%m-%d %H:%M:%S diff --git a/requirements.txt b/requirements.txt deleted file mode 
100644 index 8b13789..0000000 --- a/requirements.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/setup.py b/setup.py deleted file mode 100644 index 7e61f1e..0000000 --- a/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -* -"""package configuration for pyLSV2""" -from setuptools import find_packages, setup -from pyLSV2 import __doc__, __version__, __author__, __license__, __email__ - -setup( - name="pyLSV2", - python_requires=">=3.6", - packages=find_packages( - include=[ - "pyLSV2", - ], - exclude=["tests", "data"], - ), - #package_data={"pyLSV2": ["locales/*/LC_MESSAGES/*.mo"]}, - include_package_data=True, - version=__version__, - description=__doc__, - long_description=open("README.md").read(), - long_description_content_type="text/markdown", - author=__author__, - author_email=__email__, - url="https://github.com/drunsinn/pyLSV2", - license=__license__, - install_requires=[], - scripts=["scripts/lsv2cmd.py", "scripts/tab2csv.py", "scripts/scope2csv.py"], - keywords="LSV2 cnc communication transfer plc", - classifiers=[ - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Development Status :: 4 - Beta", - "Topic :: System :: Archiving", - "Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: System :: Monitoring", - "Typing :: Typed", - ], -) From 3820264cb571d387c819e9a56af763a63eb118dc Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:16:34 +0200 Subject: [PATCH 02/11] update actions --- .github/workflows/black.yml | 2 -- .github/workflows/python-publish.yml | 18 +++++++++++------- 2 
files changed, 11 insertions(+), 9 deletions(-) diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 1c10f7e..dbd7f31 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -11,5 +11,3 @@ jobs: steps: - uses: actions/checkout@v3 - uses: psf/black@stable - with: - options: "--verbose" \ No newline at end of file diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 2661ce2..e9e4b36 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -13,23 +13,27 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: '3.x' + - name: Install dependencies run: | python -m pip install --upgrade pip - pip install setuptools wheel twine - - name: msgfmt - uses: whtsky/msgfmt-action@6b2181f051b002182d01a1e1f1aff216230c5a4d + pip install setuptools wheel twine tox + + - name: Create Translation Files + uses: docker://textadi/build-language-i18n-action@v2 env: - WORKDIR: "pyLSV2/locales" + DIR: pyLSV2/locales + - name: Build and publish env: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - python setup.py sdist bdist_wheel + pip wheel -w dist . 
twine upload dist/* From b8571a79368146c4af1590010816e9fef0912caa Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:19:49 +0200 Subject: [PATCH 03/11] change fail threshold for pylint --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 79a1419..1cc6303 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ quiet-level = 3 ignore-words-list = "spindel" [tool.pylint.main] -fail-under = 10 +fail-under = 9 ignore = ["CVS"] ignore-patterns = ["^\\.#"] jobs = 0 From 7d01c7e81baf1385b019b3d6c5eb418fb7666540 Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:22:36 +0200 Subject: [PATCH 04/11] reformat file for test --- tests/conftest.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index afdf192..8711ccc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,12 +7,8 @@ def pytest_addoption(parser): """add commandline options to tests""" - parser.addoption( - "--address", action="store", help="address of machine or programming station" - ) - parser.addoption( - "--timeout", action="store", help="number of seconds for network timeout" - ) + parser.addoption("--address", action="store", help="address of machine or programming station") + parser.addoption("--timeout", action="store", help="number of seconds for network timeout") @pytest.fixture From 65d870983fe20d67703e56e1a9df06444026406f Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:23:47 +0200 Subject: [PATCH 05/11] reformat all files with new black config --- pyLSV2/client.py | 407 ++++++++----------------------- pyLSV2/dat_cls.py | 4 +- pyLSV2/low_level_com.py | 25 +- pyLSV2/misc.py | 20 +- pyLSV2/misc_scope.py | 34 +-- pyLSV2/table_reader.py | 77 ++---- pyLSV2/translate_messages.py | 28 +-- scripts/__init__.py | 0 scripts/lsv2_demo.py | 52 +--- scripts/lsv2cmd.py | 28 +-- scripts/real_time_readings.py | 21 +- scripts/scope2csv.py | 33 
+-- scripts/scope_demo.py | 20 +- scripts/signals_assignment.py | 4 +- scripts/ssh_tunnel_demo.py | 20 +- scripts/tab2csv.py | 8 +- tests/test_connection.py | 5 +- tests/test_file_functions.py | 36 +-- tests/test_machine_parameters.py | 5 +- tests/test_plc_read.py | 20 +- tests/test_transfer.py | 21 +- 21 files changed, 210 insertions(+), 658 deletions(-) create mode 100644 scripts/__init__.py diff --git a/pyLSV2/client.py b/pyLSV2/client.py index 6e16875..65411d9 100644 --- a/pyLSV2/client.py +++ b/pyLSV2/client.py @@ -115,9 +115,7 @@ def __exit__( def switch_safe_mode(self, enable_safe_mode: bool = True): """switch between safe mode and unrestricted mode""" if enable_safe_mode is False: - self._logger.info( - "disabling safe mode. login and system commands are not restricted. Use with caution!" - ) + self._logger.info("disabling safe mode. login and system commands are not restricted. Use with caution!") self._known_logins = tuple(e.value for e in lc.Login) self._known_sys_cmd = tuple(e.value for e in lc.ParCCC) else: @@ -166,16 +164,12 @@ def _send_recive( if command is lc.CMD.C_CC: if len(bytes_to_send) < 2: - self._logger.warning( - "system command requires a payload of at exactly 2 bytes" - ) + self._logger.warning("system command requires a payload of at exactly 2 bytes") return False c_cc_command = struct.unpack("!H", bytes_to_send[0:2])[0] if c_cc_command not in self._known_sys_cmd: - self._logger.debug( - "unknown or unsupported system command %s", bytes_to_send - ) + self._logger.debug("unknown or unsupported system command %s", bytes_to_send) return False lsv_content = self._llcom.telegram(command, bytes_to_send) @@ -194,9 +188,7 @@ def _send_recive( if self._llcom.last_response is expected_response: # expected response received - self._logger.debug( - "expected response received: %s", self._llcom.last_response - ) + self._logger.debug("expected response received: %s", self._llcom.last_response) if len(lsv_content) > 0: return lsv_content return True @@ 
-205,9 +197,7 @@ def _send_recive( self._logger.debug("no response expected") return False - self._logger.warning( - "received unexpected response %s", self._llcom.last_response - ) + self._logger.warning("received unexpected response %s", self._llcom.last_response) return False def _send_recive_block( @@ -257,9 +247,7 @@ def _send_recive_block( response_buffer = [] if self._llcom.last_response is expected_response: # expected response received - self._logger.debug( - "expected response received: %s", self._llcom.last_response - ) + self._logger.debug("expected response received: %s", self._llcom.last_response) while self._llcom.last_response is expected_response: response_buffer.append(lsv_content) lsv_content = self._llcom.telegram(command=lc.RSP.T_OK) @@ -317,32 +305,20 @@ def _configure_connection(self): "could not decide on a buffer size for maximum message length of %d", self._sys_par.max_block_length, ) - raise LSV2ProtocolException( - "could not negotiate buffer site, unknown buffer size of %d" - % self._sys_par.max_block_length - ) + raise LSV2ProtocolException("could not negotiate buffer site, unknown buffer size of %d" % self._sys_par.max_block_length) if selected_command is None: self._logger.debug("use smallest buffer size of 256") self._llcom.buffer_size = selected_size else: self._logger.debug("use buffer size of %d", selected_size) - if self._send_recive( - lc.CMD.C_CC, struct.pack("!H", selected_command), lc.RSP.T_OK - ): + if self._send_recive(lc.CMD.C_CC, struct.pack("!H", selected_command), lc.RSP.T_OK): self._llcom.buffer_size = selected_size else: - raise LSV2ProtocolException( - "error in communication while setting buffer size to %d" - % selected_size - ) + raise LSV2ProtocolException("error in communication while setting buffer size to %d" % selected_size) - if not self._send_recive( - lc.CMD.C_CC, struct.pack( - "!H", lc.ParCCC.SECURE_FILE_SEND), lc.RSP.T_OK - ): - self._logger.debug( - "secure file transfer not supported? 
use fallback") + if not self._send_recive(lc.CMD.C_CC, struct.pack("!H", lc.ParCCC.SECURE_FILE_SEND), lc.RSP.T_OK): + self._logger.debug("secure file transfer not supported? use fallback") self._secure_file_send = False else: self._logger.debug("secure file send is enabled") @@ -350,9 +326,7 @@ def _configure_connection(self): self.login(login=lc.Login.FILETRANSFER) - self._logger.info( - "successfully configured connection parameters and basic logins" - ) + self._logger.info("successfully configured connection parameters and basic logins") def login(self, login: lc.Login, password: str = "") -> bool: """ @@ -378,8 +352,7 @@ def login(self, login: lc.Login, password: str = "") -> bool: payload.extend(lm.ustr_to_ba(password)) if self._send_recive(lc.CMD.A_LG, payload, lc.RSP.T_OK): - self._logger.debug( - "login executed successfully for login %s", login.value) + self._logger.debug("login executed successfully for login %s", login.value) self._active_logins.append(login) return True @@ -407,8 +380,7 @@ def logout(self, login: Union[lc.Login, None] = None) -> bool: return False if self._send_recive(lc.CMD.A_LO, payload, lc.RSP.T_OK): - self._logger.info( - "logout executed successfully for login %s", login) + self._logger.info("logout executed successfully for login %s", login) if login is None: self._active_logins = [] else: @@ -424,18 +396,14 @@ def _read_parameters(self, force: bool = False) -> ld.SystemParameters: :param force: if ``True`` the information is re-read even if it is already buffered """ if self._sys_par.lsv2_version != -1 and force is False: - self._logger.debug( - "system parameters already in memory, return previous values" - ) + self._logger.debug("system parameters already in memory, return previous values") else: result = self._send_recive(lc.CMD.R_PR, None, lc.RSP.S_PR) if isinstance(result, (bytearray,)): self._sys_par = lm.decode_system_parameters(result) self._logger.debug("got system parameters: %s", self._sys_par) else: - 
self._logger.warning( - "an error occurred while querying system parameters" - ) + self._logger.warning("an error occurred while querying system parameters") payload = struct.pack("!L", lc.ParRCI.TURBO_MODE) result = self._send_recive(lc.CMD.R_CI, payload, lc.RSP.S_CI) @@ -445,9 +413,7 @@ def _read_parameters(self, force: bool = False) -> ld.SystemParameters: raise LSV2DataException("expected boolean") self._sys_par.turbo_mode_active = data else: - self._logger.warning( - "an error occurred while querying system information on turbo mode" - ) + self._logger.warning("an error occurred while querying system information on turbo mode") payload = struct.pack("!L", lc.ParRCI.DNC_ALLOWED) result = self._send_recive(lc.CMD.R_CI, payload, lc.RSP.S_CI) @@ -457,9 +423,7 @@ def _read_parameters(self, force: bool = False) -> ld.SystemParameters: raise LSV2DataException("expected boolean") self._sys_par.dnc_mode_allowed = data else: - self._logger.warning( - "an error occurred while querying system information on dnc mode" - ) + self._logger.warning("an error occurred while querying system information on dnc mode") payload = struct.pack("!L", lc.ParRCI.AXES_SAMPLING_RATE) result = self._send_recive(lc.CMD.R_CI, payload, lc.RSP.S_CI) @@ -469,9 +433,7 @@ def _read_parameters(self, force: bool = False) -> ld.SystemParameters: raise LSV2DataException("expected int") self._sys_par.axes_sampling_rate = data else: - self._logger.warning( - "an error occurred while querying system information on axes samling rate" - ) + self._logger.warning("an error occurred while querying system information on axes samling rate") return self._sys_par def _read_version(self, force=False) -> ld.VersionInfo: @@ -484,20 +446,15 @@ def _read_version(self, force=False) -> ld.VersionInfo: :raises LSV2DataException: if basic information could not be read from control """ if len(self._versions.control) > 0 and force is False: - self._logger.debug( - "version info already in memory, return previous values") + 
self._logger.debug("version info already in memory, return previous values") else: info_data = ld.VersionInfo() - result = self._send_recive( - lc.CMD.R_VR, struct.pack("!B", lc.ParRVR.CONTROL), lc.RSP.S_VR - ) + result = self._send_recive(lc.CMD.R_VR, struct.pack("!B", lc.ParRVR.CONTROL), lc.RSP.S_VR) if isinstance(result, (bytearray,)) and len(result) > 0: info_data.control = lm.ba_to_ustr(result) else: - raise LSV2DataException( - "Could not read version information from control" - ) + raise LSV2DataException("Could not read version information from control") result = self._send_recive( lc.CMD.R_VR, @@ -592,11 +549,9 @@ def program_stack(self) -> Union[ld.StackState, None]: result = self._send_recive(lc.CMD.R_RI, payload, lc.RSP.S_RI) if isinstance(result, (bytearray,)) and len(result) > 0: stack_info = lm.decode_stack_info(result) - self._logger.debug( - "successfully read active program stack: %s", stack_info) + self._logger.debug("successfully read active program stack: %s", stack_info) return stack_info - self._logger.warning( - "an error occurred while querying active program state") + self._logger.warning("an error occurred while querying active program state") return None @@ -614,12 +569,9 @@ def execution_state(self) -> lc.ExecState: result = self._send_recive(lc.CMD.R_RI, payload, lc.RSP.S_RI) if isinstance(result, (bytearray,)): - self._logger.debug( - "read execution state %d", struct.unpack("!H", result)[0] - ) + self._logger.debug("read execution state %d", struct.unpack("!H", result)[0]) return lc.ExecState(struct.unpack("!H", result)[0]) - self._logger.warning( - "an error occurred while querying execution state") + self._logger.warning("an error occurred while querying execution state") return lc.ExecState.UNDEFINED def directory_info(self, remote_directory: str = "") -> ld.DirectoryEntry: @@ -633,10 +585,7 @@ def directory_info(self, remote_directory: str = "") -> ld.DirectoryEntry: self._logger.warning("could not log in as user FILE") return 
ld.DirectoryEntry() - if ( - len(remote_directory) > 0 - and self.change_directory(remote_directory) is False - ): + if len(remote_directory) > 0 and self.change_directory(remote_directory) is False: self._logger.warning( "could not change current directory to read directory info for %s", remote_directory, @@ -645,9 +594,7 @@ def directory_info(self, remote_directory: str = "") -> ld.DirectoryEntry: result = self._send_recive(lc.CMD.R_DI, None, lc.RSP.S_DI) if isinstance(result, (bytearray,)) and len(result) > 0: dir_info = lm.decode_directory_info(result) - self._logger.debug( - "successfully received directory information for %s", dir_info.path - ) + self._logger.debug("successfully received directory information for %s", dir_info.path) return dir_info self._logger.warning("an error occurred while querying directory info") @@ -695,8 +642,7 @@ def file_info(self, remote_file_path: str) -> Union[ld.FileEntry, None]: result = self._send_recive(lc.CMD.R_FI, payload, lc.RSP.S_FI) if isinstance(result, (bytearray,)) and len(result) > 0: file_info = lm.decode_file_system_info(result, self._versions.type) - self._logger.debug( - "received file information for %s", file_info.name) + self._logger.debug("received file information for %s", file_info.name) return file_info if self.last_error.e_code == lc.LSV2StatusCode.T_ER_NO_FILE: @@ -727,13 +673,9 @@ def directory_content(self) -> List[ld.FileEntry]: result = self._send_recive_block(lc.CMD.R_DR, payload, lc.RSP.S_DR) if isinstance(result, (list,)): for entry in result: - dir_content.append( - lm.decode_file_system_info(entry, self._versions.type) - ) + dir_content.append(lm.decode_file_system_info(entry, self._versions.type)) - self._logger.debug( - "received %d packages for directory content", len(dir_content) - ) + self._logger.debug("received %d packages for directory content", len(dir_content)) else: self._logger.warning( "an error occurred while directory content info: '%s'", @@ -782,9 +724,7 @@ def 
make_directory(self, dir_path: str) -> bool: self._logger.warning("could not log in as user FILE") return False - path_parts = dir_path.replace("/", lc.PATH_SEP).split( - lc.PATH_SEP - ) # convert path + path_parts = dir_path.replace("/", lc.PATH_SEP).split(lc.PATH_SEP) # convert path path_to_check = "" for part in path_parts: @@ -804,8 +744,7 @@ def make_directory(self, dir_path: str) -> bool: ) return False else: - self._logger.debug( - "nothing to do as this segment already exists") + self._logger.debug("nothing to do as this segment already exists") return True def delete_empty_directory(self, dir_path: str) -> bool: @@ -829,14 +768,11 @@ def delete_empty_directory(self, dir_path: str) -> bool: return True if self.last_error.e_code == lc.LSV2StatusCode.T_ER_NO_DIR: - self._logger.debug( - "noting to do, directory %s didn't exist", dir_path) + self._logger.debug("noting to do, directory %s didn't exist", dir_path) return True if self.last_error.e_code == lc.LSV2StatusCode.T_ER_DEL_DIR: - self._logger.debug( - "could not delete directory %s since it is not empty", dir_path - ) + self._logger.debug("could not delete directory %s since it is not empty", dir_path) return False self._logger.warning( @@ -869,8 +805,7 @@ def delete_file(self, file_path: str) -> bool: return True if self.last_error.e_code == lc.LSV2StatusCode.T_ER_NO_DELETE: - self._logger.info( - "could not delete file %s since it is in use", file_path) + self._logger.info("could not delete file %s since it is in use", file_path) return False self._logger.warning( @@ -923,9 +858,7 @@ def copy_remote_file(self, source_path: str, target_path: str) -> bool: self._logger.debug("successfully copied file %s", source_path) return True - self._logger.warning( - "an error occurred copying file %s to %s", source_path, target_path - ) + self._logger.warning("an error occurred copying file %s to %s", source_path, target_path) return False def move_file(self, source_path: str, target_path: str) -> bool: @@ -986,9 
+919,7 @@ def move_file(self, source_path: str, target_path: str) -> bool: ) return False - self._logger.warning( - "an error occurred moving file %s to %s", source_path, target_path - ) + self._logger.warning("an error occurred moving file %s to %s", source_path, target_path) return False def send_file( @@ -1023,11 +954,8 @@ def send_file( local_file = local_path if not local_file.is_file(): - self._logger.warning( - "the supplied path %s did not resolve to a file", local_file - ) - raise LSV2StateException( - "local file does not exist! {}".format(local_file)) + self._logger.warning("the supplied path %s did not resolve to a file", local_file) + raise LSV2StateException("local file does not exist! {}".format(local_file)) remote_path = remote_path.replace("/", lc.PATH_SEP) @@ -1039,38 +967,27 @@ def send_file( remote_file_name = remote_path.split(lc.PATH_SEP)[-1] remote_directory = remote_path.rstrip(remote_file_name) if not self.change_directory(remote_directory=remote_directory): - raise LSV2StateException( - "could not open the destination directory {}".format( - remote_directory - ) - ) + raise LSV2StateException("could not open the destination directory {}".format(remote_directory)) else: remote_file_name = remote_path remote_directory = self.directory_info().path # get pwd remote_directory = remote_directory.rstrip(lc.PATH_SEP) if not self.directory_info(remote_directory): - self._logger.debug( - "remote path does not exist, create directory(s)") + self._logger.debug("remote path does not exist, create directory(s)") self.make_directory(remote_directory) - remote_info = self.file_info( - remote_directory + lc.PATH_SEP + remote_file_name) + remote_info = self.file_info(remote_directory + lc.PATH_SEP + remote_file_name) if remote_info: self._logger.debug("remote path exists and points to file's") if override_file: - if not self.delete_file( - remote_directory + lc.PATH_SEP + remote_file_name - ): + if not self.delete_file(remote_directory + lc.PATH_SEP + 
remote_file_name): raise LSV2StateException( - "something went wrong while deleting file {}".format( - remote_directory + lc.PATH_SEP + remote_file_name - ) + "something went wrong while deleting file {}".format(remote_directory + lc.PATH_SEP + remote_file_name) ) else: - self._logger.warning( - "remote file already exists, override was not set") + self._logger.warning("remote file already exists, override was not set") return False self._logger.debug( @@ -1079,8 +996,7 @@ def send_file( remote_directory + lc.PATH_SEP + remote_file_name, ) - payload = lm.ustr_to_ba( - remote_directory + lc.PATH_SEP + remote_file_name) + payload = lm.ustr_to_ba(remote_directory + lc.PATH_SEP + remote_file_name) if binary_mode or lm.is_file_binary(local_path): payload.append(lc.MODE_BINARY) self._logger.debug("selecting binary transfer mode") @@ -1097,9 +1013,7 @@ def send_file( with local_file.open("rb") as input_buffer: while True: # use current buffer size but reduce by 10 to make sure it fits together with command and size - buffer = bytearray( - input_buffer.read(self._llcom.buffer_size - 8 - 2) - ) + buffer = bytearray(input_buffer.read(self._llcom.buffer_size - 8 - 2)) if not buffer: # finished reading file break @@ -1148,9 +1062,7 @@ def send_file( lt.get_error_text(self.last_error), ) else: - self._logger.warning( - "could not send file with error %s", self._llcom.last_response - ) + self._logger.warning("could not send file with error %s", self._llcom.last_response) return False return True @@ -1193,14 +1105,11 @@ def recive_file( elif local_file.is_file(): # self._logger.debug("local path exists and points to file") if not override_file: - self._logger.warning( - "local file already exists and override was not set. nothing to do" - ) + self._logger.warning("local file already exists and override was not set. 
nothing to do") return False local_file.unlink() - self._logger.debug("loading file from %s to %s", - remote_path, local_file) + self._logger.debug("loading file from %s to %s", remote_path, local_file) payload = lm.ustr_to_ba(remote_path) @@ -1222,8 +1131,7 @@ def recive_file( out_file.write(content) else: out_file.write(content.replace(b"\x00", b"\r\n")) - self._logger.debug( - "received first block of file file %s", remote_path) + self._logger.debug("received first block of file file %s", remote_path) while True: content = self._llcom.telegram( @@ -1234,9 +1142,7 @@ def recive_file( out_file.write(content) else: out_file.write(content.replace(b"\x00", b"\r\n")) - self._logger.debug( - "received %d more bytes for file", len(content) - ) + self._logger.debug("received %d more bytes for file", len(content)) elif self._llcom.last_response in lc.RSP.T_FD: self._logger.info("finished loading file") break @@ -1245,10 +1151,7 @@ def recive_file( "something went wrong while receiving file data %s", remote_path, ) - if ( - self._llcom.last_response is lc.RSP.T_ER - or self._llcom.last_response is lc.RSP.T_BD - ): + if self._llcom.last_response is lc.RSP.T_ER or self._llcom.last_response is lc.RSP.T_BD: self._logger.warning( "an error occurred while loading the first block of data %s '%s'", self.last_error, @@ -1256,10 +1159,7 @@ def recive_file( ) return False else: - if ( - self._llcom.last_response is lc.RSP.T_ER - or self._llcom.last_response is lc.RSP.T_BD - ): + if self._llcom.last_response is lc.RSP.T_ER or self._llcom.last_response is lc.RSP.T_BD: self._logger.warning( "an error occurred while loading the first block of data for file %s, %s '%s'", remote_path, @@ -1267,9 +1167,7 @@ def recive_file( lt.get_error_text(self.last_error), ) else: - self._logger.warning( - "could not load file with error %s", self._llcom.last_response - ) + self._logger.warning("could not load file with error %s", self._llcom.last_response) return False self._logger.info( @@ -1281,9 
+1179,7 @@ def recive_file( return True - def read_plc_memory( - self, first_element: int, mem_type: lc.MemoryType, number_of_elements: int = 1 - ) -> list: + def read_plc_memory(self, first_element: int, mem_type: lc.MemoryType, number_of_elements: int = 1) -> list: """ Read data from plc memory. Requires access level ``PLCDEBUG`` to work. @@ -1363,17 +1259,14 @@ def read_plc_memory( if (first_element + number_of_elements) > max_elements: raise LSV2InputException( - "highest address is %d but address of last requested element is %d" - % (max_elements, (first_element + number_of_elements)) + "highest address is %d but address of last requested element is %d" % (max_elements, (first_element + number_of_elements)) ) plc_values = [] if mem_type is lc.MemoryType.STRING: for i in range(number_of_elements): - address = ( - start_address + first_element * mem_byte_count + i * mem_byte_count - ) + address = start_address + first_element * mem_byte_count + i * mem_byte_count payload = bytearray() payload.extend(struct.pack("!L", address)) @@ -1388,9 +1281,7 @@ def read_plc_memory( unpack_string = "{}s".format(len(result)) - plc_values.append( - lm.ba_to_ustr(struct.unpack(unpack_string, result)[0]) - ) + plc_values.append(lm.ba_to_ustr(struct.unpack(unpack_string, result)[0])) else: logging.error( "failed to read string %d from address %d", @@ -1400,10 +1291,8 @@ def read_plc_memory( return [] else: - max_elements_per_transfer = math.floor( - 255 / mem_byte_count) - 1 # subtract 1 for safety - num_groups = math.ceil( - number_of_elements / max_elements_per_transfer) + max_elements_per_transfer = math.floor(255 / mem_byte_count) - 1 # subtract 1 for safety + num_groups = math.ceil(number_of_elements / max_elements_per_transfer) logging.debug( "memory type allows %d elements per telegram, split request into %d group(s)", max_elements_per_transfer, @@ -1423,14 +1312,11 @@ def read_plc_memory( address = start_address + first_element_in_group * mem_byte_count - logging.debug( - 
"current transfer group %d has %d elements", i, elements_in_group - ) + logging.debug("current transfer group %d has %d elements", i, elements_in_group) payload = bytearray() payload.extend(struct.pack("!L", address)) - payload.extend(struct.pack( - "!B", elements_in_group * mem_byte_count)) + payload.extend(struct.pack("!B", elements_in_group * mem_byte_count)) result = self._send_recive(lc.CMD.R_MB, payload, lc.RSP.S_MB) if isinstance(result, (bytearray,)): logging.debug( @@ -1439,11 +1325,7 @@ def read_plc_memory( first_element_in_group, ) for j in range(0, len(result), mem_byte_count): - plc_values.append( - struct.unpack( - unpack_string, result[j: j + mem_byte_count] - )[0] - ) + plc_values.append(struct.unpack(unpack_string, result[j : j + mem_byte_count])[0]) else: logging.error( "failed to read value from address %d", @@ -1457,8 +1339,7 @@ def read_plc_memory( logging.debug("read a total of %d value(s)", len(plc_values)) if len(plc_values) != number_of_elements: raise LSV2DataException( - "number of received values %d is not equal to number of requested %d" - % (len(plc_values), number_of_elements) + "number of received values %d is not equal to number of requested %d" % (len(plc_values), number_of_elements) ) return plc_values @@ -1471,8 +1352,7 @@ def set_keyboard_access(self, unlocked: bool) -> bool: :param unlocked: if ``True`` unlocks the keyboard so it can be used. 
If ``False``, input is set to locked """ if self.versions.is_tnc7(): - self._logger.warning( - "this function might not be supported on TNC7") + self._logger.warning("this function might not be supported on TNC7") if not self.login(lc.Login.MONITOR): self._logger.warning("clould not log in as user MONITOR") @@ -1491,9 +1371,7 @@ def set_keyboard_access(self, unlocked: bool) -> bool: else: self._logger.debug("command to lock keyboard was successful") return True - self._logger.warning( - "an error occurred changing the state of the keyboard lock" - ) + self._logger.warning("an error occurred changing the state of the keyboard lock") return False def get_machine_parameter(self, name: str) -> str: @@ -1515,18 +1393,13 @@ def get_machine_parameter(self, name: str) -> str: result = self._send_recive(lc.CMD.R_MC, payload, lc.RSP.S_MC) if isinstance(result, (bytearray,)) and len(result) > 0: value = lm.ba_to_ustr(result) - self._logger.debug( - "machine parameter %s has value %s", name, value) + self._logger.debug("machine parameter %s has value %s", name, value) return value - self._logger.warning( - "an error occurred while reading machine parameter %s", name - ) + self._logger.warning("an error occurred while reading machine parameter %s", name) return "" - def set_machine_parameter( - self, name: str, value: str, safe_to_disk: bool = False - ) -> bool: + def set_machine_parameter(self, name: str, value: str, safe_to_disk: bool = False) -> bool: """ Set machine parameter on control. Writing a parameter takes some time, make sure to set timeout sufficiently high! 
@@ -1584,8 +1457,7 @@ def send_key_code(self, key_code: Union[lc.KeyCode, lc.OldKeyCode]) -> bool: :param key_code: code number of the keyboard key """ if self.versions.is_tnc7(): - self._logger.warning( - "this function might not be supported on TNC7") + self._logger.warning("this function might not be supported on TNC7") if not self.login(lc.Login.MONITOR): self._logger.warning("clould not log in as user MONITOR") @@ -1596,13 +1468,10 @@ def send_key_code(self, key_code: Union[lc.KeyCode, lc.OldKeyCode]) -> bool: result = self._send_recive(lc.CMD.C_EK, payload, lc.RSP.T_OK) if result: - self._logger.debug( - "sending the key code %d was successful", key_code) + self._logger.debug("sending the key code %d was successful", key_code) return True - self._logger.warning( - "an error occurred while sending the key code %d", key_code - ) + self._logger.warning("an error occurred while sending the key code %d", key_code) return False def spindle_tool_status(self) -> Union[ld.ToolInformation, None]: @@ -1620,12 +1489,9 @@ def spindle_tool_status(self) -> Union[ld.ToolInformation, None]: result = self._send_recive(lc.CMD.R_RI, payload, lc.RSP.S_RI) if isinstance(result, (bytearray,)) and len(result) > 0: tool_info = lm.decode_tool_info(result) - self._logger.debug( - "successfully read info on current tool: %s", tool_info) + self._logger.debug("successfully read info on current tool: %s", tool_info) return tool_info - self._logger.warning( - "an error occurred while querying current tool information. This does not work for all control types" - ) + self._logger.warning("an error occurred while querying current tool information. 
This does not work for all control types") return None def override_state(self) -> Union[ld.OverrideState, None]: @@ -1643,12 +1509,9 @@ def override_state(self) -> Union[ld.OverrideState, None]: result = self._send_recive(lc.CMD.R_RI, payload, lc.RSP.S_RI) if isinstance(result, (bytearray,)) and len(result) > 0: override_info = lm.decode_override_state(result) - self._logger.debug( - "successfully read override info: %s", override_info) + self._logger.debug("successfully read override info: %s", override_info) return override_info - self._logger.warning( - "an error occurred while querying current override information. This does not work for all control types" - ) + self._logger.warning("an error occurred while querying current override information. This does not work for all control types") return None def get_error_messages(self) -> List[ld.NCErrorMessage]: @@ -1671,8 +1534,7 @@ def get_error_messages(self) -> List[ld.NCErrorMessage]: payload = bytearray() payload.extend(struct.pack("!H", lc.ParRRI.NEXT_ERROR)) result = self._send_recive(lc.CMD.R_RI, payload, lc.RSP.S_RI) - self._logger.debug( - "successfully read first error but further errors") + self._logger.debug("successfully read first error but further errors") while isinstance(result, (bytearray,)): messages.append(lm.decode_error_message(result)) @@ -1681,20 +1543,15 @@ def get_error_messages(self) -> List[ld.NCErrorMessage]: if self.last_error.e_code is lc.LSV2StatusCode.T_ER_NO_NEXT_ERROR: self._logger.debug("successfully read all errors") else: - self._logger.warning( - "an error occurred while querying error information." 
- ) + self._logger.warning("an error occurred while querying error information.") return messages if self.last_error.e_code is lc.LSV2StatusCode.T_ER_NO_NEXT_ERROR: - self._logger.debug( - "successfully read first error but no error active") + self._logger.debug("successfully read first error but no error active") return messages - self._logger.warning( - "an error occurred while querying error information. This does not work for all control types" - ) + self._logger.warning("an error occurred while querying error information. This does not work for all control types") return [] @@ -1714,9 +1571,7 @@ def _walk_dir(self, descend: bool = True) -> List[str]: for entry in self.directory_content(): if entry.name == "." or entry.name == ".." or entry.name.endswith(":"): continue - current_fs_element = str(current_path + entry.name).replace( - "/", lc.PATH_SEP - ) + current_fs_element = str(current_path + entry.name).replace("/", lc.PATH_SEP) if entry.is_directory is True and descend is True: if self.change_directory(current_fs_element): content.extend(self._walk_dir()) @@ -1725,9 +1580,7 @@ def _walk_dir(self, descend: bool = True) -> List[str]: self.change_directory(current_path) return content - def get_file_list( - self, path: str = "", descend: bool = True, pattern: str = "" - ) -> List[str]: + def get_file_list(self, path: str = "", descend: bool = True, pattern: str = "") -> List[str]: """ Get list of files in directory structure. Requires access level ``FILETRANSFER`` to work. @@ -1768,9 +1621,7 @@ def read_data_path(self, path: str) -> Union[bool, int, float, str, None]: :raises LSV2ProtocolException: if data type could not be determiend """ if not self.versions.is_itnc(): - self._logger.warning( - "Reading values from data path does not work on non iTNC controls!" 
- ) + self._logger.warning("Reading values from data path does not work on non iTNC controls!") return None path = path.replace("/", lc.PATH_SEP).replace('"', "'") @@ -1805,22 +1656,14 @@ def read_data_path(self, path: str) -> Union[bool, int, float, str, None]: elif value_type == 17: data_value = struct.unpack("!B", result[4:5])[0] else: - raise LSV2ProtocolException( - "unknown return type: %d for %s" % (value_type, result[4:]) - ) + raise LSV2ProtocolException("unknown return type: %d for %s" % (value_type, result[4:])) - self._logger.info( - "successfully read data path: %s and got value '%s'", path, data_value - ) + self._logger.info("successfully read data path: %s and got value '%s'", path, data_value) return data_value elif self.last_error.e_code == lc.LSV2StatusCode.T_ER_WRONG_PARA: - self._logger.warning( - "the argument '%s' is not supported by this control", path) + self._logger.warning("the argument '%s' is not supported by this control", path) return None - self._logger.warning( - "an error occurred while querying data path '%s'. Error code was %d", - path, self.last_error.e_code - ) + self._logger.warning("an error occurred while querying data path '%s'. 
Error code was %d", path, self.last_error.e_code) return None def axes_location(self) -> Union[Dict[str, float], None]: @@ -1859,13 +1702,7 @@ def grab_screen_dump(self, image_path: pathlib.Path) -> bool: self._logger.warning("clould not log in as user FILE") return False - temp_file_path = ( - lc.DriveName.TNC - + lc.PATH_SEP - + "screendump_" - + datetime.now().strftime("%Y%m%d_%H%M%S") - + ".bmp" - ) + temp_file_path = lc.DriveName.TNC + lc.PATH_SEP + "screendump_" + datetime.now().strftime("%Y%m%d_%H%M%S") + ".bmp" payload = bytearray(struct.pack("!H", lc.ParCCC.SCREENDUMP)) payload.extend(lm.ustr_to_ba(temp_file_path)) @@ -1876,9 +1713,7 @@ def grab_screen_dump(self, image_path: pathlib.Path) -> bool: self._logger.warning("screen dump was not created") return False - if not self.recive_file( - remote_path=temp_file_path, local_path=image_path, binary_mode=True - ): + if not self.recive_file(remote_path=temp_file_path, local_path=image_path, binary_mode=True): self._logger.warning("could not download screen dump from control") return False @@ -1894,8 +1729,7 @@ def get_remote_datetime(self) -> datetime: Read current time and date from control """ if not self.login(lc.Login.DIAG): - self._logger.warning( - "clould not log in as user for DIAGNOSTICS function") + self._logger.warning("clould not log in as user for DIAGNOSTICS function") return datetime.fromtimestamp(0) result = self._send_recive(lc.CMD.R_DT, None, lc.RSP.S_DT) @@ -1903,9 +1737,7 @@ def get_remote_datetime(self) -> datetime: ts = lm.decode_timestamp(result) self._logger.debug("Time on Control is %s", ts.isoformat()) else: - raise LSV2ProtocolException( - "something went wrong while reading current time and date" - ) + raise LSV2ProtocolException("something went wrong while reading current time and date") return ts def read_scope_signals(self) -> List[ld.ScopeSignal]: @@ -1919,8 +1751,7 @@ def read_scope_signals(self) -> List[ld.ScopeSignal]: return list() if not self.login(lc.Login.SCOPE): - 
self._logger.warning( - "clould not log in as user for scope function") + self._logger.warning("clould not log in as user for scope function") return list() channel_list = list() @@ -1935,23 +1766,15 @@ def read_scope_signals(self) -> List[ld.ScopeSignal]: if self._llcom.last_response in lc.RSP.S_OC: channel_list.extend(lms.decode_signal_description(content)) elif self._llcom.last_response in lc.RSP.T_FD: - self._logger.info( - "finished loading and parsing data for all scope signals" - ) + self._logger.info("finished loading and parsing data for all scope signals") break else: - self._logger.error( - "something went wrong while reading scope signal" - ) - raise LSV2ProtocolException( - "did not received expected response while reading data for scope signals" - ) + self._logger.error("something went wrong while reading scope signal") + raise LSV2ProtocolException("did not received expected response while reading data for scope signals") return channel_list - def real_time_readings( - self, signal_list: List[ld.ScopeSignal], duration: int, interval: int - ): + def real_time_readings(self, signal_list: List[ld.ScopeSignal], duration: int, interval: int): """ Read signal readings from control in real time. Only works for iTNC 530. Before reading data, the signal description is updated with information regardinf offset and factor. @@ -1968,8 +1791,7 @@ def real_time_readings( return list() if not self.login(lc.Login.SCOPE): - self._logger.warning( - "clould not log in as user for scope function") + self._logger.warning("clould not log in as user for scope function") return list() self._logger.debug( @@ -1983,12 +1805,8 @@ def real_time_readings( payload.extend(struct.pack("!L", interval)) for signal in signal_list: if interval not in [600, 3000, 21000]: - self._logger.warning( - "the selected interval doesn't fit for signals readings!" 
- ) - raise LSV2ProtocolException( - "the selected interval must be: 600 or 3000 or 21000 us" - ) + self._logger.warning("the selected interval doesn't fit for signals readings!") + raise LSV2ProtocolException("the selected interval must be: 600 or 3000 or 21000 us") payload.extend(signal.to_ba()) result = self._send_recive(lc.CMD.R_OP, payload, lc.RSP.S_OP) @@ -1996,16 +1814,13 @@ def real_time_readings( signal_list = lms.decode_signal_details(signal_list, result) else: if self.last_error.e_code == 85: - self._logger.warning( - "too many signals selected: %d", len(signal_list)) + self._logger.warning("too many signals selected: %d", len(signal_list)) raise LSV2ProtocolException("too many signals selected???") if self.last_error.e_code == lc.LSV2StatusCode.T_ER_OSZI_CHSEL: self._logger.warning("Error setting up the channels") raise LSV2ProtocolException("Error setting up the channels") - self._logger.warning( - "Error while configuring interval and signals") - raise LSV2ProtocolException( - "Error while configuring interval and signals") + self._logger.warning("Error while configuring interval and signals") + raise LSV2ProtocolException("Error while configuring interval and signals") # setup trigger and read data from control payload = bytearray() @@ -2020,11 +1835,8 @@ def real_time_readings( content = self._send_recive(lc.CMD.R_OD, payload, lc.RSP.S_OD) if not isinstance(content, (bytearray,)) or len(content) <= 0: - self._logger.error( - "something went wrong while reading first data package for signals" - ) - raise LSV2ProtocolException( - "something went wrong while reading scope data") + self._logger.error("something went wrong while reading first data package for signals") + raise LSV2ProtocolException("something went wrong while reading scope data") recorded_data.append(lms.decode_scope_reading(signal_list, content)) end = time.time() @@ -2032,13 +1844,10 @@ def real_time_readings( while timer < duration: content = self._llcom.telegram(lc.RSP.T_OK) if 
self._llcom.last_response in lc.RSP.S_OD: - recorded_data.append( - lms.decode_scope_reading(signal_list, content)) + recorded_data.append(lms.decode_scope_reading(signal_list, content)) yield recorded_data[0] else: - self._logger.warning( - "something went wrong during periodically reading scope data, abort reading" - ) + self._logger.warning("something went wrong during periodically reading scope data, abort reading") break end = time.time() timer = end - start diff --git a/pyLSV2/dat_cls.py b/pyLSV2/dat_cls.py index b2e6928..677c8f8 100644 --- a/pyLSV2/dat_cls.py +++ b/pyLSV2/dat_cls.py @@ -1150,9 +1150,7 @@ def normalized_name(self) -> str: class ScopeSignalData: - def __init__( - self, channel: int, signal: int, offset: int, factor: float, unit: str - ): + def __init__(self, channel: int, signal: int, offset: int, factor: float, unit: str): self._channel = channel self._signal = signal self._offset = offset diff --git a/pyLSV2/low_level_com.py b/pyLSV2/low_level_com.py index 4a43aee..c1097ee 100644 --- a/pyLSV2/low_level_com.py +++ b/pyLSV2/low_level_com.py @@ -36,9 +36,7 @@ def __init__(self, hostname: str, port: int = 19000, timeout: float = 15.0): try: self._host_ip = socket.gethostbyname(hostname) except socket.gaierror: - logging.error( - "there was an error getting the IP for the hostname %s", hostname - ) + logging.error("there was an error getting the IP for the hostname %s", hostname) raise self._port = self.DEFAULT_PORT @@ -182,10 +180,7 @@ def telegram( telegram, ) if len(telegram) >= self.buffer_size: - raise OverflowError( - "telegram to long for set current buffer size: %d >= %d" - % (len(telegram), self.buffer_size) - ) + raise OverflowError("telegram to long for set current buffer size: %d >= %d" % (len(telegram), self.buffer_size)) data_recived = bytearray() try: @@ -201,22 +196,16 @@ def telegram( raise if len(data_recived) > 0: - self._logger.debug( - "received block of data with length %d", len(data_recived) - ) + 
self._logger.debug("received block of data with length %d", len(data_recived)) if len(data_recived) >= 8: # read 4 bytes for response length response_length = struct.unpack("!L", data_recived[0:4])[0] # read 4 bytes for response type - self._last_lsv2_response = RSP( - data_recived[4:8].decode("utf-8", "ignore") - ) + self._last_lsv2_response = RSP(data_recived[4:8].decode("utf-8", "ignore")) else: # response is less than 8 bytes long which is not enough space for package length and response message! - raise LSV2ProtocolException( - "response to short, less than 8 bytes: %s" % data_recived - ) + raise LSV2ProtocolException("response to short, less than 8 bytes: %s" % data_recived) else: response_length = 0 self._last_lsv2_response = RSP.NONE @@ -229,9 +218,7 @@ def telegram( len(response_content) < response_length, ) try: - response_content.extend( - self._tcpsock.recv(response_length - len(data_recived[8:])) - ) + response_content.extend(self._tcpsock.recv(response_length - len(data_recived[8:]))) except Exception: self._logger.error( "something went wrong while waiting for more data to arrive. 
expected %d, received %d, content so far: %s", diff --git a/pyLSV2/misc.py b/pyLSV2/misc.py index 6370bb6..f087c5a 100644 --- a/pyLSV2/misc.py +++ b/pyLSV2/misc.py @@ -26,9 +26,7 @@ def decode_system_parameters(result_set: bytearray) -> ld.SystemParameters: elif message_length == 124: info_list = struct.unpack("!14L8B8L2BH4B2L2HLL", result_set) else: - raise LSV2DataException( - "unexpected length %s of message content %s" % (message_length, result_set) - ) + raise LSV2DataException("unexpected length %s of message content %s" % (message_length, result_set)) sys_par = ld.SystemParameters() sys_par.markers_start_address = info_list[0] sys_par.number_of_markers = info_list[1] @@ -105,9 +103,7 @@ def decode_system_information(data_set: bytearray) -> Union[bool, int]: return data_set -def decode_file_system_info( - data_set: bytearray, control_type: ControlType = ControlType.UNKNOWN -) -> ld.FileEntry: +def decode_file_system_info(data_set: bytearray, control_type: ControlType = ControlType.UNKNOWN) -> ld.FileEntry: """ Decode result from file system entry @@ -186,12 +182,8 @@ def decode_drive_info(data_set: bytearray) -> List[ld.DriveEntry]: while (offset + fixed_length + 1) < len(data_set): drive_entry = ld.DriveEntry() drive_entry.unknown_0 = struct.unpack("!L", data_set[offset : offset + 4])[0] - drive_entry.unknown_1 = struct.unpack("!4s", data_set[offset + 4 : offset + 8])[ - 0 - ] - drive_entry.unknown_2 = struct.unpack("!L", data_set[offset + 8 : offset + 12])[ - 0 - ] + drive_entry.unknown_1 = struct.unpack("!4s", data_set[offset + 4 : offset + 8])[0] + drive_entry.unknown_2 = struct.unpack("!L", data_set[offset + 8 : offset + 12])[0] if chr(data_set[offset + fixed_length]) == ":": drive_entry.name = ba_to_ustr(data_set[offset + 12 : offset + 17]) @@ -235,9 +227,7 @@ def decode_tool_info(data_set: bytearray) -> ld.ToolInformation: tool_info = ld.ToolInformation() tool_info.number = struct.unpack("!L", data_set[0:4])[0] tool_info.index = struct.unpack("!H", 
data_set[4:6])[0] - tool_info.axis = {0: "X", 1: "Y", 2: "Z"}.get( - struct.unpack("!H", data_set[6:8])[0], "unknown" - ) + tool_info.axis = {0: "X", 1: "Y", 2: "Z"}.get(struct.unpack("!H", data_set[6:8])[0], "unknown") if len(data_set) > 8: tool_info.length = struct.unpack(" List[ld.ScopeSignal]: name_end += 1 channel_name = lm.ba_to_ustr(data_set[name_start:name_end]) if data_set[10:46] != bytearray(b"\x00" * 36): - raise LSV2DataException( - "unexpected data in channel description in bytes 10 to 45: %s" - % data_set[10:46] - ) + raise LSV2DataException("unexpected data in channel description in bytes 10 to 45: %s" % data_set[10:46]) interval_value_1 = struct.unpack("!H", data_set[2:4])[0] interval_value_2 = struct.unpack("!H", data_set[8:10])[0] if interval_value_1 != interval_value_2: - raise LSV2DataException( - "error in decoding of channel description data: %s" % data_set - ) + raise LSV2DataException("error in decoding of channel description data: %s" % data_set) min_interval = interval_value_1 type_num = struct.unpack("!H", data_set[4:6])[0] @@ -62,9 +57,7 @@ def decode_signal_description(data_set: bytearray) -> List[ld.ScopeSignal]: channel_type = lc.ChannelType(type_num) if not data_set[6:8] == bytearray(b"\x00\x00"): - raise LSV2DataException( - "unexpected values in bytes 6 and 7: %s" % data_set[6:8] - ) + raise LSV2DataException("unexpected values in bytes 6 and 7: %s" % data_set[6:8]) if channel_type in [lc.ChannelType.TYPE1, lc.ChannelType.TYPE4]: if len(data_set) not in [98, 106]: raise LSV2DataException("unexpected length of data for channel type 1 or 4") @@ -110,9 +103,7 @@ def decode_signal_description(data_set: bytearray) -> List[ld.ScopeSignal]: return signals -def decode_signal_details( - signal_list: List[ld.ScopeSignal], data_set: bytearray -) -> List[ld.ScopeSignal]: +def decode_signal_details(signal_list: List[ld.ScopeSignal], data_set: bytearray) -> List[ld.ScopeSignal]: """ Decode the detailed description of a signal after selecting 
them @@ -130,10 +121,7 @@ def split_dataset(data): logger.debug("R_OP dataset has expected length") for i, data_sub_set in enumerate(split_dataset(data_set)): if data_sub_set[17:] != bytearray(b"?\x00\x00\x00\x00"): - raise Exception( - "unexpected data in signal details at position 17 %s" - % data_sub_set[17:] - ) + raise Exception("unexpected data in signal details at position 17 %s" % data_sub_set[17:]) signal_list[i].unit = lm.ba_to_ustr(data_sub_set[0:10]) signal_list[i].factor = struct.unpack(" list: """list of columns used in this table""" return self._columns - def append_column( - self, name: str, start: int, end: int, width: int = 0, empty_value=None - ): + def append_column(self, name: str, start: int, end: int, width: int = 0, empty_value=None): """add column to the table format""" self._columns.append(name) if width == 0: @@ -167,9 +165,7 @@ def update_column_format(self, name: str, parameters: dict): def _get_column_names(self): """get list of columns used in this table""" - raise DeprecationWarning( - "Do not use this function anymore! Use ```column_names```" - ) + raise DeprecationWarning("Do not use this function anymore! 
Use ```column_names```") def append_row(self, row): """add a data entry to the table""" @@ -204,14 +200,11 @@ def dump_native(self, file_path: pathlib.Path, renumber_column=None): version_string = " Version:%s" % str(self._version) with open(file_path, "w", encoding="ascii") as tfp: - tfp.write("BEGIN %s%s%s\n" % - (file_name, units_string, version_string)) + tfp.write("BEGIN %s%s%s\n" % (file_name, units_string, version_string)) for column_name in self._columns: if column_name not in self._column_format: - raise Exception( - "configuration is incomplete, missing definition for column {column_name:s}" - ) + raise Exception("configuration is incomplete, missing definition for column {column_name:s}") fixed_width = self._column_format[column_name]["width"] format_string = "{0:<%d}" % fixed_width tfp.write(format_string.format(column_name)) @@ -233,16 +226,9 @@ def dump_native(self, file_path: pathlib.Path, renumber_column=None): "entry is missing optional column %s defined in output format, replace with empty value", column_name, ) - tfp.write( - format_string.format( - self._column_format[column_name]["empty_value"] - ) - ) + tfp.write(format_string.format(self._column_format[column_name]["empty_value"])) else: - raise Exception( - "entry is missing a value for column %s defined in the output format" - % column_name - ) + raise Exception("entry is missing a value for column %s defined in the output format" % column_name) tfp.write("\n") row_counter += 1 @@ -254,8 +240,7 @@ def dump_csv(self, file_path: pathlib.Path, decimal_char: str = "."): :param file_path: file location for csv file """ - self._logger.debug( - "write table to csv, using decimal char '%s'", decimal_char) + self._logger.debug("write table to csv, using decimal char '%s'", decimal_char) def localize_floats(row): float_pattern = re.compile(r"^[+-]?\d+\.\d+$") @@ -277,9 +262,7 @@ def localize_floats(row): csv_writer.writerow(localize_floats(row)) self._logger.info("csv file saved successfully") - def 
find_string( - self, column_name: str, search_value: Union[str, re.Pattern] - ) -> list: + def find_string(self, column_name: str, search_value: Union[str, re.Pattern]) -> list: """ search for string rows by string or pattern returns list of lines that contain the search result @@ -289,20 +272,12 @@ def find_string( """ search_results = [] if not column_name in self._columns: - self._logger.error( - "column with name %s not part of this table", column_name - ) + self._logger.error("column with name %s not part of this table", column_name) else: if isinstance(search_value, (str,)): - search_results = [ - itm for itm in self._content if search_value in itm[column_name] - ] + search_results = [itm for itm in self._content if search_value in itm[column_name]] elif isinstance(search_value, (re.Pattern,)): - search_results = [ - itm - for itm in self._content - if search_value.match(itm[column_name]) is not None - ] + search_results = [itm for itm in self._content if search_value.match(itm[column_name]) is not None] return search_results @staticmethod @@ -333,10 +308,7 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": ) if header is None: - raise Exception( - "File has wrong format: incorrect header for file %s" - % table_path - ) + raise Exception("File has wrong format: incorrect header for file %s" % table_path) nctable.name = header.group("name").strip() nctable.suffix = header.group("suffix") @@ -414,11 +386,7 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": table_entry = {} for column in nctable.column_names: - table_entry[column] = line[ - nctable.get_column_start(column): nctable.get_column_end( - column - ) - ].strip() + table_entry[column] = line[nctable.get_column_start(column) : nctable.get_column_end(column)].strip() nctable.append_row(table_entry) logger.debug("Found %d entries", len(nctable.rows)) @@ -428,12 +396,8 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": for c_d in table_config["TableDescription"]["columns"]: 
cfg_column_name = c_d["CfgColumnDescription"]["key"] if cfg_column_name not in nctable.column_names: - raise Exception( - "found unexpected column %s" % cfg_column_name - ) - if c_d["CfgColumnDescription"][ - "width" - ] != nctable.get_column_width(cfg_column_name): + raise Exception("found unexpected column %s" % cfg_column_name) + if c_d["CfgColumnDescription"]["width"] != nctable.get_column_width(cfg_column_name): raise Exception( "found difference in column width for colmun %s: %d : %d" % ( @@ -442,9 +406,7 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": nctable.get_column_width(cfg_column_name), ) ) - nctable.update_column_format( - cfg_column_name, c_d["CfgColumnDescription"] - ) + nctable.update_column_format(cfg_column_name, c_d["CfgColumnDescription"]) except UnicodeDecodeError: logger.error("File has invalid utf-8 encoding") @@ -508,8 +470,7 @@ def str_to_typed_value(value_string: str): if isinstance(last_object, (list,)): if ":=" in line: parts = line.split(":=") - last_object.append( - {parts[0]: str_to_typed_value(parts[1])}) + last_object.append({parts[0]: str_to_typed_value(parts[1])}) else: last_object.append(line) @@ -545,9 +506,7 @@ def from_json_format(file_path: pathlib.Path) -> "NCTable": end=json_data["column_config"][column]["end"], ) if "empty_value" in json_data["column_config"][column]: - nct.set_column_empty_value( - column, json_data["column_config"][column]["empty_value"] - ) + nct.set_column_empty_value(column, json_data["column_config"][column]["empty_value"]) return nct @staticmethod diff --git a/pyLSV2/translate_messages.py b/pyLSV2/translate_messages.py index f0dbf10..297826b 100644 --- a/pyLSV2/translate_messages.py +++ b/pyLSV2/translate_messages.py @@ -10,9 +10,7 @@ from .const import ExecState, LSV2StatusCode, PgmState -def get_error_text( - t_error: LSV2Error, language: str = "", locale_path: Union[str, None] = None -) -> str: +def get_error_text(t_error: LSV2Error, language: str = "", locale_path: Union[str, None] 
= None) -> str: """Parse error type and error code and return the error message. :param int error_type: type of error code. @@ -27,9 +25,7 @@ def get_error_text( locale_path = os.path.join(os.path.dirname(__file__), "locales") if len(language) < 2: - translate = gettext.translation( - domain="error_text", localedir=locale_path, fallback=True - ) + translate = gettext.translation(domain="error_text", localedir=locale_path, fallback=True) else: translate = gettext.translation( domain="error_text", @@ -93,9 +89,7 @@ def get_error_text( LSV2StatusCode.T_ER_OUT_OF_RANGE: _("LSV2_ERROR_T_ER_OUT_OF_RANGE"), LSV2StatusCode.T_ER_INVALID_AXIS: _("LSV2_ERROR_T_ER_INVALID_AXIS"), LSV2StatusCode.T_ER_STREAMING_ACTIVE: _("LSV2_ERROR_T_ER_STREAMING_ACTIVE"), - LSV2StatusCode.T_ER_NO_STREAMING_ACTIVE: _( - "LSV2_ERROR_T_ER_NO_STREAMING_ACTIVE" - ), + LSV2StatusCode.T_ER_NO_STREAMING_ACTIVE: _("LSV2_ERROR_T_ER_NO_STREAMING_ACTIVE"), LSV2StatusCode.T_ER_TO_MANY_OPEN_TCP: _("LSV2_ERROR_T_ER_TO_MANY_OPEN_TCP"), LSV2StatusCode.T_ER_NO_FREE_HANDLE: _("LSV2_ERROR_T_ER_NO_FREE_HANDLE"), LSV2StatusCode.T_ER_PLCMEMREMA_CLEAR: _("LSV2_ERROR_T_ER_PLCMEMREMA_CLEAR"), @@ -126,9 +120,7 @@ def get_error_text( }.get(t_error.e_code, _("LSV2_ERROR_UNKNOWN_CODE")) -def get_program_status_text( - code: PgmState, language: str = "", locale_path: Union[str, None] = None -) -> str: +def get_program_status_text(code: PgmState, language: str = "", locale_path: Union[str, None] = None) -> str: """Translate status code of program state to text :param int code: status code of program state @@ -141,9 +133,7 @@ def get_program_status_text( locale_path = os.path.join(os.path.dirname(__file__), "locales") if len(language) < 2: - translate = gettext.translation( - domain="message_text", localedir=locale_path, fallback=True - ) + translate = gettext.translation(domain="message_text", localedir=locale_path, fallback=True) else: translate = gettext.translation( domain="message_text", @@ -168,9 +158,7 @@ def 
get_program_status_text( }.get(code, translate.gettext("PGM_STATE_UNKNOWN")) -def get_execution_status_text( - code: ExecState, language: str = "", locale_path: Union[str, None] = None -): +def get_execution_status_text(code: ExecState, language: str = "", locale_path: Union[str, None] = None): """Translate status code of execution state to text See https://github.com/drunsinn/pyLSV2/issues/1 @@ -184,9 +172,7 @@ def get_execution_status_text( locale_path = os.path.join(os.path.dirname(__file__), "locales") if len(language) < 2: - translate = gettext.translation( - domain="message_text", localedir=locale_path, fallback=True - ) + translate = gettext.translation(domain="message_text", localedir=locale_path, fallback=True) else: translate = gettext.translation( domain="message_text", diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/lsv2_demo.py b/scripts/lsv2_demo.py index e86f206..97772c0 100644 --- a/scripts/lsv2_demo.py +++ b/scripts/lsv2_demo.py @@ -46,11 +46,7 @@ con.connect() print("Basics:") - print( - "# Connected to a '{:s}' running software version '{:s}'".format( - con.versions.control, con.versions.nc_sw - ) - ) + print("# Connected to a '{:s}' running software version '{:s}'".format(con.versions.control, con.versions.nc_sw)) print( "# Using LSV2 version '{:d}' with version flags '0x{:02x}' and '0x{:02x}'".format( con.parameters.lsv2_version, @@ -84,19 +80,11 @@ pgm_stack = con.program_stack() if pgm_stack is not None: print("# selected program: '{:s}'".format(pgm_stack.main)) - print( - "## currently execution '{:s}' on line {:d}".format( - pgm_stack.current, pgm_stack.line_no - ) - ) + print("## currently execution '{:s}' on line {:d}".format(pgm_stack.current, pgm_stack.line_no)) ovr_stat = con.override_state() if ovr_stat is not None: - print( - "# override states: feed {:f}%, rapid {:f}%, spindle {:f}%".format( - ovr_stat.feed, ovr_stat.rapid, ovr_stat.spindle - ) - ) + print("# 
override states: feed {:f}%, rapid {:f}%, spindle {:f}%".format(ovr_stat.feed, ovr_stat.rapid, ovr_stat.spindle)) print("PLC memory:") print("# the first 5 entries for some memory types:") @@ -150,17 +138,9 @@ print("File access") drv_info = con.drive_info() - print( - "# names of disk drives: {:s}".format( - ", ".join([drv.name for drv in drv_info]) - ) - ) + print("# names of disk drives: {:s}".format(", ".join([drv.name for drv in drv_info]))) dir_info = con.directory_info() - print( - "# current directory is '{:s}' with {:d} bytes of free drive space".format( - dir_info.path, dir_info.free_size - ) - ) + print("# current directory is '{:s}' with {:d} bytes of free drive space".format(dir_info.path, dir_info.free_size)) dir_content = con.directory_content() only_files = filter( @@ -169,20 +149,10 @@ ) for file_entry in only_files: - print( - "## file name: {:s}, date {:}, size {:d} bytes".format( - file_entry.name, file_entry.timestamp, file_entry.size - ) - ) - only_dir = filter( - lambda f_e: f_e.is_directory is True and f_e.is_drive is False, dir_content - ) + print("## file name: {:s}, date {:}, size {:d} bytes".format(file_entry.name, file_entry.timestamp, file_entry.size)) + only_dir = filter(lambda f_e: f_e.is_directory is True and f_e.is_drive is False, dir_content) for file_entry in only_dir: - print( - "## directory name: {:s}, date {:}".format( - file_entry.name, file_entry.timestamp - ) - ) + print("## directory name: {:s}, date {:}".format(file_entry.name, file_entry.timestamp)) print("# file search") h_files = con.get_file_list(path="TNC:", pattern=r"[\$A-Za-z0-9_-]*\.[hH]$") @@ -194,10 +164,6 @@ t_info = con.spindle_tool_status() if t_info is not None: print("# direct reading of current tool successful") - print( - "# current tool in spindle: {:d}.{:d} '{:s}'".format( - t_info.number, t_info.index, t_info.name - ) - ) + print("# current tool in spindle: {:d}.{:d} '{:s}'".format(t_info.number, t_info.index, t_info.name)) else: print("# direct 
reading of current tool not supported for this control") diff --git a/scripts/lsv2cmd.py b/scripts/lsv2cmd.py index e2d63e3..eb79a78 100644 --- a/scripts/lsv2cmd.py +++ b/scripts/lsv2cmd.py @@ -17,12 +17,12 @@ __version__ = "1.0" __email__ = "dr.unsinn@googlemail.com" -REMOTE_PATH_REGEX = r"^(?Plsv2(\+ssh)?):\/\/(?P[\w\.-]*)(?::(?P\d{2,5}))?(?:\/(?P(TNC|PLC):))(?P(\/[\$\.\w\d_-]+)*)\/?$" +REMOTE_PATH_REGEX = ( + r"^(?Plsv2(\+ssh)?):\/\/(?P[\w\.-]*)(?::(?P\d{2,5}))?(?:\/(?P(TNC|PLC):))(?P(\/[\$\.\w\d_-]+)*)\/?$" +) if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="command line script for functions in pyLSV2" - ) + parser = argparse.ArgumentParser(description="command line script for functions in pyLSV2") parser.add_argument( "source", help="source file. Either local path or URL with format lsv2://:/TNC:/", @@ -54,9 +54,7 @@ default=logging.WARNING, ) - parser.add_argument( - "-t", "--timeout", help="timeout duration in seconds", type=float, default=10.0 - ) + parser.add_argument("-t", "--timeout", help="timeout duration in seconds", type=float, default=10.0) parser.add_argument( "-f", "--force", @@ -138,9 +136,7 @@ if use_ssh: import sshtunnel - ssh_forwarder = sshtunnel.SSHTunnelForwarder( - host_machine, remote_bind_address=("127.0.0.1", host_port) - ) + ssh_forwarder = sshtunnel.SSHTunnelForwarder(host_machine, remote_bind_address=("127.0.0.1", host_port)) ssh_forwarder.start() host_machine = "127.0.0.1" host_port = ssh_forwarder.local_bind_port @@ -160,9 +156,7 @@ logger.error("source file dose not exist on remote: '%s'", source_path) sys.exit(-3) elif file_info.is_directory or file_info.is_drive: - logger.error( - "source on remote is not file but directory: '%s'", source_path - ) + logger.error("source on remote is not file but directory: '%s'", source_path) sys.exit(-4) else: if os.path.exists(source_path): @@ -181,14 +175,10 @@ success = con.copy_remote_file(source_path=source_path, target_path=dest_path) elif source_is_remote and 
not dest_is_remote: logger.debug("copy from remote to local") - success = con.recive_file( - remote_path=source_path, local_path=dest_path, override_file=args.force - ) + success = con.recive_file(remote_path=source_path, local_path=dest_path, override_file=args.force) else: logger.debug("copy from local to remote") - success = con.send_file( - local_path=source_path, remote_path=dest_path, override_file=args.force - ) + success = con.send_file(local_path=source_path, remote_path=dest_path, override_file=args.force) con.disconnect() if success: diff --git a/scripts/real_time_readings.py b/scripts/real_time_readings.py index f0c398f..7883306 100644 --- a/scripts/real_time_readings.py +++ b/scripts/real_time_readings.py @@ -40,10 +40,7 @@ for package in con.real_time_readings(selected_signals, duration, interval): signal_readings = package.get_data() readings_per_signal = len(signal_readings[0].data) - print( - "successfulle read %d signals with %d values each" - % (len(signal_readings), readings_per_signal) - ) + print("successfulle read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) for i in range(readings_per_signal): # Signal_type = sample[# appending rank]["data"][one_smaple] @@ -53,29 +50,23 @@ if count_high_freq % 5 == 0: # This condition is only for signals of low frequency position_X = round( - signal_readings[0].data[i] * signal_readings[0].factor - + signal_readings[0].offset, + signal_readings[0].data[i] * signal_readings[0].factor + signal_readings[0].offset, 3, ) position_Y = round( - signal_readings[1].data[i] * signal_readings[1].factor - + signal_readings[1].offset, + signal_readings[1].data[i] * signal_readings[1].factor + signal_readings[1].offset, 3, ) position_Z = round( - signal_readings[2].data[i] * signal_readings[2].factor - + signal_readings[2].offset, + signal_readings[2].data[i] * signal_readings[2].factor + signal_readings[2].offset, 3, ) I_nominal_X = round( - signal_readings[3].data[i] * 
signal_readings[3].factor - + signal_readings[3].offset, + signal_readings[3].data[i] * signal_readings[3].factor + signal_readings[3].offset, 3, ) - print( - f"Position X = {position_X} mm , Position Y = {position_Y} , Position Z = {position_Z}, I nominal X = {I_nominal_X} " - ) + print(f"Position X = {position_X} mm , Position Y = {position_Y} , Position Z = {position_Z}, I nominal X = {I_nominal_X} ") fp.write( "Position X = %f mm , Position Y = %f , Position Z = %f , I nominal X = %f\n" % (position_X, position_Y, position_Z, I_nominal_X) diff --git a/scripts/scope2csv.py b/scripts/scope2csv.py index 7adb6c2..bd3d5dd 100644 --- a/scripts/scope2csv.py +++ b/scripts/scope2csv.py @@ -26,9 +26,7 @@ parser.add_argument("host", help="ip or hostname of control", type=str) - parser.add_argument( - "output", help="path of the csv file the data should be written to", type=Path - ) + parser.add_argument("output", help="path of the csv file the data should be written to", type=Path) parser.add_argument( "signals", @@ -37,13 +35,9 @@ type=int, ) - parser.add_argument( - "-a", "--duration", help="number of seconds to record", type=int, default=10 - ) + parser.add_argument("-a", "--duration", help="number of seconds to record", type=int, default=10) - parser.add_argument( - "-i", "--interval", help="number of µs between readings", type=int, default=6000 - ) + parser.add_argument("-i", "--interval", help="number of µs between readings", type=int, default=6000) parser.add_argument( "-d", @@ -103,14 +97,10 @@ sys.exit(-2) if args.interval <= 0: - logging.error( - "the selected interval has to be at least greater than 0: %d", args.interval - ) + logging.error("the selected interval has to be at least greater than 0: %d", args.interval) sys.exit(-3) - with pyLSV2.LSV2( - args.host, port=19000, timeout=args.timeout, safe_mode=False - ) as con: + with pyLSV2.LSV2(args.host, port=19000, timeout=args.timeout, safe_mode=False) as con: availible_signals = con.read_scope_signals() if 
sorted(selected_signals)[-1] > len(availible_signals): @@ -136,15 +126,10 @@ csv.writerow(list(map(lambda x: x.normalized_name(), scope_signals))) readings_counter = 0 - for package in con.real_time_readings( - scope_signals, args.duration, args.interval - ): + for package in con.real_time_readings(scope_signals, args.duration, args.interval): signal_readings = package.get_data() readings_per_signal = len(signal_readings[0].data) - logging.debug( - "successfulle read %d signals with %d values each" - % (len(signal_readings), readings_per_signal) - ) + logging.debug("successfulle read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) for i in range(readings_per_signal): row = list() @@ -154,9 +139,7 @@ csv.writerow(row) readings_counter += 1 - logging.info( - "finished reading data, data was saved to %s", args.output.absolute() - ) + logging.info("finished reading data, data was saved to %s", args.output.absolute()) logging.debug("number of recorded data points %d", readings_counter) for s in scope_signals: diff --git a/scripts/scope_demo.py b/scripts/scope_demo.py index c02bff8..0307a34 100644 --- a/scripts/scope_demo.py +++ b/scripts/scope_demo.py @@ -37,24 +37,12 @@ for package in con.real_time_readings(selected_signals, duration, interval): signal_readings = package.get_data() readings_per_signal = len(signal_readings[0].data) - print( - "successfully read %d signals with %d values each" - % (len(signal_readings), readings_per_signal) - ) + print("successfully read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) for i in range(readings_per_signal): - position_X = ( - signal_readings[0].data[i] * signal_readings[0].factor - + signal_readings[0].offset - ) - position_Y = ( - signal_readings[1].data[i] * signal_readings[1].factor - + signal_readings[1].offset - ) - position_Z = ( - signal_readings[2].data[i] * signal_readings[2].factor - + signal_readings[2].offset - ) + position_X = 
signal_readings[0].data[i] * signal_readings[0].factor + signal_readings[0].offset + position_Y = signal_readings[1].data[i] * signal_readings[1].factor + signal_readings[1].offset + position_Z = signal_readings[2].data[i] * signal_readings[2].factor + signal_readings[2].offset readings_counter += 1 print( diff --git a/scripts/signals_assignment.py b/scripts/signals_assignment.py index 6a60498..d67eb30 100644 --- a/scripts/signals_assignment.py +++ b/scripts/signals_assignment.py @@ -38,9 +38,7 @@ def from_signals(signal_list: List[pyLSV2.ScopeSignal]): if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="command line script for dumping the signal list of a control to json" - ) + parser = argparse.ArgumentParser(description="command line script for dumping the signal list of a control to json") parser.add_argument( "address", help="ip or hostname of the control", diff --git a/scripts/ssh_tunnel_demo.py b/scripts/ssh_tunnel_demo.py index 207b86c..6c2fcb6 100644 --- a/scripts/ssh_tunnel_demo.py +++ b/scripts/ssh_tunnel_demo.py @@ -26,11 +26,7 @@ private_key_file = "" lsv2_port = 19000 - print( - "Connecting to {:s}@{:s}:{:d} via ssh tunnel".format( - user_name, address, lsv2_port - ) - ) + print("Connecting to {:s}@{:s}:{:d} via ssh tunnel".format(user_name, address, lsv2_port)) ssh_forwarder = SSHTunnelForwarder( address, ssh_username=user_name, @@ -38,21 +34,13 @@ remote_bind_address=("127.0.0.1", lsv2_port), ) ssh_forwarder.start() - print( - "SSH tunnel established. local port is {}".format(ssh_forwarder.local_bind_port) - ) + print("SSH tunnel established. 
local port is {}".format(ssh_forwarder.local_bind_port)) print("Establish regular LSV2 connection via local port") - con = pyLSV2.LSV2( - "127.0.0.1", port=ssh_forwarder.local_bind_port, timeout=5, safe_mode=False - ) + con = pyLSV2.LSV2("127.0.0.1", port=ssh_forwarder.local_bind_port, timeout=5, safe_mode=False) con.connect() - print( - "Connected to '{:s}' with NC Software '{:s}'".format( - con.versions.control, con.versions.nc_sw - ) - ) + print("Connected to '{:s}' with NC Software '{:s}'".format(con.versions.control, con.versions.nc_sw)) print("Close Connection") con.disconnect() diff --git a/scripts/tab2csv.py b/scripts/tab2csv.py index 8f6a350..d564cc7 100644 --- a/scripts/tab2csv.py +++ b/scripts/tab2csv.py @@ -14,13 +14,9 @@ __email__ = "dr.unsinn@googlemail.com" if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="command line script parsing table files" - ) + parser = argparse.ArgumentParser(description="command line script parsing table files") parser.add_argument("source", help="table file to parse", type=pathlib.Path) - parser.add_argument( - "--decimal_char", help="override local decimal char", type=str, default="," - ) + parser.add_argument("--decimal_char", help="override local decimal char", type=str, default=",") log_group = parser.add_mutually_exclusive_group() log_group.add_argument( "-d", diff --git a/tests/test_connection.py b/tests/test_connection.py index b3a8fef..15a34a6 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -50,10 +50,7 @@ def test_switching_safe_mode(address: str, timeout: float): def test_login_with_password(address: str, timeout: float): """check if logging in with a password works""" with pyLSV2.LSV2(address, port=19000, timeout=timeout, safe_mode=False) as lsv2: - if not ( - lsv2.versions.control.startswith("iTNC530 Program") - or lsv2.versions.control.startswith("iTNC530Program") - ): + if not (lsv2.versions.control.startswith("iTNC530 Program") or 
lsv2.versions.control.startswith("iTNC530Program")): # logon to plc is not locked? lsv2.logout(pyLSV2.Login.FILEPLC) assert lsv2.login(login=pyLSV2.Login.FILEPLC) is False diff --git a/tests/test_file_functions.py b/tests/test_file_functions.py index 1036abf..102413c 100644 --- a/tests/test_file_functions.py +++ b/tests/test_file_functions.py @@ -79,17 +79,11 @@ def test_remote_file_functions(address: str, timeout: float): if lsv2.versions.is_tnc(): # only test for tnc controls - assert ( - lsv2.copy_remote_file(source_path=mdi_dir + mdi_name, target_path=test_dir) - is True - ) + assert lsv2.copy_remote_file(source_path=mdi_dir + mdi_name, target_path=test_dir) is True assert lsv2.file_info(test_dir + mdi_name) is not None assert lsv2.delete_file(test_dir + mdi_name) is True - assert ( - lsv2.copy_remote_file(source_path=mdi_dir + mdi_name, target_path=test_file_1) - is True - ) + assert lsv2.copy_remote_file(source_path=mdi_dir + mdi_name, target_path=test_file_1) is True assert lsv2.file_info(test_file_1) is not None assert lsv2.change_directory(remote_directory=mdi_dir) is True @@ -150,30 +144,10 @@ def test_file_search(address: str, timeout: float): > 0 ) elif lsv2.versions.is_pilot(): - file_path = ( - pyLSV2.DriveName.TNC - + pyLSV2.PATH_SEP - + "nc_prog" - + pyLSV2.PATH_SEP - + "ncps" - ) - assert ( - len( - lsv2.get_file_list( - file_path, descend=False, pattern=pyLSV2.REGEX_FILE_NAME_H - ) - ) - > 0 - ) + file_path = pyLSV2.DriveName.TNC + pyLSV2.PATH_SEP + "nc_prog" + pyLSV2.PATH_SEP + "ncps" + assert len(lsv2.get_file_list(file_path, descend=False, pattern=pyLSV2.REGEX_FILE_NAME_H)) > 0 else: file_path = pyLSV2.DriveName.TNC + pyLSV2.PATH_SEP + "nc_prog" - assert ( - len( - lsv2.get_file_list( - file_path, descend=False, pattern=pyLSV2.REGEX_FILE_NAME_H - ) - ) - > 0 - ) + assert len(lsv2.get_file_list(file_path, descend=False, pattern=pyLSV2.REGEX_FILE_NAME_H)) > 0 lsv2.disconnect() diff --git a/tests/test_machine_parameters.py 
b/tests/test_machine_parameters.py index c9a8c9a..6497323 100644 --- a/tests/test_machine_parameters.py +++ b/tests/test_machine_parameters.py @@ -35,10 +35,7 @@ def test_rw_machine_parameter(address: str, timeout: float): assert lsv2.login(pyLSV2.Login.PLCDEBUG) is True current_value = lsv2.get_machine_parameter(parameter_name) - assert ( - lsv2.set_machine_parameter(parameter_name, current_value, safe_to_disk=False) - is not False - ) + assert lsv2.set_machine_parameter(parameter_name, current_value, safe_to_disk=False) is not False lsv2.logout(pyLSV2.Login.PLCDEBUG) diff --git a/tests/test_plc_read.py b/tests/test_plc_read.py index 6c87923..b304578 100644 --- a/tests/test_plc_read.py +++ b/tests/test_plc_read.py @@ -98,22 +98,9 @@ def test_data_path_read(address: str, timeout: float): assert lsv2.read_data_path("/TABLE/TOOL/T/1/L") is not None # These probably only work on a programming station - assert ( - lsv2.read_data_path("/PLC/program/symbol/global/MG_BA_Automatik") - is not None - ) - assert ( - lsv2.read_data_path( - '/PLC/program/symbol/module/"SPINDEL.SRC"/KL_100_PROZENT' - ) - is not None - ) - assert ( - lsv2.read_data_path( - "/PLC/program/symbol/global/STG_WZM[0].WL_WZM_SIMULATION_ZAEHLE" - ) - is not None - ) + assert lsv2.read_data_path("/PLC/program/symbol/global/MG_BA_Automatik") is not None + assert lsv2.read_data_path('/PLC/program/symbol/module/"SPINDEL.SRC"/KL_100_PROZENT') is not None + assert lsv2.read_data_path("/PLC/program/symbol/global/STG_WZM[0].WL_WZM_SIMULATION_ZAEHLE") is not None assert lsv2.read_data_path("/PLC/program/symbol/global/+STG_WZM[1]") is not None # check if path is sanitized correctly @@ -121,6 +108,7 @@ def test_data_path_read(address: str, timeout: float): lsv2.disconnect() + def test_comapare_values(address: str, timeout: float): """test to see if reading via data path and plc memory returns the same value. 
run only on iTNC""" lsv2 = pyLSV2.LSV2(address, port=19000, timeout=timeout, safe_mode=False) diff --git a/tests/test_transfer.py b/tests/test_transfer.py index 4bd6a30..0d3c98a 100644 --- a/tests/test_transfer.py +++ b/tests/test_transfer.py @@ -25,20 +25,10 @@ def test_file_recive(address: str, timeout: float): with tempfile.TemporaryDirectory(suffix=None, prefix="pyLSV2_") as tmp_dir_name: local_mdi_path = Path(tmp_dir_name).joinpath("mdi.h") - assert ( - lsv2.recive_file( - local_path=str(local_mdi_path), remote_path=mdi_path, binary_mode=False - ) - is True - ) + assert lsv2.recive_file(local_path=str(local_mdi_path), remote_path=mdi_path, binary_mode=False) is True local_tool_table_path = Path(tmp_dir_name).joinpath("tool.t") - assert ( - lsv2.recive_file( - local_path=str(local_tool_table_path), remote_path=tool_t_path - ) - is True - ) + assert lsv2.recive_file(local_path=str(local_tool_table_path), remote_path=tool_t_path) is True lsv2.disconnect() @@ -104,11 +94,6 @@ def test_recive_with_path_formating(address: str, timeout: float): with tempfile.TemporaryDirectory(suffix=None, prefix="pyLSV2_") as tmp_dir_name: local_mdi_path = Path(tmp_dir_name).joinpath("mdi.h") - assert ( - lsv2.recive_file( - local_path=str(local_mdi_path), remote_path=mdi_path, binary_mode=False - ) - is True - ) + assert lsv2.recive_file(local_path=str(local_mdi_path), remote_path=mdi_path, binary_mode=False) is True lsv2.disconnect() From 79fddda102a0f15663c2ec11be02bfa73c0ba237 Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 19:34:12 +0200 Subject: [PATCH 06/11] address some pylint concerns --- pyLSV2/client.py | 20 ++++++++++---------- pyLSV2/dat_cls.py | 4 ++-- pyLSV2/low_level_com.py | 4 +--- pyLSV2/misc.py | 15 ++++++++------- tests/test_plc_read.py | 1 - 5 files changed, 21 insertions(+), 23 deletions(-) diff --git a/pyLSV2/client.py b/pyLSV2/client.py index 65411d9..47d98ab 100644 --- a/pyLSV2/client.py +++ b/pyLSV2/client.py @@ -1290,7 +1290,6 @@ def 
read_plc_memory(self, first_element: int, mem_type: lc.MemoryType, number_of ) return [] else: - max_elements_per_transfer = math.floor(255 / mem_byte_count) - 1 # subtract 1 for safety num_groups = math.ceil(number_of_elements / max_elements_per_transfer) logging.debug( @@ -1303,7 +1302,6 @@ def read_plc_memory(self, first_element: int, mem_type: lc.MemoryType, number_of first_element_in_group = first_element for i in range(num_groups): - # determine number of elements for this group if remaining_elements > max_elements_per_transfer: elements_in_group = max_elements_per_transfer @@ -1660,9 +1658,11 @@ def read_data_path(self, path: str) -> Union[bool, int, float, str, None]: self._logger.info("successfully read data path: %s and got value '%s'", path, data_value) return data_value - elif self.last_error.e_code == lc.LSV2StatusCode.T_ER_WRONG_PARA: + + if self.last_error.e_code == lc.LSV2StatusCode.T_ER_WRONG_PARA: self._logger.warning("the argument '%s' is not supported by this control", path) return None + self._logger.warning("an error occurred while querying data path '%s'. 
Error code was %d", path, self.last_error.e_code) return None @@ -1748,13 +1748,13 @@ def read_scope_signals(self) -> List[ld.ScopeSignal]: """ if not self.versions.is_itnc(): self._logger.warning("only works for iTNC530") - return list() + return [] if not self.login(lc.Login.SCOPE): self._logger.warning("clould not log in as user for scope function") - return list() + return [] - channel_list = list() + channel_list = [] content = self._llcom.telegram(lc.CMD.R_OC) if self._llcom.last_response in lc.RSP.S_OC: @@ -1788,11 +1788,11 @@ def real_time_readings(self, signal_list: List[ld.ScopeSignal], duration: int, i """ if not self.versions.is_itnc(): self._logger.warning("only works for iTNC530") - return list() + return [] if not self.login(lc.Login.SCOPE): self._logger.warning("clould not log in as user for scope function") - return list() + return [] self._logger.debug( "start recoding %d readings with interval of %d µs", @@ -1831,7 +1831,7 @@ def real_time_readings(self, signal_list: List[ld.ScopeSignal], duration: int, i payload.extend(struct.pack("!L", interval)) start = time.time() # start timer - recorded_data = list() + recorded_data = [] content = self._send_recive(lc.CMD.R_OD, payload, lc.RSP.S_OD) if not isinstance(content, (bytearray,)) or len(content) <= 0: @@ -1851,6 +1851,6 @@ def real_time_readings(self, signal_list: List[ld.ScopeSignal], duration: int, i break end = time.time() timer = end - start - recorded_data = list() + recorded_data = [] self._logger.debug("finished reading scope data") diff --git a/pyLSV2/dat_cls.py b/pyLSV2/dat_cls.py index 677c8f8..33ba237 100644 --- a/pyLSV2/dat_cls.py +++ b/pyLSV2/dat_cls.py @@ -1158,7 +1158,7 @@ def __init__(self, channel: int, signal: int, offset: int, factor: float, unit: self._unit = unit # self._header = bytearray() - self.data = list() + self.data = [] @property def channel(self) -> int: @@ -1199,7 +1199,7 @@ class ScopeReading: def __init__(self, sequence_number: int): self._seqence_nr = 
sequence_number # self._full_data = bytearray() - self._signal_data = list() + self._signal_data = [] def seqence_nr(self) -> int: """sequence number of consecuetive readings""" diff --git a/pyLSV2/low_level_com.py b/pyLSV2/low_level_com.py index c1097ee..934c45b 100644 --- a/pyLSV2/low_level_com.py +++ b/pyLSV2/low_level_com.py @@ -253,7 +253,7 @@ def __init__(self, port: str, speed: int, timeout: float = 15.0): self._last_lsv2_response = RSP.NONE self._last_error = LSV2Error() raise NotImplementedError() - import serial + # import serial @property def last_response(self) -> RSP: @@ -287,14 +287,12 @@ def connect(self): Establish connection to control """ raise NotImplementedError() - pass def disconnect(self): """ Close connection """ raise NotImplementedError() - pass def telegram( self, diff --git a/pyLSV2/misc.py b/pyLSV2/misc.py index f087c5a..0f59963 100644 --- a/pyLSV2/misc.py +++ b/pyLSV2/misc.py @@ -89,18 +89,19 @@ def decode_system_information(data_set: bytearray) -> Union[bool, int]: if data_type == 1: return struct.unpack("!xxx?", data_set[4:])[0] - elif data_type == 2: + + if data_type == 2: return struct.unpack("!L", data_set[4:])[0] - else: - raise LSV2DataException("unexpected value for data type of system information") + + raise LSV2DataException("unexpected value for data type of system information") # always returns b"\x00\x00\x00\x02\x00\x00\x0b\xb8" for recording 1, 2 and 3 # -> is independent of channel, axes, interval or samples # maybe the last four bytes are the actual interval? 
0x00 00 0b b8 = 3000 # documentation hints - if data_set != bytearray(b"\x00\x00\x00\x02\x00\x00\x0b\xb8"): - print(" # unexpected return pattern for R_CI!") - raise Exception("unknown data for S_CI result") - return data_set + # if data_set != bytearray(b"\x00\x00\x00\x02\x00\x00\x0b\xb8"): + # print(" # unexpected return pattern for R_CI!") + # raise Exception("unknown data for S_CI result") + # return data_set def decode_file_system_info(data_set: bytearray, control_type: ControlType = ControlType.UNKNOWN) -> ld.FileEntry: diff --git a/tests/test_plc_read.py b/tests/test_plc_read.py index b304578..4c22ef5 100644 --- a/tests/test_plc_read.py +++ b/tests/test_plc_read.py @@ -115,7 +115,6 @@ def test_comapare_values(address: str, timeout: float): lsv2.connect() if lsv2.versions.is_itnc(): - for mem_address in [0, 1, 2, 4, 8, 12, 68, 69, 151, 300, 368]: v1 = lsv2.read_plc_memory(mem_address, pyLSV2.MemoryType.DWORD, 1)[0] v2 = lsv2.read_data_path("/PLC/memory/D/%d" % (mem_address * 4)) From 5f4a577aea528f89cc695fe39a62132e0a06bdee Mon Sep 17 00:00:00 2001 From: Max Date: Sat, 7 Oct 2023 20:44:23 +0200 Subject: [PATCH 07/11] transition scripts to pyproject.toml file - drops some scripts for the moment --- README.md | 10 +- pyLSV2/client.py | 6 +- pyLSV2/demos/__init__.py | 3 + {scripts => pyLSV2/demos}/lsv2_demo.py | 17 +- {scripts => pyLSV2/demos}/scope2csv.py | 26 ++- .../demos/ssh_tunnel.py | 14 +- {scripts => pyLSV2/demos}/tab2csv.py | 11 +- pyLSV2/misc_scope.py | 5 +- pyLSV2/table_reader.py | 18 +- pyproject.toml | 11 +- scripts/__init__.py | 0 scripts/lsv2cmd.py | 187 ------------------ scripts/real_time_readings.py | 82 -------- scripts/scope_demo.py | 57 ------ scripts/signals_assignment.py | 70 ------- 15 files changed, 76 insertions(+), 441 deletions(-) create mode 100644 pyLSV2/demos/__init__.py rename {scripts => pyLSV2/demos}/lsv2_demo.py (96%) rename {scripts => pyLSV2/demos}/scope2csv.py (89%) rename scripts/ssh_tunnel_demo.py => 
pyLSV2/demos/ssh_tunnel.py (93%) rename {scripts => pyLSV2/demos}/tab2csv.py (94%) delete mode 100644 scripts/__init__.py delete mode 100644 scripts/lsv2cmd.py delete mode 100644 scripts/real_time_readings.py delete mode 100644 scripts/scope_demo.py delete mode 100644 scripts/signals_assignment.py diff --git a/README.md b/README.md index ffd328f..35f9841 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ - npalmerDNX ## Usage - See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/scripts/lsv2_demo.py) for a demonstration of some of the functions. + See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/pyLSV2/demos/lsv2_demo.py) for a demonstration of some of the functions. Since the whole protocol isn't documented there can always be problems with certain corner cases. Especially during file transfer a lot of stuff can go wrong. In case the control doesn't accept a command it returns an error. Some of these errors are checked internally but not everything is covered as of now. It is therefore @@ -107,7 +107,7 @@ These changes where made intentionally to make further development easier. See t ``` con.read_plc_memory(32, pyLSV2.MemoryType.MARKER, 15) ``` - See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/scripts/lsv2_demo.py) for more examples. + See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/pyLSV2/demos/lsv2_demo.py) for more examples. The available memory areas and their python data type | Memory Type | Python Type | @@ -135,7 +135,7 @@ These changes where made intentionally to make further development easier. See t con.read_data_path('/TABLE/TOOL/T/1/DOC') ``` - See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/scripts/lsv2_demo.py) for more examples. + See [lsv2_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/pyLSV2/demos/lsv2_demo.py) for more examples. Note that reading values from memory does not take into account the actual size in the control memory. 
This leads to an offset between the values read with `read_data_path` and `read_plc_memory`. As a workaround you have to multiply the address value with the number of bytes the data type requires. The following example tries to show how this can be accomplished: @@ -148,7 +148,7 @@ These changes where made intentionally to make further development easier. See t ### SSH Tunnel Newer controls allow the use of ssh to encrypt the communication via LSV2. - See [ssh_tunnel_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/scripts/ssh_tunnel_demo.py) for an example on + See [ssh_tunnel_demo.py](https://github.com/drunsinn/pyLSV2/blob/master/pyLSV2/demos/ssh_tunnel.py) for an example on how to use the python library [sshtunnel](https://github.com/pahaz/sshtunnel) to achieve a secure connection. ## Compatibility @@ -183,7 +183,7 @@ These changes where made intentionally to make further development easier. See t # Tables Included in this library is also functionality to work with Tables used by different NC Controls. This includes for example TNC controls as well as Anilam 6000i CNC. As these controls and there software versions use different table formats, it is also possible to dreive the format form an existing table and export the format to a json file. - See [tab2csv.py](https://github.com/drunsinn/pyLSV2/blob/master/scripts/tab2csv.py) for a demonstration on how to read a table and convert it to a csv file. + See [tab2csv.py](https://github.com/drunsinn/pyLSV2/blob/master/pyLSV2/demos/tab2csv.py) for a demonstration on how to read a table and convert it to a csv file. 
This script can also be used as a command line tool ``` diff --git a/pyLSV2/client.py b/pyLSV2/client.py index 47d98ab..9960c49 100644 --- a/pyLSV2/client.py +++ b/pyLSV2/client.py @@ -1663,7 +1663,11 @@ def read_data_path(self, path: str) -> Union[bool, int, float, str, None]: self._logger.warning("the argument '%s' is not supported by this control", path) return None - self._logger.warning("an error occurred while querying data path '%s'. Error code was %d", path, self.last_error.e_code) + self._logger.warning( + "an error occurred while querying data path '%s'. Error code was %d", + path, + self.last_error.e_code, + ) return None def axes_location(self) -> Union[Dict[str, float], None]: diff --git a/pyLSV2/demos/__init__.py b/pyLSV2/demos/__init__.py new file mode 100644 index 0000000..061169c --- /dev/null +++ b/pyLSV2/demos/__init__.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Collection of demo scripts for pyLSV2""" diff --git a/scripts/lsv2_demo.py b/pyLSV2/demos/lsv2_demo.py similarity index 96% rename from scripts/lsv2_demo.py rename to pyLSV2/demos/lsv2_demo.py index 97772c0..3082150 100644 --- a/scripts/lsv2_demo.py +++ b/pyLSV2/demos/lsv2_demo.py @@ -9,15 +9,14 @@ import pyLSV2 from pyLSV2.const import MemoryType -__author__ = "drunsinn" -__license__ = "MIT" -__version__ = "1.0" -__email__ = "dr.unsinn@googlemail.com" -if __name__ == "__main__": +def comprehensive_demo(): + """Basic demo for pyLSV2""" parser = argparse.ArgumentParser() - parser.add_argument("address", nargs="?", default="192.168.56.101", type=str) + # parser.add_argument("address", nargs="?", default="192.168.56.101", type=str) + + parser.add_argument("address", help="ip or hostname of control", type=str) parser.add_argument( "-d", @@ -121,7 +120,7 @@ lang = con.get_machine_parameter("CfgDisplayLanguage.ncLanguage") print("# Value of machine parameter for NC language: {:s}".format(lang)) - if con.version.is_tnc7(): + if con.versions.is_tnc7(): print("UI 
Interface test not available on TNC7?") else: print("UI Interface") @@ -167,3 +166,7 @@ print("# current tool in spindle: {:d}.{:d} '{:s}'".format(t_info.number, t_info.index, t_info.name)) else: print("# direct reading of current tool not supported for this control") + + +if __name__ == "__main__": + comprehensive_demo() diff --git a/scripts/scope2csv.py b/pyLSV2/demos/scope2csv.py similarity index 89% rename from scripts/scope2csv.py rename to pyLSV2/demos/scope2csv.py index bd3d5dd..b64bbb3 100644 --- a/scripts/scope2csv.py +++ b/pyLSV2/demos/scope2csv.py @@ -5,7 +5,7 @@ import sys import logging import argparse -import csv +from csv import writer as csv_writer from pathlib import Path import pyLSV2 @@ -17,7 +17,7 @@ __email__ = "dr.unsinn@googlemail.com" -if __name__ == "__main__": +def main(): parser = argparse.ArgumentParser( prog="real_time_readings", description="script to read scope signals from control", @@ -37,7 +37,13 @@ parser.add_argument("-a", "--duration", help="number of seconds to record", type=int, default=10) - parser.add_argument("-i", "--interval", help="number of µs between readings", type=int, default=6000) + parser.add_argument( + "-i", + "--interval", + help="number of µs between readings", + type=int, + default=21000, + ) parser.add_argument( "-d", @@ -122,17 +128,21 @@ scope_signals.append(new_signal) with open(args.output, "w", encoding="utf8") as csv_fp: - csv = csv.writer(csv_fp, dialect="excel", lineterminator="\n") + csv = csv_writer(csv_fp, dialect="excel", lineterminator="\n") csv.writerow(list(map(lambda x: x.normalized_name(), scope_signals))) readings_counter = 0 for package in con.real_time_readings(scope_signals, args.duration, args.interval): signal_readings = package.get_data() readings_per_signal = len(signal_readings[0].data) - logging.debug("successfulle read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) + logging.debug( + "successfulle read %d signals with %d values each", + 
len(signal_readings), + readings_per_signal, + ) for i in range(readings_per_signal): - row = list() + row = [] for signal in signal_readings: value = (signal.data[i] * signal.factor) + signal.offset row.append(value) @@ -150,3 +160,7 @@ ) sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/scripts/ssh_tunnel_demo.py b/pyLSV2/demos/ssh_tunnel.py similarity index 93% rename from scripts/ssh_tunnel_demo.py rename to pyLSV2/demos/ssh_tunnel.py index 6c2fcb6..56d7af4 100644 --- a/scripts/ssh_tunnel_demo.py +++ b/pyLSV2/demos/ssh_tunnel.py @@ -10,17 +10,13 @@ 6. edit this file and set address, user name and path to the key file """ import logging -import pyLSV2 from sshtunnel import SSHTunnelForwarder - -__author__ = "drunsinn" -__license__ = "MIT" -__version__ = "1.0" -__email__ = "dr.unsinn@googlemail.com" +import pyLSV2 logging.basicConfig(level=logging.INFO) -if __name__ == "__main__": + +def main(): address = "192.168.56.101" user_name = "user" private_key_file = "" @@ -46,3 +42,7 @@ print("Close SSH tunnel") ssh_forwarder.stop() + + +if __name__ == "__main__": + main() diff --git a/scripts/tab2csv.py b/pyLSV2/demos/tab2csv.py similarity index 94% rename from scripts/tab2csv.py rename to pyLSV2/demos/tab2csv.py index d564cc7..c7000d9 100644 --- a/scripts/tab2csv.py +++ b/pyLSV2/demos/tab2csv.py @@ -8,12 +8,9 @@ from pyLSV2 import NCTable -__author__ = "drunsinn" -__license__ = "MIT" -__version__ = "1.0" -__email__ = "dr.unsinn@googlemail.com" -if __name__ == "__main__": +def main(): + """console application to convert a tnc table file to a csv file""" parser = argparse.ArgumentParser(description="command line script parsing table files") parser.add_argument("source", help="table file to parse", type=pathlib.Path) parser.add_argument("--decimal_char", help="override local decimal char", type=str, default=",") @@ -63,3 +60,7 @@ sys.exit(-1) sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/pyLSV2/misc_scope.py b/pyLSV2/misc_scope.py 
index ea22e07..5e28971 100644 --- a/pyLSV2/misc_scope.py +++ b/pyLSV2/misc_scope.py @@ -2,7 +2,6 @@ # -*- coding: utf-8 -*- """misc helper functions for the scope part of pyLSV2""" import struct -from datetime import datetime from typing import List import logging @@ -32,7 +31,7 @@ def decode_signal_description(data_set: bytearray) -> List[ld.ScopeSignal]: # data_set[ 46: ??] : name of the channel # type 1, 2, 4, 5: # data_set[ 59: ] : signal names - signals = list() + signals = [] channel_number = struct.unpack("!H", data_set[0:2])[0] name_start = 46 name_end = 46 @@ -121,7 +120,7 @@ def split_dataset(data): logger.debug("R_OP dataset has expected length") for i, data_sub_set in enumerate(split_dataset(data_set)): if data_sub_set[17:] != bytearray(b"?\x00\x00\x00\x00"): - raise Exception("unexpected data in signal details at position 17 %s" % data_sub_set[17:]) + raise LSV2DataException("unexpected data in signal details at position 17 %s" % data_sub_set[17:]) signal_list[i].unit = lm.ba_to_ustr(data_sub_set[0:10]) signal_list[i].factor = struct.unpack(" self._column_format[name]["width"]: - raise Exception("value to long for column") + raise ValueError("value to long for column") self._column_format[name]["empty_value"] = value def update_column_format(self, name: str, parameters: dict): @@ -204,7 +204,7 @@ def dump_native(self, file_path: pathlib.Path, renumber_column=None): for column_name in self._columns: if column_name not in self._column_format: - raise Exception("configuration is incomplete, missing definition for column {column_name:s}") + raise ValueError("configuration is incomplete, missing definition for column {column_name:s}") fixed_width = self._column_format[column_name]["width"] format_string = "{0:<%d}" % fixed_width tfp.write(format_string.format(column_name)) @@ -228,7 +228,7 @@ def dump_native(self, file_path: pathlib.Path, renumber_column=None): ) tfp.write(format_string.format(self._column_format[column_name]["empty_value"])) else: - raise 
Exception("entry is missing a value for column %s defined in the output format" % column_name) + raise ValueError("entry is missing a value for column %s defined in the output format" % column_name) tfp.write("\n") row_counter += 1 @@ -308,7 +308,7 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": ) if header is None: - raise Exception("File has wrong format: incorrect header for file %s" % table_path) + raise ValueError("File has wrong format: incorrect header for file %s" % table_path) nctable.name = header.group("name").strip() nctable.suffix = header.group("suffix") @@ -396,9 +396,9 @@ def parse_table(table_path: pathlib.Path) -> "NCTable": for c_d in table_config["TableDescription"]["columns"]: cfg_column_name = c_d["CfgColumnDescription"]["key"] if cfg_column_name not in nctable.column_names: - raise Exception("found unexpected column %s" % cfg_column_name) + raise ValueError("found unexpected column %s" % cfg_column_name) if c_d["CfgColumnDescription"]["width"] != nctable.get_column_width(cfg_column_name): - raise Exception( + raise ValueError( "found difference in column width for colmun %s: %d : %d" % ( cfg_column_name, @@ -447,7 +447,7 @@ def str_to_typed_value(value_string: str): last_object.append({name: new_category}) else: if name in last_object: - raise Exception("Element already in dict") + raise ValueError("Element already in dict") last_object[name] = new_category object_list.append(new_category) @@ -459,7 +459,7 @@ def str_to_typed_value(value_string: str): last_object.append({name: new_group}) else: if name in last_object: - raise Exception("Element already in dict") + raise ValueError("Element already in dict") last_object[name] = new_group object_list.append(new_group) @@ -479,7 +479,7 @@ def str_to_typed_value(value_string: str): parts = line.split(":=") last_object[parts[0]] = str_to_typed_value(parts[1]) else: - raise Exception("no keyname??") + raise ValueError("no keyname??") # last_object["value_%d" % id_counter] = line return 
config_data diff --git a/pyproject.toml b/pyproject.toml index 1cc6303..0ca90f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,8 +14,15 @@ requires-python = ">=3.6" keywords = ["LSV2", "CNC", "PLC"] dynamic = ["version", "readme", ] -#[project.scripts] -#lsv2_demo = "scripts.lsv2_demo" +[project.urls] +"Homepage" = "https://github.com/drunsinn/pyLSV2" +"Bug Tracker" = "https://github.com/drunsinn/pyLSV2/issues" + +[project.scripts] +lsv2_demo = "pyLSV2.demos.lsv2_demo:comprehensive_demo" +lsv2_scope2csv = "pyLSV2.demos.scope2csv:main" +lsv2_tab2csv = "pyLSV2.demos.tab2csv:main" +lsv2_tunnel_demo = "pyLSV2.demos.ssh_tunnel:main" [tool.setuptools] packages = [ diff --git a/scripts/__init__.py b/scripts/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/scripts/lsv2cmd.py b/scripts/lsv2cmd.py deleted file mode 100644 index eb79a78..0000000 --- a/scripts/lsv2cmd.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""This script lets you use some of the functions included in pyLSV2 via - the command line. -""" -import os -import sys -import logging -import argparse -import re -import socket - -import pyLSV2 - -__author__ = "drunsinn" -__license__ = "MIT" -__version__ = "1.0" -__email__ = "dr.unsinn@googlemail.com" - -REMOTE_PATH_REGEX = ( - r"^(?Plsv2(\+ssh)?):\/\/(?P[\w\.-]*)(?::(?P\d{2,5}))?(?:\/(?P(TNC|PLC):))(?P(\/[\$\.\w\d_-]+)*)\/?$" -) - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="command line script for functions in pyLSV2") - parser.add_argument( - "source", - help="source file. Either local path or URL with format lsv2://:/TNC:/", - type=str, - ) - parser.add_argument( - "destination", - help="destination file. 
Either local path or URL with format lsv2://:/TNC:/", - type=str, - ) - - log_group = parser.add_mutually_exclusive_group() - log_group.add_argument( - "-d", - "--debug", - help="enable log level DEBUG", - action="store_const", - dest="loglevel", - const=logging.DEBUG, - default=logging.WARNING, - ) - log_group.add_argument( - "-v", - "--verbose", - help="enable log level INFO", - action="store_const", - dest="loglevel", - const=logging.INFO, - default=logging.WARNING, - ) - - parser.add_argument("-t", "--timeout", help="timeout duration in seconds", type=float, default=10.0) - parser.add_argument( - "-f", - "--force", - help="replace file at target if it already exists", - action="store_true", - default=False, - ) - args = parser.parse_args() - - logging.basicConfig(level=args.loglevel) - logger = logging.getLogger("lsv2cmd") - - logger.debug("Start logging with level '%s'", logging.getLevelName(args.loglevel)) - logger.debug("Source Path: %s", args.source) - logger.debug("Destination Path: %s", args.destination) - - source_is_remote = False - dest_is_remote = False - - host_machine = "" - host_port = 19000 - use_ssh = False - - source_path = "" - dest_path = "" - - source_match = re.match(REMOTE_PATH_REGEX, args.source) - logger.debug("result of regex for source: %s", source_match) - - if source_match is not None: - source_is_remote = True - source_path = source_match.group("drive") + source_match.group("path") - host_machine = str(source_match.group("host")) - if source_match.group("port") is not None: - host_port = int(source_match.group("port")) - if "ssh" in source_match.group("prot"): - use_ssh = True - logger.info( - "Source path %s is on remote %s:%d via %s", - source_path, - host_machine, - host_port, - source_match.group("prot"), - ) - else: - source_path = args.source - logger.info("Source path %s is local", os.path.abspath(source_path)) - - dest_match = re.match(REMOTE_PATH_REGEX, args.destination) - logger.debug("result of regex for destination: %s", 
dest_match) - - if dest_match is not None: - dest_is_remote = True - dest_path = dest_match.group("drive") + dest_match.group("path") - if source_is_remote and host_machine != dest_match.group("host"): - logger.error( - "Can't copy between different remotes '%s' and '%s'", - host_machine, - dest_match.group("host"), - ) - sys.exit(-1) - - host_machine = str(dest_match.group("host")) - if dest_match.group("port") is not None: - host_port = int(dest_match.group("port")) - if "ssh" in dest_match.group("prot"): - use_ssh = True - logger.info( - "Destination path %s is on remote %s:%s via %s", - dest_path, - host_machine, - host_port, - dest_match.group("prot"), - ) - else: - dest_path = args.destination - logger.info("Destination path %s is local", os.path.abspath(dest_path)) - - if use_ssh: - import sshtunnel - - ssh_forwarder = sshtunnel.SSHTunnelForwarder(host_machine, remote_bind_address=("127.0.0.1", host_port)) - ssh_forwarder.start() - host_machine = "127.0.0.1" - host_port = ssh_forwarder.local_bind_port - logger.info("SSH tunnel established. 
local port is %d", host_port) - - try: - con = pyLSV2.LSV2(hostname=host_machine, port=host_port, timeout=args.timeout) - con.connect() - except socket.gaierror as ex: - logger.error("An Exception occurred: '%s'", ex) - logger.error("Could not resove host information: '%s'", host_machine) - sys.exit(-2) - - if source_is_remote: - file_info = con.file_info(remote_file_path=str(source_path)) - if not file_info: - logger.error("source file dose not exist on remote: '%s'", source_path) - sys.exit(-3) - elif file_info.is_directory or file_info.is_drive: - logger.error("source on remote is not file but directory: '%s'", source_path) - sys.exit(-4) - else: - if os.path.exists(source_path): - logger.debug("source file exists") - else: - if os.path.isfile(source_path): - logger.error("source file dose not exist: '%s'", source_path) - sys.exit(-5) - else: - logger.error("source folder dose not exist: '%s'", source_path) - sys.exit(-6) - - success = False - if source_is_remote and dest_is_remote: - logger.debug("Local copy on remote") - success = con.copy_remote_file(source_path=source_path, target_path=dest_path) - elif source_is_remote and not dest_is_remote: - logger.debug("copy from remote to local") - success = con.recive_file(remote_path=source_path, local_path=dest_path, override_file=args.force) - else: - logger.debug("copy from local to remote") - success = con.send_file(local_path=source_path, remote_path=dest_path, override_file=args.force) - con.disconnect() - - if success: - logger.info("File copied successful") - sys.exit(0) - sys.exit(-10) diff --git a/scripts/real_time_readings.py b/scripts/real_time_readings.py deleted file mode 100644 index 7883306..0000000 --- a/scripts/real_time_readings.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import sys -import logging -import pyLSV2 - -logging.basicConfig(level=logging.WARNING) - -with pyLSV2.LSV2("192.168.56.103", port=19000, timeout=5, safe_mode=False) as con: - 
availible_signals = con.read_scope_signals() - - # build list with selected signals - selected_signals = list() - selected_signals.append(availible_signals[0]) - selected_signals.append(availible_signals[1]) - selected_signals.append(availible_signals[2]) - selected_signals.append(availible_signals[122]) - - # alternatively: find signal by normalized name - # selected_signals.append(next(signal for signal in availible_signals if signal.normalized_name() == "x_s_actual")) - # selected_signals.append(next(signal for signal in availible_signals if signal.normalized_name() == "y_s_actual")) - # selected_signals.append(next(signal for signal in availible_signals if signal.normalized_name() == "z_s_actual")) - # selected_signals.append(next(signal for signal in availible_signals if signal.normalized_name() == "x_v_actual")) - # selected_signals.append(next(signal for signal in availible_signals if signal.normalized_name() == "x_a_actual")) - - duration = 10 - interval = 600 - - print("selected signals:") - for sig in selected_signals: - print("# %s" % sig) - - # take readings: - # signal_list=selected_signals, duration=10 , interval=3000): - with open("data.txt", "w") as fp: - readings_counter = 0 - count_high_freq = 0 - - for package in con.real_time_readings(selected_signals, duration, interval): - signal_readings = package.get_data() - readings_per_signal = len(signal_readings[0].data) - print("successfulle read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) - - for i in range(readings_per_signal): - # Signal_type = sample[# appending rank]["data"][one_smaple] - # for signal in signal_readings: - # value = (signal.data[i] * signal.factor) + signal.offset - # print(value, signal.unit) - if count_high_freq % 5 == 0: - # This condition is only for signals of low frequency - position_X = round( - signal_readings[0].data[i] * signal_readings[0].factor + signal_readings[0].offset, - 3, - ) - position_Y = round( - signal_readings[1].data[i] * 
signal_readings[1].factor + signal_readings[1].offset, - 3, - ) - position_Z = round( - signal_readings[2].data[i] * signal_readings[2].factor + signal_readings[2].offset, - 3, - ) - I_nominal_X = round( - signal_readings[3].data[i] * signal_readings[3].factor + signal_readings[3].offset, - 3, - ) - - print(f"Position X = {position_X} mm , Position Y = {position_Y} , Position Z = {position_Z}, I nominal X = {I_nominal_X} ") - fp.write( - "Position X = %f mm , Position Y = %f , Position Z = %f , I nominal X = %f\n" - % (position_X, position_Y, position_Z, I_nominal_X) - ) - count_high_freq += 1 - - readings_counter += readings_per_signal - - print("a total of %d readings were taken" % readings_counter) - - print("the signal description was updated to:") - for s in selected_signals: - print(s) diff --git a/scripts/scope_demo.py b/scripts/scope_demo.py deleted file mode 100644 index 0307a34..0000000 --- a/scripts/scope_demo.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""This script contains a demo on how to use the scope functions on iTNC controls -""" - -import sys -import logging -import pyLSV2 - -logging.basicConfig(level=logging.WARNING) - -with pyLSV2.LSV2("192.168.56.102", port=19000, timeout=5, safe_mode=False) as con: - if not con.versions.is_itnc(): - print("the scope functions only work for iTNC controls") - sys.exit(-1) - - availible_signals = con.read_scope_signals() - - # build list with selected signals - selected_signals = list() - selected_signals.append(availible_signals[0]) - selected_signals.append(availible_signals[1]) - selected_signals.append(availible_signals[2]) - - print("selected signals:") - for sig in selected_signals: - print("# %s" % sig) - - duration = 2 - interval = 6000 - - print("reading values for a duration of %d seconds with an interval of %d µs") - - # take readings: - readings_counter = 0 - - for package in con.real_time_readings(selected_signals, duration, interval): - signal_readings = 
package.get_data() - readings_per_signal = len(signal_readings[0].data) - print("successfully read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) - - for i in range(readings_per_signal): - position_X = signal_readings[0].data[i] * signal_readings[0].factor + signal_readings[0].offset - position_Y = signal_readings[1].data[i] * signal_readings[1].factor + signal_readings[1].offset - position_Z = signal_readings[2].data[i] * signal_readings[2].factor + signal_readings[2].offset - readings_counter += 1 - - print( - "Count: %d Position X = %.3f mm, Position Y = %.3f, Position Z = %.3f" - % (readings_counter, position_X, position_Y, position_Z) - ) - - print("# a total of %d readings were taken" % readings_counter) - - print("# the signal description was updated to:") - for signal in selected_signals: - print("##", signal) diff --git a/scripts/signals_assignment.py b/scripts/signals_assignment.py deleted file mode 100644 index d67eb30..0000000 --- a/scripts/signals_assignment.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - -import json -import logging -import argparse -from pathlib import Path -from typing import List - -import pyLSV2 - -logging.basicConfig(level=logging.INFO) - - -class SignalConfig: - def to_json(self, file_path: Path): - with open(file_path, "w", encoding="utf8") as cfp: - json.dump(sc.__dict__, cfp) - - @staticmethod - def from_json(file_path: Path): - signal_config = SignalConfig() - with open(file_path, "r", encoding="utf8") as cfp: - data = json.load(cfp) - - for key, value in data.items(): - setattr(signal_config, key, value) - return signal_config - - @staticmethod - def from_signals(signal_list: List[pyLSV2.ScopeSignal]): - signal_config = SignalConfig() - - for i, signal in enumerate(signal_list): - setattr(signal_config, signal.normalized_name(), i) - - return signal_config - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="command line script 
for dumping the signal list of a control to json") - parser.add_argument( - "address", - help="ip or hostname of the control", - type=str, - ) - parser.add_argument( - "destination", - help="destination json file", - type=Path, - ) - - args = parser.parse_args() - - with pyLSV2.LSV2(args.address, port=19000, timeout=5, safe_mode=False) as con: - availible_signals = con.read_scope_signals() - - # create signal configuration from signal list - sc = SignalConfig.from_signals(availible_signals) - - # store signal configuration to json file - sc.to_json(args.destination) - - # restore signal configuration from json file - sc_new = SignalConfig.from_json(args.destination) - - # compare before and after - print(sc.y_s_actual, sc_new.y_s_actual) - - print(sc.x_i2_t_p_m, sc_new.x_i2_t_p_m) From 574e13e408d525795f84a7f53ce37abef49a6b12 Mon Sep 17 00:00:00 2001 From: Max Date: Sun, 8 Oct 2023 12:09:28 +0200 Subject: [PATCH 08/11] add demos folder to package list --- pyproject.toml | 66 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0ca90f7..96bc429 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,13 +6,11 @@ build-backend = "setuptools.build_meta" [project] name = "pyLSV2" -authors = [ - {name = "drunsinn", email = "dr.unsinn@googlemail.com"}, -] +authors = [{ name = "drunsinn", email = "dr.unsinn@googlemail.com" }] description = "A pure Python3 implementation of the LSV2 protocol" requires-python = ">=3.6" keywords = ["LSV2", "CNC", "PLC"] -dynamic = ["version", "readme", ] +dynamic = ["version", "readme"] [project.urls] "Homepage" = "https://github.com/drunsinn/pyLSV2" @@ -27,9 +25,10 @@ lsv2_tunnel_demo = "pyLSV2.demos.ssh_tunnel:main" [tool.setuptools] packages = [ "pyLSV2", + "pyLSV2.demos", "pyLSV2.locales.en.LC_MESSAGES", - "pyLSV2.locales.de.LC_MESSAGES" - ] + "pyLSV2.locales.de.LC_MESSAGES", +] #[tool.setuptools.package-data] #pyLSV2 = 
["locales/*/LC_MESSAGES/*.mo", ] @@ -38,12 +37,12 @@ packages = [ #pyLSV2 = ["locales/*/LC_MESSAGES/*.po", ] [tool.setuptools.dynamic] -version = {attr = "pyLSV2.__version__"} -readme = {file = ["README.md", ]} +version = { attr = "pyLSV2.__version__" } +readme = { file = ["README.md"] } [tool.black] line-length = 140 -target-version = ["py36",] +target-version = ["py36"] include = "\\.pyi?$" verbose = true @@ -88,8 +87,21 @@ property-classes = ["abc.abstractproperty"] variable-naming-style = "snake_case" [tool.pylint.classes] -defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] -exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] +defining-attr-methods = [ + "__init__", + "__new__", + "setUp", + "asyncSetUp", + "__post_init__", +] +exclude-protected = [ + "_asdict", + "_fields", + "_replace", + "_source", + "_make", + "os._exit", +] valid-classmethod-first-arg = ["cls"] valid-metaclass-classmethod-first-arg = ["mcs"] @@ -123,7 +135,13 @@ logging-format-style = "old" logging-modules = ["logging"] [tool.pylint."messages control"] -confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] +confidence = [ + "HIGH", + "CONTROL_FLOW", + "INFERENCE", + "INFERENCE_FAILURE", + "UNDEFINED", +] disable = [ "raw-checker-failed", "bad-inline-option", @@ -135,8 +153,8 @@ disable = [ "use-symbolic-message-instead", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero", - "consider-using-f-string" - ] + "consider-using-f-string", +] [tool.pylint.method_args] @@ -148,8 +166,8 @@ timeout-methods = [ "requests.api.patch", "requests.api.post", "requests.api.put", - "requests.api.request" - ] + "requests.api.request", +] [tool.pylint.miscellaneous] notes = ["FIXME", "XXX", "TODO", "ToDo"] @@ -181,14 +199,14 @@ ignored-checks-for-mixins = [ "no-member", "not-async-context-manager", "not-context-manager", - "attribute-defined-outside-init" - ] 
+ "attribute-defined-outside-init", +] ignored-classes = [ "optparse.Values", "thread._local", "_thread._local", - "argparse.Namespace" - ] + "argparse.Namespace", +] missing-member-hint = true missing-member-hint-distance = 1 missing-member-max-choices = 1 @@ -199,4 +217,10 @@ allow-global-unused-variables = true callbacks = ["cb_", "_cb"] dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" ignored-argument-names = "_.*|^ignored_|^unused_" -redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] +redefining-builtins-modules = [ + "six.moves", + "past.builtins", + "future.builtins", + "builtins", + "io", +] From d5661e794b6f0c1f669361a64d5af87de379a7aa Mon Sep 17 00:00:00 2001 From: Max Date: Sun, 8 Oct 2023 14:55:02 +0200 Subject: [PATCH 09/11] rename demos to scripts - rename folder with scripts - rename entry points to be more consistent --- pyLSV2/{demos => scripts}/__init__.py | 0 pyLSV2/scripts/cmd.py | 192 ++++++++++++++++++ .../{demos/lsv2_demo.py => scripts/demo.py} | 49 +++++ pyLSV2/{demos => scripts}/scope2csv.py | 0 pyLSV2/{demos => scripts}/ssh_tunnel.py | 0 pyLSV2/{demos => scripts}/tab2csv.py | 0 pyproject.toml | 12 +- 7 files changed, 248 insertions(+), 5 deletions(-) rename pyLSV2/{demos => scripts}/__init__.py (100%) create mode 100644 pyLSV2/scripts/cmd.py rename pyLSV2/{demos/lsv2_demo.py => scripts/demo.py} (79%) rename pyLSV2/{demos => scripts}/scope2csv.py (100%) rename pyLSV2/{demos => scripts}/ssh_tunnel.py (100%) rename pyLSV2/{demos => scripts}/tab2csv.py (100%) diff --git a/pyLSV2/demos/__init__.py b/pyLSV2/scripts/__init__.py similarity index 100% rename from pyLSV2/demos/__init__.py rename to pyLSV2/scripts/__init__.py diff --git a/pyLSV2/scripts/cmd.py b/pyLSV2/scripts/cmd.py new file mode 100644 index 0000000..229c442 --- /dev/null +++ b/pyLSV2/scripts/cmd.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""This script lets you use some of 
the functions included in pyLSV2 via
+   the command line.
+"""
+import os
+import sys
+import logging
+import argparse
+import re
+import socket
+
+import pyLSV2
+
+__author__ = "drunsinn"
+__license__ = "MIT"
+__version__ = "1.0"
+__email__ = "dr.unsinn@googlemail.com"
+
+REMOTE_PATH_REGEX = (
+    r"^(?P<prot>lsv2(\+ssh)?):\/\/(?P<host>[\w\.-]*)(?::(?P<port>\d{2,5}))?(?:\/(?P<drive>(TNC|PLC):))(?P<path>(\/[\$\.\w\d_-]+)*)\/?$"
+)
+
+
+def main():
+    parser = argparse.ArgumentParser(description="command line script for functions in pyLSV2")
+    parser.add_argument(
+        "source",
+        help="source file. Either local path or URL with format lsv2://<hostname_or_ip>:<port>/TNC:/<path_to_file>",
+        type=str,
+    )
+    parser.add_argument(
+        "destination",
+        help="destination file. Either local path or URL with format lsv2://<hostname_or_ip>:<port>/TNC:/<path_to_file>",
+        type=str,
+    )
+
+    log_group = parser.add_mutually_exclusive_group()
+    log_group.add_argument(
+        "-d",
+        "--debug",
+        help="enable log level DEBUG",
+        action="store_const",
+        dest="loglevel",
+        const=logging.DEBUG,
+        default=logging.WARNING,
+    )
+    log_group.add_argument(
+        "-v",
+        "--verbose",
+        help="enable log level INFO",
+        action="store_const",
+        dest="loglevel",
+        const=logging.INFO,
+        default=logging.WARNING,
+    )
+
+    parser.add_argument("-t", "--timeout", help="timeout duration in seconds", type=float, default=10.0)
+    parser.add_argument(
+        "-f",
+        "--force",
+        help="replace file at target if it already exists",
+        action="store_true",
+        default=False,
+    )
+    args = parser.parse_args()
+
+    logging.basicConfig(level=args.loglevel)
+    logger = logging.getLogger("lsv2cmd")
+
+    logger.debug("Start logging with level '%s'", logging.getLevelName(args.loglevel))
+    logger.debug("Source Path: %s", args.source)
+    logger.debug("Destination Path: %s", args.destination)
+
+    source_is_remote = False
+    dest_is_remote = False
+
+    host_machine = ""
+    host_port = 19000
+    use_ssh = False
+
+    source_path = ""
+    dest_path = ""
+
+    source_match = re.match(REMOTE_PATH_REGEX, args.source)
+    logger.debug("result of regex for source: %s",
source_match) + + if source_match is not None: + source_is_remote = True + source_path = source_match.group("drive") + source_match.group("path") + host_machine = str(source_match.group("host")) + if source_match.group("port") is not None: + host_port = int(source_match.group("port")) + if "ssh" in source_match.group("prot"): + use_ssh = True + logger.info( + "Source path %s is on remote %s:%d via %s", + source_path, + host_machine, + host_port, + source_match.group("prot"), + ) + else: + source_path = args.source + logger.info("Source path %s is local", os.path.abspath(source_path)) + + dest_match = re.match(REMOTE_PATH_REGEX, args.destination) + logger.debug("result of regex for destination: %s", dest_match) + + if dest_match is not None: + dest_is_remote = True + dest_path = dest_match.group("drive") + dest_match.group("path") + if source_is_remote and host_machine != dest_match.group("host"): + logger.error( + "Can't copy between different remotes '%s' and '%s'", + host_machine, + dest_match.group("host"), + ) + sys.exit(-1) + + host_machine = str(dest_match.group("host")) + if dest_match.group("port") is not None: + host_port = int(dest_match.group("port")) + if "ssh" in dest_match.group("prot"): + use_ssh = True + logger.info( + "Destination path %s is on remote %s:%s via %s", + dest_path, + host_machine, + host_port, + dest_match.group("prot"), + ) + else: + dest_path = args.destination + logger.info("Destination path %s is local", os.path.abspath(dest_path)) + + if use_ssh: + import sshtunnel + + ssh_forwarder = sshtunnel.SSHTunnelForwarder(host_machine, remote_bind_address=("127.0.0.1", host_port)) + ssh_forwarder.start() + host_machine = "127.0.0.1" + host_port = ssh_forwarder.local_bind_port + logger.info("SSH tunnel established. 
local port is %d", host_port)
+
+    try:
+        con = pyLSV2.LSV2(hostname=host_machine, port=host_port, timeout=args.timeout)
+        con.connect()
+    except socket.gaierror as ex:
+        logger.error("An Exception occurred: '%s'", ex)
+        logger.error("Could not resolve host information: '%s'", host_machine)
+        sys.exit(-2)
+
+    if source_is_remote:
+        file_info = con.file_info(remote_file_path=str(source_path))
+        if not file_info:
+            logger.error("source file does not exist on remote: '%s'", source_path)
+            sys.exit(-3)
+        elif file_info.is_directory or file_info.is_drive:
+            logger.error("source on remote is not file but directory: '%s'", source_path)
+            sys.exit(-4)
+    else:
+        if os.path.exists(source_path):
+            logger.debug("source file exists")
+        else:
+            if os.path.isfile(source_path):
+                logger.error("source file does not exist: '%s'", source_path)
+                sys.exit(-5)
+            else:
+                logger.error("source folder does not exist: '%s'", source_path)
+                sys.exit(-6)
+
+    success = False
+    if source_is_remote and dest_is_remote:
+        logger.debug("Local copy on remote")
+        success = con.copy_remote_file(source_path=source_path, target_path=dest_path)
+    elif source_is_remote and not dest_is_remote:
+        logger.debug("copy from remote to local")
+        success = con.recive_file(remote_path=source_path, local_path=dest_path, override_file=args.force)
+    else:
+        logger.debug("copy from local to remote")
+        success = con.send_file(local_path=source_path, remote_path=dest_path, override_file=args.force)
+    con.disconnect()
+
+    if success:
+        logger.info("File copied successfully")
+        sys.exit(0)
+    sys.exit(-10)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/pyLSV2/demos/lsv2_demo.py b/pyLSV2/scripts/demo.py
similarity index 79%
rename from pyLSV2/demos/lsv2_demo.py
rename to pyLSV2/scripts/demo.py
index 3082150..5268472 100644
--- a/pyLSV2/demos/lsv2_demo.py
+++ b/pyLSV2/scripts/demo.py
@@ -168,5 +168,54 @@ def comprehensive_demo():
     print("# direct reading of current tool not supported for this control")
+
+
+def scope_demo():
+    
with pyLSV2.LSV2("192.168.56.102", port=19000, timeout=5, safe_mode=False) as con: + if not con.versions.is_itnc(): + print("the scope functions only work for iTNC controls") + sys.exit(-1) + + availible_signals = con.read_scope_signals() + + # build list with selected signals + selected_signals = list() + selected_signals.append(availible_signals[0]) + selected_signals.append(availible_signals[1]) + selected_signals.append(availible_signals[2]) + + print("selected signals:") + for sig in selected_signals: + print("# %s" % sig) + + duration = 2 + interval = 6000 + + print("reading values for a duration of %d seconds with an interval of %d µs") + + # take readings: + readings_counter = 0 + + for package in con.real_time_readings(selected_signals, duration, interval): + signal_readings = package.get_data() + readings_per_signal = len(signal_readings[0].data) + print("successfully read %d signals with %d values each" % (len(signal_readings), readings_per_signal)) + + for i in range(readings_per_signal): + position_X = signal_readings[0].data[i] * signal_readings[0].factor + signal_readings[0].offset + position_Y = signal_readings[1].data[i] * signal_readings[1].factor + signal_readings[1].offset + position_Z = signal_readings[2].data[i] * signal_readings[2].factor + signal_readings[2].offset + readings_counter += 1 + + print( + "Count: %d Position X = %.3f mm, Position Y = %.3f, Position Z = %.3f" + % (readings_counter, position_X, position_Y, position_Z) + ) + + print("# a total of %d readings were taken" % readings_counter) + + print("# the signal description was updated to:") + for signal in selected_signals: + print("##", signal) + + if __name__ == "__main__": comprehensive_demo() diff --git a/pyLSV2/demos/scope2csv.py b/pyLSV2/scripts/scope2csv.py similarity index 100% rename from pyLSV2/demos/scope2csv.py rename to pyLSV2/scripts/scope2csv.py diff --git a/pyLSV2/demos/ssh_tunnel.py b/pyLSV2/scripts/ssh_tunnel.py similarity index 100% rename from 
pyLSV2/demos/ssh_tunnel.py
rename to pyLSV2/scripts/ssh_tunnel.py
diff --git a/pyLSV2/demos/tab2csv.py b/pyLSV2/scripts/tab2csv.py
similarity index 100%
rename from pyLSV2/demos/tab2csv.py
rename to pyLSV2/scripts/tab2csv.py
diff --git a/pyproject.toml b/pyproject.toml
index 96bc429..e6348d9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,15 +17,17 @@ dynamic = ["version", "readme"]
 "Bug Tracker" = "https://github.com/drunsinn/pyLSV2/issues"
 
 [project.scripts]
-lsv2_demo = "pyLSV2.demos.lsv2_demo:comprehensive_demo"
-lsv2_scope2csv = "pyLSV2.demos.scope2csv:main"
-lsv2_tab2csv = "pyLSV2.demos.tab2csv:main"
-lsv2_tunnel_demo = "pyLSV2.demos.ssh_tunnel:main"
+lsv2demo = "pyLSV2.scripts.demo:comprehensive_demo"
+lsv2demo_scope = "pyLSV2.scripts.demo:scope_demo"
+lsv2demo_ssh = "pyLSV2.scripts.ssh_tunnel:main"
+lsv2scope2csv = "pyLSV2.scripts.scope2csv:main"
+lsv2tab2csv = "pyLSV2.scripts.tab2csv:main"
+lsv2cmd = "pyLSV2.scripts.cmd:main"
 
 [tool.setuptools]
 packages = [
     "pyLSV2",
-    "pyLSV2.demos",
+    "pyLSV2.scripts",
     "pyLSV2.locales.en.LC_MESSAGES",
     "pyLSV2.locales.de.LC_MESSAGES",
 ]

From 3195f730069e2b9fd034d384ac09493a6500dcbe Mon Sep 17 00:00:00 2001
From: Max
Date: Sun, 8 Oct 2023 15:37:38 +0200
Subject: [PATCH 10/11] fix error in scope demo

---
 pyLSV2/scripts/demo.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyLSV2/scripts/demo.py b/pyLSV2/scripts/demo.py
index 5268472..cb2bcfa 100644
--- a/pyLSV2/scripts/demo.py
+++ b/pyLSV2/scripts/demo.py
@@ -187,7 +187,7 @@ def scope_demo():
             print("# %s" % sig)
 
         duration = 2
-        interval = 6000
+        interval = 3000
 
         print("reading values for a duration of %d seconds with an interval of %d µs")
 

From b2ef66debe4c51b102e9ddd9885dd91d887f92fb Mon Sep 17 00:00:00 2001
From: Max
Date: Sun, 8 Oct 2023 15:38:01 +0200
Subject: [PATCH 11/11] add license info and optional dependencies

---
 pyproject.toml | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index e6348d9..e1adf5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,6 +11,13 @@ description = "A pure Python3 implementation of the LSV2 protocol" requires-python = ">=3.6" keywords = ["LSV2", "CNC", "PLC"] dynamic = ["version", "readme"] +dependencies = [] + +[project.license] +file = "LICENSE" + +[project.optional-dependencies] +SSH = ["sshtunnel>=0.4"] [project.urls] "Homepage" = "https://github.com/drunsinn/pyLSV2" @@ -32,12 +39,6 @@ packages = [ "pyLSV2.locales.de.LC_MESSAGES", ] -#[tool.setuptools.package-data] -#pyLSV2 = ["locales/*/LC_MESSAGES/*.mo", ] - -#[tool.setuptools.exclude-package-data] -#pyLSV2 = ["locales/*/LC_MESSAGES/*.po", ] - [tool.setuptools.dynamic] version = { attr = "pyLSV2.__version__" } readme = { file = ["README.md"] }