diff --git a/.devcontainer/.gitignore b/.devcontainer/.gitignore new file mode 100644 index 00000000..2f22d22f --- /dev/null +++ b/.devcontainer/.gitignore @@ -0,0 +1 @@ +.zsh_history diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 00000000..f212d05b --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,40 @@ +# A useful base repository based on ms devcontainer but with a bunch of fixes +# and useful installs (geo tools are unnecessary for this project, but all the other tweaks are helpful) +FROM windpioneers/gdal-python:familiar-catshark-gdal-2.4.1-python-3.9-dev + +# Tell zsh where you want to store history +# We leave you to decide, but if you put this into a folder that's been mapped +# into the container, then history will persist over container rebuilds :) +# +# !!!IMPORTANT!!! +# Make sure your .zsh_history file is NOT committed into your repository, as it can contain +# sensitive information. So in this case, you should add +# .devcontainer/.zsh_history +# to your .gitignore file. +# +ENV HISTFILE="/workspaces/data-gateway/.devcontainer/.zsh_history" + +# Switch to vscode user +USER vscode +WORKDIR /workspaces/data-gateway + +# Install the rust toolchain and give permission for all users to use it +ENV RUST_INSTALL_DIR=/home/vscode/.rust +ENV RUSTUP_HOME="${RUST_INSTALL_DIR}/rustup" +ENV CARGO_HOME="${RUST_INSTALL_DIR}/cargo" +RUN mkdir -p ${RUSTUP_HOME} && \ mkdir -p ${CARGO_HOME} +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y +ENV PATH="${CARGO_HOME}/bin:${PATH}" +RUN chmod -R ugo+rwx ${RUST_INSTALL_DIR} + +# Install poetry +RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python +ENV PATH "/home/vscode/.poetry/bin:$PATH" +RUN poetry config virtualenvs.create false + +# Install python dependencies. Note that poetry installs the root package by default, but the root +# package is not available at this stage of dependency caching. So we do a dependency-only install here +# to cache the dependencies, then a full poetry install post-create to install the root package. +COPY pyproject.toml poetry.lock ./ +RUN poetry install --no-ansi --no-interaction --no-root diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..ff4630e8 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,84 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: +// https://github.com/microsoft/vscode-dev-containers/tree/v0.187.0/containers/python-3 +{ + "name": "Data-Gateway Devcontainer", + "build": { + "dockerfile": "Dockerfile", + "context": ".." + }, + // Set *default* container specific settings.json values on container create.
+ "settings": { + "austin.mode": "Wall time", + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnSave": true, + "esbonio.server.enabled": true, + "esbonio.sphinx.confDir": "${workspaceFolder}/docs/source", + "jupyter.widgetScriptSources": ["jsdelivr.com", "unpkg.com"], + "prettier.prettierPath": "/usr/local/prettier", + "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.provider": "black", + "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", + "python.languageServer": "Pylance", + "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", + "python.linting.enabled": true, + "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", + "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", + "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", + "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", + // Line length to match black settings + // Disabling specific messages: + // - To find the details do: /usr/local/py-utils/bin/pylint --list-msgs + // - Disable missing-module-docstring (C0114) because we don't document modules routinely, just their members + // - Disable invalid-name (C0103) because pylint thinks that eg 'x', 'df', 'np' are invalid due to their lengths + "python.linting.pylintArgs": [ + "--max-line-length=120", + "--disable=missing-module-docstring,invalid-name" + ], + "python.linting.pylintEnabled": true, + "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint", + "python.pythonPath": "/usr/local/bin/python", + // Scrolling the editor is a nice idea but it doesn't work: always out of sync and impossible to manage + "restructuredtext.preview.scrollEditorWithPreview": false, + "restructuredtext.preview.scrollPreviewWithEditor": false, + "restructuredtext.linter.doc8.extraArgs": ["--max-line-length 180"], + "terminal.integrated.defaultProfile.linux": "zsh" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "bungcip.better-toml", + "esbenp.prettier-vscode@2.2.1", + "irongeek.vscode-env", + "lextudio.restructuredtext", + "me-dutour-mathieu.vscode-github-actions", + "mikestead.dotenv", + "ms-python.python", + "ms-python.vscode-pylance", + "ms-toolsai.jupyter", + "ms-toolsai.jupyter-renderers", + "ms-toolsai.jupyter-keymap", + "ms-vsliveshare.vsliveshare", + "p403n1x87.austin-vscode", + "ritwickdey.liveserver", + "trond-snekvik.simple-rst" + ], + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + "forwardPorts": [80, 443, 5000, 7045, 7046, 7047, 7048, 7049, 8000, 8080], + + // Poetry install *with* the root, which can't be cached in the docker layers (see dockerfile) + "postCreateCommand": "poetry install && pre-commit install && pre-commit install -t commit-msg", + + // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 
+ "remoteUser": "vscode", + + // Allow ptrace based debuggers (like austin) to work in the container + "runArgs": [ + "--env-file", + "${localWorkspaceFolder}/.env", + "--cap-add=SYS_PTRACE", + "--security-opt", + "seccomp=unconfined" + ] +} diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..c94a4c01 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,9 @@ +**/.DS_Store +.coverage +.devcontainer/ +.env +.pytest_cache/ +.tox/ +.venv/ +.vscode/ +*.wg.conf diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index a36b197b..d5f90ddb 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -3,12 +3,12 @@ -## Summary +# Summary -## Quality Checklist +# Quality Checklist - [ ] New features are fully tested (No matter how much Coverage Karma you have) - [ ] **[v0.2 onward]** New features are included in the documentation diff --git a/.github/workflows/balena.yml b/.github/workflows/balena.yml new file mode 100644 index 00000000..ee20423f --- /dev/null +++ b/.github/workflows/balena.yml @@ -0,0 +1,17 @@ +name: balena + +on: + push: + branches: + - main + +jobs: + balena-push: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1.1.0 + - uses: theaccordance/balena-push@v1.0.0 + with: + api-token: ${{secrets.BALENA_API_TOKEN}} + application-name: ${{secrets.BALENA_APPLICATION_NAME}} diff --git a/.github/workflows/gcloud-deploy.yml b/.github/workflows/gcloud-deploy.yml new file mode 100644 index 00000000..ea3d686d --- /dev/null +++ b/.github/workflows/gcloud-deploy.yml @@ -0,0 +1,68 @@ +name: gcloud-deploy + +on: + push: + branches: + - main + - test + +jobs: + deploy: + permissions: + contents: "read" + id-token: "write" + runs-on: ubuntu-latest + steps: + - id: checkout + uses: actions/checkout@v2 + + - name: Get prefix (test- or nothing for production) + id: prefix + run: | + if [ "${{ github.ref }}" = "main" ]; then + echo "::set-output name=name_prefix::" + else + echo "::set-output name=name_prefix::test-" + fi + + - id: auth + uses: google-github-actions/auth@v0 + with: + workload_identity_provider: "projects/885434704038/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider" + service_account: "data-gateway-ci-testing@aerosense-twined.iam.gserviceaccount.com" + + - id: deploy-add-sensor-type + uses: google-github-actions/deploy-cloud-functions@v0 + with: + name: ${{ steps.prefix.outputs.name_prefix }}add-sensor-type + description: Allows addition of a new sensor type whose values will be accepted into the database + entry_point: add_sensor_type + runtime: python39 + region: europe-west6 + env_vars: BIG_QUERY_DATASET_NAME=${{ steps.prefix.outputs.name_prefix }}greta,COMPUTE_PROVIDER=GOOGLE_CLOUD_FUNCTION,DESTINATION_PROJECT_NAME=aerosense-twined + source_dir: cloud_functions + + - id: deploy-create-installation + uses: google-github-actions/deploy-cloud-functions@v0 + with: + name: ${{ steps.prefix.outputs.name_prefix }}create-installation + description: Allows creation of a new installation + entry_point: create_installation + runtime: python39 + region: europe-west6 + env_vars: BIG_QUERY_DATASET_NAME=${{ steps.prefix.outputs.name_prefix }}greta,COMPUTE_PROVIDER=GOOGLE_CLOUD_FUNCTION,DESTINATION_PROJECT_NAME=aerosense-twined + source_dir: cloud_functions + + - id: deploy-ingress-eu + uses: google-github-actions/deploy-cloud-functions@v0 + with: + name: ${{ steps.prefix.outputs.name_prefix }}ingress-eu + entry_point: upload_window + 
runtime: python39 + region: europe-west6 + memory: 1GB + env_vars: BIG_QUERY_DATASET_NAME=${{ steps.prefix.outputs.name_prefix }}greta,COMPUTE_PROVIDER=GOOGLE_CLOUD_FUNCTION,DESTINATION_PROJECT_NAME=aerosense-twined,DESTINATION_BUCKET_NAME=data-gateway-processed-data,SOURCE_PROJECT_NAME=aerosense-twined + source_dir: cloud_functions + event_trigger_type: google.storage.object.finalize + event_trigger_resource: projects/_/buckets/${{ steps.prefix.outputs.name_prefix }}aerosense-ingress-eu + event_trigger_service: storage.googleapis.com diff --git a/.gitignore b/.gitignore index 331ebf6d..6ccb592d 100644 --- a/.gitignore +++ b/.gitignore @@ -105,3 +105,9 @@ ENV/ .DS_store .pytest_cache + +# Wireguard credentials +*.wg.conf + +# VSCode local settings +.vscode \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7129af19..ed67fc61 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,8 @@ -exclude: 'build|docs|node_modules|.git|.tox|dist|docs|data_gateway.egg-info' +exclude: "build|docs|node_modules|.git|.tox|dist|docs|data_gateway.egg-info" default_stages: [commit] fail_fast: true default_language_version: - python: python3 # force all unspecified python hooks to run python3 + python: python3 # force all unspecified python hooks to run python3 repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 @@ -18,10 +18,10 @@ repos: - id: isort - repo: https://github.com/psf/black - rev: 21.6b0 + rev: 22.3.0 hooks: - id: black - args: ['--line-length', '120'] + args: ["--line-length", "120"] - repo: https://gitlab.com/pycqa/flake8 rev: 3.9.2 @@ -29,14 +29,14 @@ repos: - id: flake8 language_version: python3 additional_dependencies: - - 'pep8-naming' + - "pep8-naming" args: - --ignore-names=setUp,tearDown,setUpClass,tearDownClass,asyncSetUp,asyncTearDown,setUpTestData,failureException,longMessage,maxDiff,startTestRun,stopTestRun - repo: https://github.com/pycqa/pydocstyle rev: 6.1.1 hooks: - - id: pydocstyle + - id: pydocstyle - repo: https://github.com/thclark/pre-commit-sphinx rev: 0.0.3 @@ -44,11 +44,11 @@ repos: - id: build-docs language_version: python3 additional_dependencies: - - 'poetry>=1,<2' - - 'Sphinx>=4,<5' - - 'sphinx-rtd-theme>=1,<2' - - 'sphinx-tabs>=3,<4' - - 'sphinx-autoapi==1.8.4' + - "poetry>=1,<2" + - "Sphinx>=4,<5" + - "sphinx-rtd-theme>=1,<2" + - "sphinx-tabs>=3,<4" + - "sphinx-autoapi==1.8.4" - repo: https://github.com/windpioneers/pre-commit-hooks rev: 0.0.5 @@ -56,20 +56,22 @@ repos: - id: check-branch-name language_version: python3 args: - - '^main$' - - '^development$' - - '^devops/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^doc/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^feature/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^fix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^hotfix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^review/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^refactor/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^enhancement/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' - - '^test/([a-z][a-z0-9]*)(-[a-z0-9]+)*$' + - "^main$" + - "^test$" + - "^devops/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^doc/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^feature/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^fix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^hotfix/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^review/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^refactor/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^enhancement/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" + - "^dependencies/([a-z][a-z0-9]*)(-[a-z0-9]+)*$" - - repo: https://github.com/octue/conventional-commits - rev: 0.3.1 + - repo: 
https://github.com/octue/pre-commit-hooks + rev: 0.6.0 hooks: - id: check-commit-message-is-conventional stages: [commit-msg] + args: + - --maximum-body-line-length=2000 diff --git a/Dockerfile.template b/Dockerfile.template new file mode 100644 index 00000000..fdf9864e --- /dev/null +++ b/Dockerfile.template @@ -0,0 +1,26 @@ +# base-image for python on any machine using a template variable, +# see more about dockerfile templates here: https://www.balena.io/docs/learn/develop/dockerfile/ +FROM balenalib/%%BALENA_MACHINE_NAME%%-debian-python:3.8-bullseye-run + +# Install git for development purposes +RUN install_packages git + +# Set our working directory +RUN mkdir -p /usr/aerosense/data-gateway +WORKDIR /usr/aerosense/data-gateway + +# Copy requirements.txt first for better cache on later pushes +COPY requirements-pi.txt requirements-pi.txt + +# Install python deps on the resin.io build server +RUN pip install -r requirements-pi.txt + +# Copy and install the project, to source the gateway CLI +COPY . ./ +RUN pip install . + +# Enable udevd so that plugged dynamic hardware devices show up in our container. +ENV UDEV=1 + +# Keep the container alive after start, so we can ssh in and use the gateway +CMD ["sleep", "infinity"] diff --git a/README.md b/README.md index 90102f23..3d3f18eb 100644 --- a/README.md +++ b/README.md @@ -8,13 +8,13 @@ Read the docs [here.](https://aerosense-data-gateway.readthedocs.io/en/latest/) +*Note that the test coverage figure is more like 90% - the recent addition of multiprocessing has made it difficult to +measure the true coverage across multiple processes.* + ## Installation and usage -To install, run one of -```shell -pip install data-gateway -``` +To install, run: ```shell -poetry add data-gateway +pip install git+https://github.com/aerosense-ai/data-gateway.git ``` The command line interface (CLI) can then be accessed via: @@ -48,28 +48,31 @@ Commands: ## Developer notes ### Installation -We're using `poetry` instead of `pip` to manage the package. In terms of developer experience, this just means there are -some slightly different commands to run than usual. `data-gateway` can still be `pip`-installed by anyone anywhere, but -dependency resolution and dependency specification for `data-gateway` developers is improved by using `poetry` locally. -#### Clone the repository +#### Poetry +We're using `poetry` instead of `pip` to manage the package to take advantage of the `poetry.lock` file [among other +useful features](https://python-poetry.org/). In terms of developer experience, this just means there are some slightly +different commands to run than usual. `data-gateway` can still be `pip`-installed by anyone anywhere, but dependency +resolution and dependency specification for `data-gateway` developers is improved by using `poetry` locally. -First, clone the repository: -```shell -export GATEWAY_VERSION="0.11.7" # Or whatever release number you aim to use, check the latest available on GitHub; -git clone https://github.com/aerosense-ai/data-gateway.git@${GATEWAY_VERSION} -``` +#### Architecture-specific installations +Due to some (most likely temporary) constraints with `poetry` and the need to run and develop the gateway on Linux, +Windows, M1 Macs, and Raspberry Pis, the need has arisen for some slightly different installation procedures on these +different architectures/platforms. Instructions are detailed below - [click here](https://github.com/aerosense-ai/data-gateway/issues/65) +to read more. 
-Then, change directory into the repository: +#### Clone the repository +First, clone the repository and `cd` into it: ```shell +git clone https://github.com/aerosense-ai/data-gateway.git cd data-gateway ``` -#### Install on Linux or MacOS +Then follow the instructions for your platform below. -Run the following from the repository root. -```bash -# Install poetry. +#### Install on MacOS and Linux (except on Raspberry Pi) +Run the following from the repository root: +```shell pip install poetry # Editably install data-gateway, including its development dependencies. @@ -81,32 +84,43 @@ This will editably install `data-gateway` in a `poetry`-managed virtual environment - It won't be affected by changes to other python packages you have installed on your system, making development much easier and more deterministic -Don't forget to re-activate the virtual environment each time you use a new terminal window to work in the repository. +#### Install on Raspberry Pi +Run the following from the repository root: +```shell +pip install -r requirements-pi-dev.txt +``` #### Install on Windows This workflow works for Windows using Powershell. Prerequisites: -1. Make sure to have python not installed from the [python.org](https://www.python.org/) +1. Make sure you do not have Python installed from [python.org](https://www.python.org/) 2. Install [pyenv-win](https://github.com/pyenv-win/pyenv-win) via pip method 3. Execute ```pip install virtualenv``` Installation: -1. Clone this repo as described above. -2. `cd data-gateway` -3. `pyenv install 3.7.0` (or higher) -4. `pyenv local 3.7.0` -5. `pyenv rehash` -6. `virtualenv venv` -7. `./venv/Scripts/activate` -8. `pip install poetry` -9. `poetry install` +```shell +pyenv install 3.7.0 # (or higher) +pyenv local 3.7.0 +pyenv rehash +virtualenv venv +./venv/Scripts/activate +pip install poetry +poetry install +``` Every time you enter the repo over powershell again, make sure to activate the venv using -``` +```shell ./venv/Scripts/activate ``` +#### Troubleshooting +If there are problems reading the serial port, try running this (or the equivalent on non-Linux platforms) and retrying: +```shell +sudo apt-get update +sudo apt-get install libhdf5-dev libhdf5-serial-dev +``` + ### Testing These environment variables need to be set to run the tests: * `GOOGLE_APPLICATION_CREDENTIALS=/absolute/path/to/service/account/file.json` diff --git a/cloud_functions/big_query.py b/cloud_functions/big_query.py index 9f74ee1b..c0eedbf9 100644 --- a/cloud_functions/big_query.py +++ b/cloud_functions/big_query.py @@ -1,10 +1,11 @@ import copy import datetime +import hashlib +import importlib.util import json import logging import uuid -from blake3 import blake3 from google.cloud import bigquery from exceptions import ( @@ -17,6 +18,20 @@ logger = logging.getLogger(__name__) +if importlib.util.find_spec("blake3"): + from blake3 import blake3 +else: + blake3 = hashlib.sha256 + logger.warning( + "The blake3 package is not available, so hashlib.sha256 is being used instead. This is probably because blake3 " + "is only required by the cloud function, where it is separately specified as a requirement. The reason blake3 " + "is not in the development or production dependencies is that it requires the rust language/bindings to be " + "available, which adds multiple unnecessary steps when installing data-gateway on Raspberry Pi.
blake3 being " + "unavailable is not a problem for general development, testing, or gateway-only production, but if this warning " + "shows up in the production cloud function, it is a problem. Pip install blake3 to resume normal behaviour." + ) + + SENSOR_NAME_MAPPING = { "Mics": "microphone", "Baros_P": "barometer", @@ -50,10 +65,11 @@ def __init__(self, project_name, dataset_name): "microphone_data": f"{self.dataset_id}.microphone_data", } - def add_sensor_data(self, data, configuration_id, installation_reference, label=None): + def add_sensor_data(self, data, node_id, configuration_id, installation_reference, label=None): """Insert sensor data into the dataset for the given configuration and installation references. :param dict data: data from the sensors - the keys are the sensor names and the values are samples in the form of lists of lists + :param str node_id: :param str configuration_id: the UUID of the configuration used to produce the given data :param str installation_reference: the reference (name) of the installation that produced the data :param str|None label: an optional label relevant to the given data @@ -69,6 +85,7 @@ def add_sensor_data(self, data, configuration_id, installation_reference, label= rows.append( { "datetime": datetime.datetime.fromtimestamp(sample[0]), + "node_id": node_id, "sensor_type_reference": sensor_type_reference, "sensor_value": sample[1:], "configuration_id": configuration_id, @@ -77,17 +94,23 @@ def add_sensor_data(self, data, configuration_id, installation_reference, label= } ) - errors = self.client.insert_rows(table=self.client.get_table(self.table_names["sensor_data"]), rows=rows) + if len(rows) > 0: - if errors: - raise ValueError(errors) + errors = self.client.insert_rows(table=self.client.get_table(self.table_names["sensor_data"]), rows=rows) - logger.info("Uploaded %d samples of sensor data to BigQuery dataset %r.", len(rows), self.dataset_id) + if errors: + raise ValueError(errors) + + logger.info("Uploaded %d samples of sensor data to BigQuery dataset %r.", len(rows), self.dataset_id) + + logger.warning( + "Received 0 samples of sensor data, skipping insert of data to BigQuery dataset %r", self.dataset_id + ) def record_microphone_data_location_and_metadata( self, path, - project_name, + node_id, configuration_id, installation_reference, label=None, @@ -95,7 +118,7 @@ def record_microphone_data_location_and_metadata( """Record the file location and metadata for a window of microphone data. 
:param str path: the Google Cloud Storage path to the microphone data - :param str project_name: the name of the project the storage bucket belongs to + :param str node_id: :param str configuration_id: the UUID of the configuration used to produce the data :param str installation_reference: the reference for the installation that produced the data :param str|None label: the label applied to the gateway session that produced the data @@ -107,7 +130,7 @@ def record_microphone_data_location_and_metadata( rows=[ { "path": path, - "project_name": project_name, + "node_id": node_id, "configuration_id": configuration_id, "installation_reference": installation_reference, "label": label, @@ -164,14 +187,12 @@ def add_sensor_type(self, name, reference, description=None, measuring_unit=None logger.info("Added new sensor %r to BigQuery dataset %r.", reference, self.dataset_id) - def add_installation(self, reference, turbine_id, blade_id, hardware_version, sensor_coordinates, location=None): + def add_installation(self, reference, turbine_id, receiver_firmware_version, location=None): """Add a new installation to the BigQuery dataset. :param str reference: the name to give to the installation :param str turbine_id: - :param str blade_id: - :param str hardware_version: the version of the sensor hardware at this installation - :param dict sensor_coordinates: sensor name mapped to an array of (x, y, r) coordinates for each individual sensor + :param str receiver_firmware_version: the version of the receiver firmware in this installation :param str|None location: the geographical location of the installation in WKT format if relevant (it may not be if it's a wind tunnel which could be set up anywhere) :raise cloud_functions.exceptions.InstallationWithSameNameAlreadyExists: if an installation with the given name already exists :raise ValueError: if the addition fails @@ -195,9 +216,7 @@ def add_installation(self, reference, turbine_id, blade_id, hardware_version, se { "reference": reference, "turbine_id": turbine_id, - "blade_id": blade_id, - "hardware_version": hardware_version, - "sensor_coordinates": json.dumps(sensor_coordinates), + "receiver_firmware_version": receiver_firmware_version, "location": location, } ], @@ -219,16 +238,16 @@ def add_configuration(self, configuration): configuration = copy.deepcopy(configuration) # Installation data is stored in a separate column, so pop it before the next step. 
- installation_data = configuration.pop("installation_data") + gateway_configuration = configuration.pop("gateway") - software_configuration_json = json.dumps(configuration) - software_configuration_hash = blake3(software_configuration_json.encode()).hexdigest() + nodes_configuration_json = json.dumps(configuration["nodes"]) + nodes_configuration_hash = blake3(nodes_configuration_json.encode()).hexdigest() configuration_id = self._get_field_if_exists( table_name=self.table_names["configuration"], field_name="id", - comparison_field_name="software_configuration_hash", - value=software_configuration_hash, + comparison_field_name="nodes_configuration_hash", + value=nodes_configuration_hash, ) if configuration_id: @@ -238,18 +257,18 @@ def add_configuration(self, configuration): ) configuration_id = str(uuid.uuid4()) - installation_data_json = json.dumps(installation_data) - installation_data_hash = blake3(installation_data_json.encode()).hexdigest() + gateway_configuration_json = json.dumps(gateway_configuration) + gateway_configuration_hash = blake3(gateway_configuration_json.encode()).hexdigest() errors = self.client.insert_rows( table=self.client.get_table(self.table_names["configuration"]), rows=[ { "id": configuration_id, - "software_configuration": software_configuration_json, - "software_configuration_hash": software_configuration_hash, - "installation_data": installation_data_json, - "installation_data_hash": installation_data_hash, + "nodes_configuration": nodes_configuration_json, + "nodes_configuration_hash": nodes_configuration_hash, + "gateway_configuration": gateway_configuration_json, + "gateway_configuration_hash": gateway_configuration_hash, } ], ) diff --git a/cloud_functions/forms.py b/cloud_functions/forms.py index 9baf85d4..587402c7 100644 --- a/cloud_functions/forms.py +++ b/cloud_functions/forms.py @@ -20,9 +20,7 @@ def __call__(self, form, field): class CreateInstallationForm(FlaskForm): reference = StringField("Reference", [validators.DataRequired(), SlugifiedValidator()]) turbine_id = StringField("Turbine ID", [validators.DataRequired()]) - blade_id = StringField("Blade ID", [validators.DataRequired()]) - hardware_version = StringField("Hardware version", [validators.DataRequired()]) - sensor_coordinates = StringField("Sensor coordinates", [validators.DataRequired()]) + receiver_firmware_version = StringField("Receiver firmware version", [validators.DataRequired()]) longitude = FloatField("Longitude", [validators.Optional()]) latitude = FloatField("Latitude", [validators.Optional()]) diff --git a/cloud_functions/main.py b/cloud_functions/main.py index 41c18add..87411150 100644 --- a/cloud_functions/main.py +++ b/cloud_functions/main.py @@ -35,13 +35,14 @@ def upload_window(event, context): ) window, window_metadata = window_handler.get_window() - - unix_timestamped_window = window_handler.convert_window_timestamps_to_unix_time(window) - window_handler.persist_window(unix_timestamped_window["sensor_data"], window_metadata) + window_handler.persist_window(window, window_metadata) def add_sensor_type(request): - """Add a new sensor type to the BigQuery dataset. This is the entrypoint for the `add-sensor-type` cloud function.""" + """Add a new sensor type to the BigQuery dataset. This is the entrypoint for the `add-sensor-type` cloud function. 
+ + :return (dict, int): + """ form = AddSensorTypeForm(meta={"csrf": False}) if request.method != "POST": @@ -87,6 +88,8 @@ def add_sensor_type(request): def create_installation(request): """Create a new installation in the BigQuery dataset. This is the entrypoint for the `create-installation` cloud function. + + :return (dict, int): """ form = CreateInstallationForm(meta={"csrf": False}) @@ -108,9 +111,7 @@ def create_installation(request): dataset.add_installation( reference=form.reference.data, turbine_id=form.turbine_id.data, - blade_id=form.blade_id.data, - hardware_version=form.hardware_version.data, - sensor_coordinates=form.sensor_coordinates.data, + receiver_firmware_version=form.receiver_firmware_version.data, location=location, ) diff --git a/cloud_functions/requirements.txt b/cloud_functions/requirements.txt index ebff4bd6..c77c9595 100644 --- a/cloud_functions/requirements.txt +++ b/cloud_functions/requirements.txt @@ -1,9 +1,7 @@ blake3==0.2.1 flask_wtf>=1,<2 google-cloud-bigquery>=2.29,<3 -octue[hdf5]==0.16.0 -pandas>=1.2,<3 +octue[hdf5]==0.27.3 python-slugify>=5,<6 -scipy>=1.6,<2 shapely>=1.8,<2 wtforms[email]>=3,<4 diff --git a/cloud_functions/window_handler.py b/cloud_functions/window_handler.py index aafe1858..86edf7b1 100644 --- a/cloud_functions/window_handler.py +++ b/cloud_functions/window_handler.py @@ -58,6 +58,7 @@ def get_window(self): logger.info("Downloaded window %r.", self.window_cloud_path) cloud_metadata = self.source_client.get_metadata(self.window_cloud_path) + logger.info("Custom metadata (logged for debugging upload race condition): %s", cloud_metadata) window_metadata = cloud_metadata["custom_metadata"]["data_gateway__configuration"] logger.info("Downloaded metadata for window %r.", self.window_cloud_path) @@ -70,43 +71,39 @@ def persist_window(self, window, window_metadata): :param dict window_metadata: useful metadata about how the data was produced (currently the configuration the data gateway used to read it from the sensors) :return None: """ - session_data = window_metadata.pop("session_data") + session_data = window_metadata.pop("session") try: configuration_id = self.dataset.add_configuration(window_metadata) except ConfigurationAlreadyExists as e: configuration_id = e.args[1] - if MICROPHONE_SENSOR_NAME in window: - self._store_microphone_data( - data=window.pop(MICROPHONE_SENSOR_NAME), + for node_id, node_data in window.items(): + if MICROPHONE_SENSOR_NAME in node_data: + self._store_microphone_data( + data=node_data.pop(MICROPHONE_SENSOR_NAME), + node_id=node_id, + configuration_id=configuration_id, + installation_reference=window_metadata["gateway"]["installation_reference"], + label=session_data.get("label"), + ) + + self.dataset.add_sensor_data( + data=node_data, + node_id=node_id, configuration_id=configuration_id, - installation_reference=window_metadata["installation_data"]["installation_reference"], + installation_reference=window_metadata["gateway"]["installation_reference"], label=session_data.get("label"), ) - self.dataset.add_sensor_data( - data=window, - configuration_id=configuration_id, - installation_reference=window_metadata["installation_data"]["installation_reference"], - label=session_data.get("label"), - ) - logger.info("Uploaded window to BigQuery dataset %r.", self.destination_big_query_dataset) - def convert_window_timestamps_to_unix_time(self, window): - """Use sensor_time_offset, to convert sensor node internal clock timestamps into UNIX time for the window samples""" - for sensor in window["sensor_data"].keys(): - 
for sample in window["sensor_data"][sensor]: - sample[0] += window["sensor_time_offset"] - - return window - - def _store_microphone_data(self, data, configuration_id, installation_reference, label): + def _store_microphone_data(self, data, node_id, configuration_id, installation_reference, label): """Store microphone data as an HDF5 file in the destination cloud storage bucket and record its location and metadata in a BigQuery table. :param list(list) data: + :param str node_id: :param str configuration_id: :param str installation_reference: :param str label: @@ -120,19 +117,25 @@ def _store_microphone_data(self, data, configuration_id, installation_reference, else: labels = None - with Datafile( + microphone_file = Datafile( path=storage.path.generate_gs_path(self.destination_bucket, "microphone", upload_path), - tags={"configuration_id": configuration_id, "installation_reference": installation_reference}, + tags={ + "node_id": node_id, + "configuration_id": configuration_id, + "installation_reference": installation_reference, + }, labels=labels, - mode="w", - ) as (datafile, f): + hypothetical=True, + ) + + with microphone_file.open("w") as f: f["dataset"] = data - logger.info(f"Uploaded {len(data)} microphone data entries to {datafile.cloud_path!r}.") + logger.info(f"Uploaded {len(data)} microphone data entries to {microphone_file.cloud_path!r}.") self.dataset.record_microphone_data_location_and_metadata( - path=datafile.cloud_path, - project_name=self.destination_project, + path=microphone_file.cloud_path, + node_id=node_id, configuration_id=configuration_id, installation_reference=installation_reference, label=label, diff --git a/cloudbuild.yaml b/cloudbuild.yaml deleted file mode 100644 index c07ca3af..00000000 --- a/cloudbuild.yaml +++ /dev/null @@ -1,66 +0,0 @@ -steps: -- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' - id: Deploy test-ingress-eu cloud function - args: - - gcloud - - functions - - deploy - - test-ingress-eu - - --source=cloud_functions - - --entry-point=upload_window - - --runtime=python38 - - --trigger-resource=test-aerosense-ingress-eu - - --trigger-event=google.storage.object.finalize - - --memory=1GB - - --region=europe-west6 - - --set-env-vars=SOURCE_PROJECT_NAME=aerosense-twined,DESTINATION_PROJECT_NAME=aerosense-twined,DESTINATION_BUCKET_NAME=test-data-gateway-processed-data,BIG_QUERY_DATASET_NAME=test_greta - - --timeout=540 - -- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' - id: Deploy create-installation cloud function - args: - - gcloud - - functions - - deploy - - create-installation - - --source=cloud_functions - - --entry-point=create_installation - - --runtime=python39 - - --trigger-http - - --security-level=secure-always - - --region=europe-west6 - - --set-env-vars=DESTINATION_PROJECT_NAME=aerosense-twined,BIG_QUERY_DATASET_NAME=greta - -- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' - id: Deploy add-sensor-type cloud function - args: - - gcloud - - functions - - deploy - - add-sensor-type - - --source=cloud_functions - - --entry-point=add_sensor_type - - --runtime=python39 - - --trigger-http - - --security-level=secure-always - - --region=europe-west6 - - --set-env-vars=DESTINATION_PROJECT_NAME=aerosense-twined,BIG_QUERY_DATASET_NAME=greta - -- name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' - id: Deploy ingress-eu cloud function - args: - - gcloud - - functions - - deploy - - ingress-eu - - --source=cloud_functions - - --entry-point=upload_window - - --runtime=python38 - - --trigger-resource=aerosense-ingress-eu - - 
--trigger-event=google.storage.object.finalize - --memory=1GB - --region=europe-west6 - --set-env-vars=SOURCE_PROJECT_NAME=aerosense-twined,DESTINATION_PROJECT_NAME=aerosense-twined,DESTINATION_BUCKET_NAME=data-gateway-processed-data,BIG_QUERY_DATASET_NAME=greta - --timeout=540 - -timeout: 1200s diff --git a/data_gateway/__init__.py b/data_gateway/__init__.py index 92a600dc..04715231 100644 --- a/data_gateway/__init__.py +++ b/data_gateway/__init__.py @@ -2,7 +2,6 @@ __all__ = ("exceptions",) -MICROPHONE_SENSOR_NAME = "Mics" def stop_gateway(logger, stop_signal): diff --git a/data_gateway/cli.py b/data_gateway/cli.py index 9a4e7717..b541b531 100644 --- a/data_gateway/cli.py +++ b/data_gateway/cli.py @@ -1,4 +1,5 @@ import multiprocessing +import os import click import pkg_resources @@ -63,14 +64,21 @@ def gateway_cli(logger_uri, log_level): type=click.Path(dir_okay=False), default="config.json", show_default=True, - help="Path to your Aerosense deployment configuration JSON file.", + help="Path to your Aerosense deployment configuration JSON file. This value is overridden by the environment variable GATEWAY_CONFIG_FILE if set.", ) @click.option( "--routine-file", type=click.Path(dir_okay=False), default="routine.json", show_default=True, - help="Path to sensor command routine JSON file.", + help="Path to sensor command routine JSON file. This value is overridden by the environment variable GATEWAY_ROUTINE_FILE if set.", +) +@click.option( + "--stop-routine-file", + type=click.Path(dir_okay=False), + default="stop_routine.json", + show_default=True, + help="Path to sensor command routine JSON file to be executed on exit of the gateway loop (i.e. a routine which will shut down the sensors after running the gateway). This value is overridden by the environment variable GATEWAY_STOP_ROUTINE_FILE if set.", ) @click.option( "--save-locally", "-l", is_flag=True, default=False, show_default=True, help="Save output JSON data to disk." ) @click.option( @@ -96,7 +104,7 @@ def gateway_cli(logger_uri, log_level): type=click.Path(file_okay=False), default="data_gateway", show_default=True, - help="The directory in which to save data windows from the gateway.", + help="The directory in which to save data windows from the gateway. This value is overridden by the environment variable GATEWAY_OUTPUT_DIR if set.", ) @click.option( "--window-size", @@ -108,9 +116,9 @@ @click.option( "--gcp-bucket-name", type=click.STRING, - default=None, + default="aerosense-ingress-eu", show_default=True, - help="The name of the Google Cloud Platform (GCP) storage bucket to use.", + help="The name of the Google Cloud Platform (GCP) storage bucket to use.
This value is overridden by the environment variable GATEWAY_GCP_BUCKET_NAME if set.", ) @click.option( "--label", @@ -142,6 +150,7 @@ def start( serial_port, config_file, routine_file, + stop_routine_file, save_locally, no_upload_to_cloud, interactive, @@ -160,16 +169,24 @@ def start( """ from data_gateway.data_gateway import DataGateway + # Allow override of defaults from the environment + overridden_config_file = os.environ.get("GATEWAY_CONFIG_FILE", None) or config_file + overridden_output_dir = os.environ.get("GATEWAY_OUTPUT_DIR", None) or output_dir + overridden_routine_file = os.environ.get("GATEWAY_ROUTINE_FILE", None) or routine_file + overridden_stop_routine_file = os.environ.get("GATEWAY_STOP_ROUTINE_FILE", None) or stop_routine_file + overridden_gcp_bucket_name = os.environ.get("GATEWAY_GCP_BUCKET_NAME", None) or gcp_bucket_name + data_gateway = DataGateway( serial_port=serial_port, - configuration_path=config_file, - routine_path=routine_file, + configuration_path=overridden_config_file, + routine_path=overridden_routine_file, + stop_routine_path=overridden_stop_routine_file, save_locally=save_locally, upload_to_cloud=not no_upload_to_cloud, interactive=interactive, - output_directory=output_dir, + output_directory=overridden_output_dir, window_size=window_size, - bucket_name=gcp_bucket_name, + bucket_name=overridden_gcp_bucket_name, label=label, save_csv_files=save_csv_files, use_dummy_serial_port=use_dummy_serial_port, @@ -189,17 +206,15 @@ def start( ) def create_installation(config_file): """Create an installation representing a collection of sensors that data can be collected from. The installation - information is read from the "installation_data" field of `configuration.json`. + information is read from the "gateway" field of `configuration.json`. """ import json - from data_gateway.exceptions import WrongNumberOfSensorCoordinatesError - with open(config_file or "configuration.json") as f: configuration = json.load(f) - installation_data = configuration["installation_data"] - slugified_reference = slugify(installation_data["installation_reference"]) + gateway_configuration = configuration["gateway"] + slugified_reference = slugify(gateway_configuration["installation_reference"]) while True: user_confirmation = input(f"Create installation with reference {slugified_reference!r}? [Y/n]\n") @@ -210,31 +225,19 @@ def create_installation(config_file): if user_confirmation.upper() in {"Y", ""}: break - for sensor, coordinates in installation_data["sensor_coordinates"].items(): - number_of_sensors = configuration["number_of_sensors"][sensor] - - if len(coordinates) != number_of_sensors: - raise WrongNumberOfSensorCoordinatesError( - f"In the configuration file, the number of sensors for the {sensor!r} sensor type is " - f"{number_of_sensors} but coordinates were given for {len(coordinates)} sensors - these numbers must " - f"match." 
- ) - # Required parameters: parameters = { "reference": slugified_reference, - "turbine_id": installation_data["turbine_id"], - "blade_id": installation_data["blade_id"], - "hardware_version": installation_data["hardware_version"], - "sensor_coordinates": json.dumps(installation_data["sensor_coordinates"]), + "turbine_id": gateway_configuration["turbine_id"], + "receiver_firmware_version": gateway_configuration["receiver_firmware_version"], } # Optional parameters: - if installation_data.get("longitude"): - parameters["longitude"] = installation_data["longitude"] + if gateway_configuration.get("longitude"): + parameters["longitude"] = gateway_configuration["longitude"] - if installation_data.get("latitude"): - parameters["latitude"] = installation_data["latitude"] + if gateway_configuration.get("latitude"): + parameters["latitude"] = gateway_configuration["latitude"] print("Creating...") @@ -301,23 +304,14 @@ def add_sensor_type(name, description, measuring_unit, metadata): @gateway_cli.command() -@click.option( - "--config-file", - type=click.Path(), - default="config.json", - show_default=True, - help="Path to your Aerosense deployment configuration file.", -) -def supervisord_conf(config_file): +def supervisord_conf(): """Print conf entry for use with supervisord. Daemonising a process ensures it automatically restarts after a failure and on startup of the operating system failure. """ - import os - supervisord_conf_str = f""" [program:{SUPERVISORD_PROGRAM_NAME,}] -command=gateway start --config-file {os.path.abspath(config_file)}""" +command=gateway start""" print(supervisord_conf_str) return 0 diff --git a/data_gateway/configuration.py b/data_gateway/configuration.py index dbf795ac..fb19c663 100644 --- a/data_gateway/configuration.py +++ b/data_gateway/configuration.py @@ -1,188 +1,297 @@ -from data_gateway import MICROPHONE_SENSOR_NAME +import copy +from data_gateway.exceptions import WrongNumberOfSensorCoordinatesError -class Configuration: - """A data class containing the configuration values for the firmware and hardware used by the data gateway. - TODO Refactor configuration: make each sensor a nested dict, with all the sensor parameters realtive to it. - :param float mics_freq: microphones sampling frequency - :param float mics_bm: TODO - :param float baros_freq: barometers sampling frequency - :param float baros_bm: TODO +BASE_STATION_ID = "base-station" + +DEFAULT_SENSOR_NAMES = [ + "Mics", + "Baros_P", + "Baros_T", + "Diff_Baros", + "Acc", + "Gyro", + "Mag", + "Analog Vbat", + "Constat", +] + +DEFAULT_DEFAULT_HANDLES = { + "34": "Abs. baros", + "36": "Diff. 
baros", + "38": "Mic 0", + "40": "Mic 1", + "42": "IMU Accel", + "44": "IMU Gyro", + "46": "IMU Magnetometer", + "48": "Analog1", + "50": "Analog2", + "52": "Constat", + "54": "Cmd Decline", + "56": "Sleep State", + "58": "Remote Info Message", + "60": "Timestamp Packet 0", + "62": "Timestamp Packet 1", + "64": "Local Info Message", +} + +DEFAULT_DECLINE_REASONS = { + "0": "Bad block detection ongoing", + "1": "Task already registered, cannot register again", + "2": "Task is not registered, cannot de-register", + "3": "Connection parameter update unfinished", + "4": "Not ready to sleep", + "5": "Not in sleep", +} + +DEFAULT_SLEEP_STATES = {"0": "Exiting sleep", "1": "Entering sleep"} + +DEFAULT_REMOTE_INFO_TYPES = {"0": "Battery info"} + +DEFAULT_LOCAL_INFO_TYPES = { + "0": "Synchronization not ready as not every sensor node is connected", + "1": "Time synchronization info", + "2": "Time sync exception", + "4": "Time sync coarse data record error", + "8": "Time sync alignment error", + "16": "Time sync coarse data time diff error", + "32": "Device not connected", + "64": "Select message destination successful", + "128": "Time sync success", + "129": "Coarse sync finish", + "130": "Time sync msg sent", +} + +DEFAULT_SAMPLES_PER_PACKET = { + "Mics": 8, + "Diff_Baros": 24, + "Baros_P": 1, + "Baros_T": 1, + "Acc": 40, # IMU, int(240 / 2 / 3) + "Gyro": 40, # IMU, int(240 / 2 / 3) + "Mag": 40, # IMU, int(240 / 2 / 3) + "Analog Vbat": 60, + "Constat": 24, +} + +DEFAULT_SENSOR_CONVERSION_CONSTANTS = { + "Mics": 1, + "Diff_Baros": 1, + "Baros_P": 40.96, + "Baros_T": 100, + "Acc": 1, + "Gyro": 1, + "Mag": 1, + "Analog Vbat": 1, + "Constat": 1, +} + +DEFAULT_SENSOR_COMMANDS = { + "start": ["startBaros", "startDiffBaros", "startIMU", "startMics"], + "stop": ["stopBaros", "stopDiffBaros", "stopIMU", "stopMics"], + "configuration": ["configBaros", "configAccel", "configGyro", "configMics"], + "utilities": [ + "getBattery", + "setConnInterval", + "tpcBoostIncrease", + "tpcBoostDecrease", + "tpcBoostHeapMemThr1", + "tpcBoostHeapMemThr2", + "tpcBoostHeapMemThr4", + ], +} + +DEFAULT_NUMBER_OF_SENSORS = { + "Mics": 10, + "Baros_P": 40, + "Baros_T": 40, + "Diff_Baros": 5, + "Acc": 3, + "Gyro": 3, + "Mag": 3, + "Analog Vbat": 2, + "Constat": 4, +} + +DEFAULT_SESSION = { + "label": None, +} + + +class GatewayConfiguration: + """A data class containing configured/default values for the gateway receiver + + :param float baudrate: serial port baud rate + :param Literal["little", "big"] endian: one of "little" or "big" + :param str installation_reference: A unique reference (id) for the current installation + :param float latitude: The latitude of the turbine in WGS84 coordinate system + :param float longitude: The longitude of the turbine in WGS84 coordinate system + :param int packet_key: The prefix value for packets received from the base station (i.e. not from a node) + :param int packet_key_offset: The base prefix value for packets received from nodes (node_packet_key = node_id + packet_key_offset) + :param str receiver_firmware_version: The version of the firmware running on the gateway receiver, if known. 
+ :param int serial_buffer_rx_size: serial receiving buffer size in bytes + :param int serial_buffer_tx_size: serial transmitting buffer size in bytes + :param str turbine_id: A unique id for the turbine on which this is installed + :return None: + """ + + def __init__( + self, + baudrate=2300000, + endian="little", + installation_reference="unknown", + latitude=0, + longitude=0, + packet_key=254, + packet_key_offset=245, + receiver_firmware_version="unknown", + serial_buffer_rx_size=4095, + serial_buffer_tx_size=1280, + turbine_id="unknown", + ): + self.baudrate = baudrate + self.endian = endian + self.installation_reference = installation_reference + self.latitude = latitude + self.longitude = longitude + self.serial_buffer_rx_size = serial_buffer_rx_size + self.serial_buffer_tx_size = serial_buffer_tx_size + self.turbine_id = turbine_id + self.receiver_firmware_version = receiver_firmware_version + self.packet_key = packet_key + self.packet_key_offset = packet_key_offset + + def to_dict(self): + """Convert the configuration to a dictionary. + + :return dict: + """ + return vars(self) + + +class NodeConfiguration: + """A data class containing configured/default values for a sensor node + :param float acc_freq: accelerometers sampling frequency - :param float acc_range: TODO - :param float gyro_freq: gyrometers sampling frequency - :param float gyro_range: TODO + :param float acc_range: TODO nobody seems to know... :param float analog_freq: analog sensors sampling frequency + :param float baros_bm: TODO nobody seems to know... + :param float baros_freq: barometers sampling frequency + :param str blade_id: The id of the blade on which the node is mounted, if known :param float constat_period: period of incoming connection statistic parameters in ms - :param int serial_buffer_rx_size: serial receiving buffer size in bytes - :param int serial_buffer_tx_size: serial transmitting buffer size in bytes - :param float baudrate: serial port baud rate - :param Literal["little", "big"] endian: one of "little" or "big" + :param dict|None decline_reason: + :param float diff_baros_freq: differential barometers sampling frequency + :param dict|None default_handles: Map of the default handles which a node will use to communicate packet type (the expected contents of packet payload). These are defaults, as they may be altered on the fly by a node. + :param float gyro_freq: gyrometers sampling frequency + :param float gyro_range: TODO nobody seems to know... + :param dict|None local_info_type: A map of labels to the type of information received from base station + :param float mag_freq: + :param float mics_freq: microphones sampling frequency + :param float mics_bm: TODO nobody seems to know... :param float max_timestamp_slack: TODO # 5ms :param float max_period_drift: TODO # 2% difference between IMU clock and CPU clock allowed - :param int packet_key: TODO - :param int type_handle_def: TODO - :param int mics_samples_per_packet: number of samples per packet from microphones - :param int imu_samples_per_packet: TODO - :param int analog_samples_per_packet: number of samples per packet from analog sensors - :param int baros_samples_per_packet: number of samples per packet from barometers - :param int constat_samples_per_packet: number of samples per packet from connection statistics - :param list sensor_names: Sensors present on the measurement node - :param dict|None default_handles: Each handle identifies packet type, i.e. 
expected contents of packet payload - :param dict|None samples_per_packet: TODO - :param dict|None number_of_sensors: TODO - :param dict|None period: + :param str node_firmware_version: The version of the firmware on the node, if known. + :param dict|None number_of_sensors: A map for each sensor, giving the number of samples expected from that sensor + :param dict|None remote_info_type: A map of labels to the type of information received from remote node + :param dict|None samples_per_packet: A map for each sensor, giving the number of samples sent in a packet from that sensor :param dict|None sensor_commands: - :param dict|None installation_data: metadata about the current session of the gateway provided by the user + :param dict|None sensor_conversion_constants: + :param dict|None sensor_coordinates: + :param list|None sensor_names: List of sensors present on the measurement node + :param dict|None sleep_state: + :param int type_handle_def: TODO :return None: """ def __init__( self, - mics_freq=15625, - mics_bm=0x3FF, - baros_freq=100, - diff_baros_freq=1000, - baros_bm=0x3FF, acc_freq=100, acc_range=16, + analog_freq=16384, + baros_bm=0x3FF, + baros_freq=100, + blade_id="unknown", + constat_period=45, + decline_reason=None, + diff_baros_freq=1000, + default_handles=None, gyro_freq=100, gyro_range=2000, + local_info_type=None, mag_freq=12.5, - analog_freq=16384, - constat_period=45, # period in ms - serial_buffer_rx_size=100000, - serial_buffer_tx_size=1280, - baudrate=2300000, - endian="little", + mics_freq=15625, + mics_bm=0x3FF, max_timestamp_slack=5e-3, max_period_drift=0.02, - packet_key=0xFE, - type_handle_def=0xFF, - mics_samples_per_packet=8, - baros_samples_per_packet=1, - diff_baros_samples_per_packet=24, - imu_samples_per_packet=int(240 / 2 / 3), - analog_samples_per_packet=60, - constat_samples_per_packet=24, - sensor_names=None, - default_handles=None, - decline_reason=None, - sleep_state=None, - info_type=None, - samples_per_packet=None, + node_firmware_version="unknown", number_of_sensors=None, - sensor_conversion_constants=None, - period=None, + remote_info_type=None, + samples_per_packet=None, sensor_commands=None, - installation_data=None, - session_data=None, + sensor_conversion_constants=None, + sensor_coordinates=None, + sensor_names=None, + sleep_state=None, + type_handle_def=0xFF, ): - self.mics_freq = mics_freq - self.mics_bm = mics_bm - self.baros_freq = baros_freq - self.diff_baros_freq = diff_baros_freq - self.baros_bm = baros_bm + # Set kwargs as attributes directly self.acc_freq = acc_freq self.acc_range = acc_range + self.analog_freq = analog_freq + self.baros_bm = baros_bm + self.baros_freq = baros_freq + self.blade_id = blade_id + self.constat_period = constat_period + self.diff_baros_freq = diff_baros_freq self.gyro_freq = gyro_freq self.gyro_range = gyro_range self.mag_freq = mag_freq - self.analog_freq = analog_freq - self.constat_period = constat_period - self.serial_buffer_rx_size = serial_buffer_rx_size - self.serial_buffer_tx_size = serial_buffer_tx_size - self.baudrate = baudrate - self.endian = endian self.max_timestamp_slack = max_timestamp_slack self.max_period_drift = max_period_drift - self.packet_key = packet_key + self.mics_bm = mics_bm + self.mics_freq = mics_freq + self.node_firmware_version = node_firmware_version self.type_handle_def = type_handle_def - self.mics_samples_per_packet = mics_samples_per_packet - self.imu_samples_per_packet = imu_samples_per_packet - self.analog_samples_per_packet = analog_samples_per_packet -
self.baros_samples_per_packet = baros_samples_per_packet - self.diff_baros_samples_per_packet = diff_baros_samples_per_packet - self.constat_samples_per_packet = constat_samples_per_packet - - self.sensor_names = sensor_names or [ - MICROPHONE_SENSOR_NAME, - "Baros_P", - "Baros_T", - "Diff_Baros", - "Acc", - "Gyro", - "Mag", - "Analog Vbat", - "Constat", - ] - - self.default_handles = default_handles or { - "34": "Abs. baros", - "36": "Diff. baros", - "38": "Mic 0", - "40": "Mic 1", - "42": "IMU Accel", - "44": "IMU Gyro", - "46": "IMU Magnetometer", - "48": "Analog1", - "50": "Analog2", - "52": "Constat", - "54": "Cmd Decline", - "56": "Sleep State", - "58": "Info Message", - } - self.decline_reason = decline_reason or { - "0": "Bad block detection ongoing", - "1": "Task already registered, cannot register again", - "2": "Task is not registered, cannot de-register", - "3": "Connection Parameter update unfinished", - } + # Set default dictionaries + self.decline_reason = decline_reason or DEFAULT_DECLINE_REASONS + self.default_handles = default_handles or DEFAULT_DEFAULT_HANDLES + self.remote_info_type = remote_info_type or DEFAULT_REMOTE_INFO_TYPES + self.local_info_type = local_info_type or DEFAULT_LOCAL_INFO_TYPES + self.number_of_sensors = number_of_sensors or DEFAULT_NUMBER_OF_SENSORS + self.samples_per_packet = samples_per_packet or DEFAULT_SAMPLES_PER_PACKET + self.sensor_commands = sensor_commands or DEFAULT_SENSOR_COMMANDS + self.sensor_conversion_constants = sensor_conversion_constants or DEFAULT_SENSOR_CONVERSION_CONSTANTS + self.sensor_names = sensor_names or DEFAULT_SENSOR_NAMES + self.sleep_state = sleep_state or DEFAULT_SLEEP_STATES - self.sleep_state = sleep_state or {"0": "Exiting sleep", "1": "Entering sleep"} + # Set calculated defaults + self.sensor_coordinates = sensor_coordinates or self._get_default_sensor_coordinates() - self.info_type = info_type or {"0": "Battery info"} + for sensor, coordinates in self.sensor_coordinates.items(): + number_of_sensors = self.number_of_sensors[sensor] - self.samples_per_packet = samples_per_packet or { - MICROPHONE_SENSOR_NAME: self.mics_samples_per_packet, - "Diff_Baros": self.diff_baros_samples_per_packet, - "Baros_P": self.baros_samples_per_packet, - "Baros_T": self.baros_samples_per_packet, - "Acc": self.imu_samples_per_packet, - "Gyro": self.imu_samples_per_packet, - "Mag": self.imu_samples_per_packet, - "Analog Vbat": self.analog_samples_per_packet, - "Constat": self.constat_samples_per_packet, - } + if len(coordinates) != number_of_sensors: + raise WrongNumberOfSensorCoordinatesError( + f"The number of sensors for the {sensor!r} sensor type is {number_of_sensors} but coordinates " + f"were only given for {len(coordinates)} sensors. Coordinates must be given for every sensor." 
+ ) - self.number_of_sensors = number_of_sensors or { - MICROPHONE_SENSOR_NAME: 10, - "Baros_P": 40, - "Baros_T": 40, - "Diff_Baros": 5, - "Acc": 3, - "Gyro": 3, - "Mag": 3, - "Analog Vbat": 1, - "Constat": 4, - } + # Ensure conversion constants are consistent + self._expand_sensor_conversion_constants() - self.sensor_conversion_constants = sensor_conversion_constants or { - MICROPHONE_SENSOR_NAME: [1] * self.number_of_sensors[MICROPHONE_SENSOR_NAME], - "Diff_Baros": [1] * self.number_of_sensors["Diff_Baros"], - "Baros_P": [40.96] * self.number_of_sensors["Baros_P"], - "Baros_T": [100] * self.number_of_sensors["Baros_T"], - "Acc": [1] * self.number_of_sensors["Acc"], - "Gyro": [1] * self.number_of_sensors["Gyro"], - "Mag": [1] * self.number_of_sensors["Mag"], - "Analog Vbat": [1] * self.number_of_sensors["Analog Vbat"], - "Constat": [1] * self.number_of_sensors["Constat"], - } + # Validate the final configuration + self._check() - self.period = period or { - MICROPHONE_SENSOR_NAME: 1 / self.mics_freq, + @property + def periods(self): + """Get the period in seconds for each sensor (computed from the sensor frequencies). + + :return dict: + """ + return { + "Mics": 1 / self.mics_freq, "Baros_P": 1 / self.baros_freq, "Baros_T": 1 / self.baros_freq, "Diff_Baros": 1 / self.diff_baros_freq, @@ -193,48 +302,140 @@ def __init__( "Constat": self.constat_period / 1000, } - self.sensor_commands = sensor_commands or { - "start": ["startBaros", "startDiffBaros", "startIMU", "startMics"], - "stop": ["stopBaros", "stopDiffBaros", "stopIMU", "stopMics"], - "configuration": ["configBaros", "configAccel", "configGyro", "configMics"], - "utilities": [ - "getBattery", - "setConnInterval", - "tpcBoostIncrease", - "tpcBoostDecrease", - "tpcBoostHeapMemThr1", - "tpcBoostHeapMemThr2", - "tpcBoostHeapMemThr4", - ], - } + def to_dict(self): + """Serialise the configuration to a dictionary. - self.installation_data = installation_data or { - "installation_reference": None, - "longitude": None, - "latitude": None, - "turbine_id": None, - "blade_id": None, - "hardware_version": None, - "sensor_coordinates": { - sensor_name: [(0, 0, 0)] * number_of_sensors - for sensor_name, number_of_sensors in self.number_of_sensors.items() - }, - } + :return dict: + """ + return {**vars(self), "periods": self.periods} - self.session_data = session_data or { - "label": None, + def _get_default_sensor_coordinates(self): + return { + sensor_name: [(0, 0, 0)] * number_of_sensors + for sensor_name, number_of_sensors in self.number_of_sensors.items() } + def _check(self): + """Serialise self to JSON then make sure it matches a schema.""" + # NOT IMPLEMENTED YET + # See https://github.com/aerosense-ai/data-gateway/issues/18 + + def _expand_sensor_conversion_constants(self): + """Expand the sensor conversion constants to arrays of the correct size. + + This means that sensor conversion constants can be given as single values rather than lists. + Passing a full list of adjusted values is still possible, to calibrate individual samples.
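+        For example, with the default of 10 "Mics" sensors, a conversion constant of {"Mics": 1} expands to {"Mics": [1] * 10}.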
+ """ + unconverted = self.sensor_conversion_constants + converted = dict() + for key, value in unconverted.items(): + number_of_samples = self.number_of_sensors[key] + if isinstance(value, (int, float)): + converted[key] = [value] * number_of_samples + elif isinstance(value, list): + if len(value) != number_of_samples: + raise ValueError( + f"If you give a list of conversion constants for {key}, it must be the same length as the number of samples you expect from that sensor (got length {len(value)}, require length {number_of_samples})" + ) + converted[key] = value + else: + raise ValueError(f"Unknown sensor conversion constant value {value}") + + self.sensor_conversion_constants = converted + + +class Configuration: + """Configuration class for gateway, node and session configuration data. + + :param dict|None gateway: A dict of values used to customise the gateway configuration + :param dict|None nodes: A dict of dicts, keyed by node_id, used to customise the configuration for each node + :param dict|None session: A dict of metadata about the current session of the gateway provided by the user + :return None: + """ + + def __init__(self, gateway=None, nodes=None, session=None, **kwargs): + gateway_configuration = gateway or {} + self.gateway = GatewayConfiguration(**gateway_configuration) + + if len(kwargs) > 0: + raise ValueError( + "Properties other than 'gateway', 'nodes' and 'session' passed to Configuration. Are you using an " + "old-format configuration file?" + ) + + # Set up a single-node default in the absence of any nodes at all. + self.nodes = {} + + if nodes is None: + self.nodes["0"] = NodeConfiguration() + else: + for node_id, node in nodes.items(): + self.nodes[str(node_id)] = NodeConfiguration(**node) + + # Set up the session-specific data as empty. + self.session = session or DEFAULT_SESSION + @classmethod def from_dict(cls, dictionary): - """Construct a configuration from a dictionary. Note that all the configuration values are required - the - construction will fail if any are missing (i.e. default arguments are disabled for this alternative constructor) + """Construct a configuration from a dictionary. Note that a dictionary for each sub-configurations is required + - the construction will fail if any are missing. :param dict dictionary: :return Configuration: """ - return cls(**{attribute_name: dictionary[attribute_name] for attribute_name in vars(Configuration())}) + dictionary = copy.deepcopy(dictionary) + + for node in dictionary["nodes"].values(): + if "periods" in node: + node.pop("periods") + + return cls( + gateway=dictionary["gateway"], + nodes=dictionary["nodes"], + session=dictionary["session"], + ) + + @property + def node_ids(self): + """Get the IDs of the nodes in the current configuration. + + :return list: A list of node ids + """ + return list(self.nodes) + + def get_leading_byte(self, node_id=None): + """Get the leading byte for a given node ID or for base station packets by default. Uses the packet key and the + packet key offset. 
+
+        :param int|str node_id: The node ID for which you want the packet key
+        :return bytes: The leading byte for packets from the given node or the base station
+        """
+        if node_id is None:
+            return self.gateway.packet_key.to_bytes(1, self.gateway.endian)
+
+        node_packet_key = self.gateway.packet_key_offset + int(node_id)
+        return node_packet_key.to_bytes(1, self.gateway.endian)
+
+    @property
+    def leading_bytes_map(self):
+        """Access a dict that maps leading bytes to node_ids (or the base station ID).
+
+        :return dict:
+        """
+        nodes = {self.get_leading_byte(node_id): node_id for node_id in self.node_ids}
+
+        return {
+            self.get_leading_byte(): BASE_STATION_ID,
+            **nodes,
+        }
 
     def to_dict(self):
-        """Serialise the configuration to a dictionary."""
-        return vars(self)
+        """Serialise the configuration to a dictionary.
+
+        :return dict:
+        """
+        return {
+            "gateway": self.gateway.to_dict(),
+            "nodes": {name: node.to_dict() for name, node in self.nodes.items()},
+            "session": self.session,
+        }
diff --git a/data_gateway/data_gateway.py b/data_gateway/data_gateway.py
index 301267e3..2d11b1b7 100644
--- a/data_gateway/data_gateway.py
+++ b/data_gateway/data_gateway.py
@@ -7,15 +7,14 @@
 import threading
 import time
 
-import serial
 from octue.log_handlers import apply_log_handler
 
 from data_gateway import stop_gateway
 from data_gateway.configuration import Configuration
-from data_gateway.dummy_serial import DummySerial
 from data_gateway.exceptions import DataMustBeSavedError
 from data_gateway.packet_reader import PacketReader
 from data_gateway.routine import Routine
+from data_gateway.serial_port import get_serial_port
 
 logger = multiprocessing.get_logger()
 
@@ -42,6 +41,7 @@ class DataGateway:
     :param str|serial.Serial serial_port: the name of the serial port or a `serial.Serial` instance to read from
     :param str configuration_path: the path to a JSON configuration file for the packet reader
     :param str routine_path: the path to a JSON routine file containing sensor commands to be run automatically
+    :param str stop_routine_path: the path to a JSON routine file containing sensor commands to be run automatically on exit of the gateway (e.g. safe shutdown)
     :param bool save_locally: if `True`, save data windows to disk locally
     :param bool upload_to_cloud: if `True`, upload data windows to Google Cloud Storage
     :param bool interactive: if `True`, allow commands entered into `stdin` to be sent to the sensors in real time
@@ -51,6 +51,7 @@ class DataGateway:
     :param str|None label: a label to be associated with the data collected in this run of the data gateway
     :param bool save_csv_files: if `True`, also save windows locally as CSV files for debugging
     :param bool use_dummy_serial_port: if `True` use a dummy serial port for testing
+    :param bool stop_sensors_on_exit: if `True` and a `stop_routine_path` is present, the stop routine will be executed by the gateway main thread prior to quitting
     :return None:
     """
 
     def __init__(
         self,
         serial_port,
         configuration_path="config.json",
         routine_path="routine.json",
+        stop_routine_path="stop_routine.json",
         save_locally=False,
         upload_to_cloud=True,
         interactive=False,
@@ -85,12 +87,15 @@ def __init__(
         self.interactive = interactive
 
         packet_reader_configuration = self._load_configuration(configuration_path=configuration_path)
-        packet_reader_configuration.session_data["label"] = label
+        packet_reader_configuration.session["label"] = label
 
-        self.serial_port = self._get_serial_port(
+        self.serial_port_name = serial_port
+        self.use_dummy_serial_port = use_dummy_serial_port
+
+        self.serial_port = get_serial_port(
             serial_port,
             configuration=packet_reader_configuration,
-            use_dummy_serial_port=use_dummy_serial_port,
+            use_dummy_serial_port=self.use_dummy_serial_port,
         )
 
         self.packet_reader = PacketReader(
@@ -104,6 +109,7 @@ def __init__(
         )
 
         self.routine = self._load_routine(routine_path=routine_path)
+        self.stop_routine = self._load_routine(routine_path=stop_routine_path)
         self.stop_sensors_on_exit = stop_sensors_on_exit
 
     def start(self, stop_when_no_more_data_after=False):
@@ -121,9 +127,10 @@ def start(self, stop_when_no_more_data_after=False):
             name="Reader",
             target=self.packet_reader.read_packets,
             kwargs={
-                "serial_port": self.serial_port,
+                "serial_port_name": self.serial_port_name,
                 "packet_queue": packet_queue,
                 "stop_signal": stop_signal,
+                "use_dummy_serial_port": self.use_dummy_serial_port,
             },
             daemon=True,
         )
@@ -167,22 +174,24 @@ def start(self, stop_when_no_more_data_after=False):
                 time.sleep(5)
 
         finally:
-            if not self.stop_sensors_on_exit:
-                return
-
-            sensor_stop_commands = self.packet_reader.config.sensor_commands.get("stop")
-
-            if not sensor_stop_commands:
-                logger.warning(
-                    "No sensor stop commands defined in configuration file - sensors cannot be automatically stopped."
-                )
-                return
-
-            # This should ensure that the `stopMics` command is run last.
-            for command in sensor_stop_commands:
-                self._send_command_to_sensors(command)
-                logger.info("Sent %r command.", command)
-                time.sleep(5)
+            if self.stop_sensors_on_exit:
+                if self.stop_routine is not None:
+                    logger.info(
+                        "Safely shutting down sensors using stop_routine. 
Press ctrl+c again to hard-exit (unsafe!)" + ) + # Run a thread to execute the stop routine + routine_thread = threading.Thread( + name="RoutineCommandsThread", + target=self.stop_routine.run, + kwargs={"stop_signal": stop_signal}, + daemon=True, + ) + routine_thread.start() + # Wait a sensible amount of time for the stop signals to flush, then exit + time.sleep(5) + + else: + logger.warning("No stop_routine file supplied - sensors cannot be automatically stopped.") def _load_configuration(self, configuration_path): """Load a configuration from the path if it exists; otherwise load the default configuration. @@ -201,32 +210,6 @@ def _load_configuration(self, configuration_path): logger.info("No configuration file provided - using default configuration.") return configuration - def _get_serial_port(self, serial_port, configuration, use_dummy_serial_port): - """Get the serial port or a dummy serial port if specified. If a serial port instance is provided, return that - as the serial port to use. - - :param str|serial.Serial serial_port: the name of a serial port or a `serial.Serial` instance - :param data_gateway.configuration.Configuration configuration: the packet reader configuration - :param bool use_dummy_serial_port: if `True`, use a dummy serial port instead - :return serial.Serial|data_gateway.dummy_serial.DummySerial: - """ - if isinstance(serial_port, str): - if not use_dummy_serial_port: - serial_port = serial.Serial(port=serial_port, baudrate=configuration.baudrate) - else: - serial_port = DummySerial(port=serial_port, baudrate=configuration.baudrate) - - # The buffer size can only be set on Windows. - if os.name == "nt": - serial_port.set_buffer_size( - rx_size=configuration.serial_buffer_rx_size, - tx_size=configuration.serial_buffer_tx_size, - ) - else: - logger.debug("Serial port buffer size can only be set on Windows.") - - return serial_port - def _load_routine(self, routine_path): """Load a sensor commands routine from the path if it exists, otherwise return no routine. If in interactive mode, the routine file is ignored. Note that "\n" has to be added to the end of each command sent to the serial diff --git a/data_gateway/new_packet_reader.py b/data_gateway/new_packet_reader.py new file mode 100644 index 00000000..96c77a85 --- /dev/null +++ b/data_gateway/new_packet_reader.py @@ -0,0 +1,851 @@ +# Works with python 3.6.5 + + +import os +import sys +import time +from _thread import start_new_thread +from datetime import datetime + +import serial + + +MICS_FREQ = 15625 + +MICS_BM = 0x3FF + +BAROS_FREQ = 100 + +DIFF_BAROS_FREQ = 1000 + +BAROS_BM = 0x3FF + +ACC_FREQ = 100 + +ACC_RANGE = 16 + +GYRO_FREQ = 100 + +GYRO_RANGE = 2000 + +ANALOG_FREQ = 16384 + +NUM_NODES = 6 + + +# for interaction with the base station + +mode = 0 # mode = 0: Linux, mode = 1: Windows + + +# for interaction with the aerosense debug + +# mode=1 + + +if mode == 0: + + BAUDRATE = 2300000 + + PORT = "/dev/ttyACM0" + +elif mode == 1: + + BAUDRATE = 2300000 + + PORT = "COM12" + + +ENDIAN = "little" + +MAX_TIMESTAMP_SLACK = 5e-3 # 5ms + +MAX_PERIOD_DRIFT = 0.02 # 2% difference between IMU clock and CPU clock allowed + + +PACKET_KEY = 0xFE + + +PACKET_KEY_OFFSET = 0xF5 + + +TYPE_HANDLE_DEF = 0xFF + + +handles = { + 34: "Abs. baros", + 36: "Diff. 
baros", + 38: "Mic 0", + 40: "Mic 1", + 42: "IMU Accel", + 44: "IMU Gyro", + 46: "IMU Magnetometer", + 48: "Analog1", + 50: "Analog2", + 52: "Constat", + 54: "Cmd Decline", + 56: "Sleep State", + 58: "Remote Info Message", + 60: "Timestamp Packet 0", + 62: "Timestamp Packet 1", + 64: "Local Info Message", +} + + +decline_reason = { + 0: "Bad block detection ongoing", + 1: "Task already registered, cannot register again", + 2: "Task is not registered, cannot de-register", + 3: "Connection parameter update unfinished", + 4: "Not ready to sleep", + 5: "Not in sleep", +} + + +sleep_state = {0: "Exiting sleep", 1: "Entering sleep"} + + +remote_info = {0: "Battery info"} + + +local_info = { + 0: "Synchronization not ready as not every sensor node is connected or connection parameters are not the desired ones", + 1: "Time synchronization info", + 2: "Time sync exception", + 4: "Time sync already in sync", + 8: "Time sync alignment error", + 16: "Time sync coarse data time diff error", + 32: "Device not connected", + 64: "select message destination successful", + 128: "Time sync success", + 129: "Coarse sync finish", + 130: "time sync msg sent", +} + + +def errPrint(s): + + print("***** " + s + " *****") + + +def parseHandleDef(payload): + + startHandle = int.from_bytes(payload[0:1], ENDIAN) + + endHandle = int.from_bytes(payload[2:3], ENDIAN) + + print(startHandle, endHandle) + + if endHandle - startHandle == 30: + + handles = { + startHandle + 2: "Abs. baros", + startHandle + 4: "Diff. baros", + startHandle + 6: "Mic 0", + startHandle + 8: "Mic 1", + startHandle + 10: "IMU Accel", + startHandle + 12: "IMU Gyro", + startHandle + 14: "IMU Magnetometer", + startHandle + 16: "Analog1", + startHandle + 18: "Analog2", + startHandle + 20: "Constat", + startHandle + 22: "Cmd Decline", + startHandle + 24: "Sleep State", + startHandle + 26: "Remote Info Message", + startHandle + 28: "Timestamp Packet 0", + startHandle + 30: "Timestamp Packet 1", + startHandle + 32: "Local Info Message", + } + + print("Successfully updated the handles") + + else: + + errPrint("Handle error: " + str(startHandle) + " " + str(endHandle)) + + +files = {} + + +MICS_SAMPLES_PER_PACKET = 8 + +BAROS_SAMPLES_PER_PACKET = 1 + +IMU_SAMPLES_PER_PACKET = int(240 / 2 / 3) + +ANALOG_SAMPLES_PER_PACKET = 60 + +DIFF_BAROS_SAMPLES_PER_PACKET = 24 + + +samplesPerPacket = { + "Mics": MICS_SAMPLES_PER_PACKET, + "Baros_P": BAROS_SAMPLES_PER_PACKET, + "Baros_T": BAROS_SAMPLES_PER_PACKET, + "Acc": IMU_SAMPLES_PER_PACKET, + "Gyro": IMU_SAMPLES_PER_PACKET, + "Mag": IMU_SAMPLES_PER_PACKET, + "Analog": ANALOG_SAMPLES_PER_PACKET, + "Diff_Baros": DIFF_BAROS_SAMPLES_PER_PACKET, +} + + +nMeasQty = { + "Mics": 10, + "Baros_P": 40, + "Baros_T": 40, + "Acc": 3, + "Gyro": 3, + "Mag": 3, + "Analog": 2, + "Diff_Baros": 5, +} + + +data = { + "Mics": [([0] * samplesPerPacket["Mics"]) for i in range(nMeasQty["Mics"])], + "Baros_P": [([0] * samplesPerPacket["Baros_P"]) for i in range(nMeasQty["Baros_P"])], + "Baros_T": [([0] * samplesPerPacket["Baros_T"]) for i in range(nMeasQty["Baros_T"])], + "Acc": [([0] * samplesPerPacket["Acc"]) for i in range(nMeasQty["Acc"])], + "Gyro": [([0] * samplesPerPacket["Gyro"]) for i in range(nMeasQty["Gyro"])], + "Mag": [([0] * samplesPerPacket["Mag"]) for i in range(nMeasQty["Mag"])], + "Analog": [([0] * samplesPerPacket["Analog"]) for i in range(nMeasQty["Analog"])], + "Diff_Baros": [([0] * samplesPerPacket["Diff_Baros"]) for i in range(nMeasQty["Diff_Baros"])], +} + + +period = { + "Mics": 1 / MICS_FREQ, + "Baros_P": 1 / 
BAROS_FREQ, + "Baros_T": 1 / BAROS_FREQ, + "Acc": 1 / ACC_FREQ, + "Gyro": 1 / GYRO_FREQ, + "Mag": 1 / 12.5, + "Analog": 1 / ANALOG_FREQ, + "Diff_Baros": 1 / DIFF_BAROS_FREQ, +} + + +currentTimestamp = {"Mics": 0, "Baros_P": 0, "Baros_T": 0, "Acc": 0, "Gyro": 0, "Mag": 0, "Analog": 0, "Diff_Baros": 0} + +prevIdealTimestamp = { + "Mics": 0, + "Baros_P": 0, + "Baros_T": 0, + "Acc": 0, + "Gyro": 0, + "Mag": 0, + "Analog": 0, + "Diff_Baros": 0, +} + + +def writeData(type, timestamp, period, node=1): # timestamp in s + + n = len(data[type][0]) # number of samples + + for i in range(len(data[type][0])): # iterate through all sample times + + time = timestamp - (n - i) * period + + files[node][type].write(str(time) + ",") + + for meas in data[type]: # iterate through all measured quantities + + files[node][type].write(str(meas[i]) + ",") + + files[node][type].write("\n") + + +# The sensor data arrive packets that contain n samples from some sensors of the same type, e.g. one barometer packet contains 40 samples from 4 barometers each. + +# For each sensor type (e.g. baro), this function waits until the packets from all sensors have arrived. Then it writes those to the .csv file. + +# Since timestamps only come at a packet level, this function also interpolates the within-packet-timestamps + + +def waitTillSetComplete(type, t, node=1): # timestamp in 1/(2**16) s + + if type == "Mics" or type == "Baros_P" or type == "Baros_T" or type == "Diff_Baros" or type == "Analog": + + # For those measurement types, the samples are inherently synchronized to the CPU time already. + + # The timestamps may be slightly off, so it takes the first one as a reference and then uses the following ones only to check if a packet has been dropped + + # Also, for mics and baros, there exist packet sets: Several packets arrive with the same timestamp + + if currentTimestamp[type] != 0: + + idealNewTimestamp = prevIdealTimestamp[type] + samplesPerPacket[type] * period[type] * (2 ** 16) + + if abs(idealNewTimestamp - currentTimestamp[type]) > MAX_TIMESTAMP_SLACK * ( + 2 ** 16 + ): # If at least one set (= one packet per mic/baro group) of packets was lost + + if prevIdealTimestamp[type] != 0 and type != "Mics": + + print( + "Lost set of " + + type + + " packets: " + + str((currentTimestamp[type] - idealNewTimestamp) / (2 ** 16) * 1000) + + "ms gap" + ) + + if type != "Mics": + + idealNewTimestamp = currentTimestamp[type] + + writeData(type, idealNewTimestamp / (2 ** 16), period[type], node) + + data[type] = [([0] * samplesPerPacket[type]) for i in range(nMeasQty[type])] + + prevIdealTimestamp[type] = idealNewTimestamp + + currentTimestamp[type] = t + + else: + + if type == "Mics": + + prevIdealTimestamp[type] = t + + currentTimestamp[type] = t + + print("Received first set of " + type + " packets") + + else: # The IMU values are not synchronized to the CPU time, so we simply always take the timestamp we have + + if currentTimestamp[type] != 0: + + per = period[type] + + if ( + prevIdealTimestamp[type] != 0 + ): # If there is a previous timestamp, calculate the actual sampling period from the difference to the current timestamp + + per = (currentTimestamp[type] - prevIdealTimestamp[type]) / samplesPerPacket[type] / (2 ** 16) + + if ( + abs(per - period[type]) / period[type] < MAX_PERIOD_DRIFT + ): # If the calculated period is reasonable, accept it. 
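# Standalone sketch (hypothetical helper) of the within-packet interpolation performed by
# writeData() above: only the packet as a whole is timestamped, so per-sample times are
# back-filled at the sensor's constant period, ending at the packet timestamp.
def interpolate_sample_times(packet_timestamp, number_of_samples, sample_period):
    return [packet_timestamp - (number_of_samples - i) * sample_period for i in range(number_of_samples)]

# A 24-sample diff-baro packet at 1 kHz stamped t=2.0 s covers 1.976 s up to (but excluding) 2.0 s.
assert interpolate_sample_times(2.0, 24, 1 / 1000)[0] == 2.0 - 24 * (1 / 1000)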
If not, most likely a packet got lost + + period[type] = per + + else: + + print( + "Lost " + + type + + " packet: " + + str((currentTimestamp[type] - prevIdealTimestamp[type]) / (2 ** 16) * 1000) + + "ms gap" + ) + + else: + + print("Received first " + type + " packet") + + writeData(type, t / (2 ** 16), period[type], node) + + prevIdealTimestamp[type] = currentTimestamp[type] + + currentTimestamp[type] = t + + +def parseSensorPacket(type, len, payload, node=1): + + global mic_cnt + + if not type in handles: + + print("Received packet with unknown type: ", type) + + print("Payload len: ", len) + + # print("Payload: ", int.from_bytes(payload, ENDIAN)) + + return + + t = int.from_bytes(payload[240:244], ENDIAN, signed=False) # Read timestamp from packet + + if handles[type] == "Abs. baros": + + waitTillSetComplete("Baros_P", t, node) + + waitTillSetComplete("Baros_T", t, node) + + # Write the received payload to the data field + + for i in range(BAROS_SAMPLES_PER_PACKET): + + for j in range(nMeasQty["Baros_P"]): + + bps = 6 # bytes per sample + + data["Baros_P"][j][i] = int.from_bytes( + payload[(bps * j) : (bps * j + 4)], ENDIAN, signed=False + ) # /4096 + + data["Baros_T"][j][i] = int.from_bytes( + payload[(bps * j + 4) : (bps * j + 6)], ENDIAN, signed=True + ) # /100 + + elif handles[type] == "Diff. baros": + + waitTillSetComplete("Diff_Baros", t, node) + + # int_payload = [x for x in payload] + + # print(int_payload) + + # Write the received payload to the data field + + for i in range(DIFF_BAROS_SAMPLES_PER_PACKET): + + for j in range(nMeasQty["Diff_Baros"]): + + bps = 2 # bytes per sample + + # this result depends on the sensor (multiply with the sensor max value to get the scaled result) + + # data["Diff_Baros"][j][i] = (int.from_bytes(payload[(bps*(nMeasQty["Diff_Baros"]*i+j)) : (bps*(nMeasQty["Diff_Baros"]*i+j)+bps)], ENDIAN, signed=False) - 6553)/(58982-6553) + + data["Diff_Baros"][j][i] = int.from_bytes( + payload[(bps * (nMeasQty["Diff_Baros"] * i + j)) : (bps * (nMeasQty["Diff_Baros"] * i + j) + bps)], + ENDIAN, + signed=False, + ) + + elif handles[type] == "Mic 0": + + waitTillSetComplete("Mics", t, node) + + bps = 3 # bytes per sample + + for i in range(MICS_SAMPLES_PER_PACKET // 2): + + for j in range(5): + + data["Mics"][j][2 * i] = int.from_bytes( + payload[(bps * j + 20 * bps * i) : (bps * j + 20 * bps * i + 3)], "big", signed=True + ) + + data["Mics"][j][2 * i + 1] = int.from_bytes( + payload[(bps * j + 20 * bps * i + 5 * bps) : (bps * j + 20 * bps * i + 3 + 5 * bps)], + "big", + signed=True, + ) + + data["Mics"][j + 5][2 * i] = int.from_bytes( + payload[(bps * j + 20 * bps * i + 10 * bps) : (bps * j + 20 * bps * i + 3 + 10 * bps)], + "big", + signed=True, + ) + + data["Mics"][j + 5][2 * i + 1] = int.from_bytes( + payload[(bps * j + 20 * bps * i + 15 * bps) : (bps * j + 20 * bps * i + 3 + 15 * bps)], + "big", + signed=True, + ) + + elif handles[type] == "Mic 1": + + if payload[0] == 1: + + print("Sensor reading from flash done") # print("Mics reading done") + + elif payload[0] == 2: + + print("Flash erasing done") # print("Mics erasing done") + + elif payload[0] == 3: + + print("Sensor started") # print("Mics and or (diff)baros started") + + elif handles[type].startswith("IMU Accel"): + + waitTillSetComplete("Acc", t, node) + + # Write the received payload to the data field + + for i in range(IMU_SAMPLES_PER_PACKET): + + data["Acc"][0][i] = int.from_bytes(payload[(6 * i) : (6 * i + 2)], ENDIAN, signed=True) + + data["Acc"][1][i] = int.from_bytes(payload[(6 * i + 2) : (6 
* i + 4)], ENDIAN, signed=True) + + data["Acc"][2][i] = int.from_bytes(payload[(6 * i + 4) : (6 * i + 6)], ENDIAN, signed=True) + + elif handles[type] == "IMU Gyro": + + waitTillSetComplete("Gyro", t, node) + + # Write the received payload to the data field + + for i in range(IMU_SAMPLES_PER_PACKET): + + data["Gyro"][0][i] = int.from_bytes(payload[(6 * i) : (6 * i + 2)], ENDIAN, signed=True) + + data["Gyro"][1][i] = int.from_bytes(payload[(6 * i + 2) : (6 * i + 4)], ENDIAN, signed=True) + + data["Gyro"][2][i] = int.from_bytes(payload[(6 * i + 4) : (6 * i + 6)], ENDIAN, signed=True) + + elif handles[type] == "IMU Magnetometer": + + waitTillSetComplete("Mag", t, node) + + # Write the received payload to the data field + + for i in range(IMU_SAMPLES_PER_PACKET): + + data["Mag"][0][i] = int.from_bytes(payload[(6 * i) : (6 * i + 2)], ENDIAN, signed=True) + + data["Mag"][1][i] = int.from_bytes(payload[(6 * i + 2) : (6 * i + 4)], ENDIAN, signed=True) + + data["Mag"][2][i] = int.from_bytes(payload[(6 * i + 4) : (6 * i + 6)], ENDIAN, signed=True) + + # elif handles[type] == "Analog": + + # waitTillSetComplete("Analog", t) + + # def valToV(val): + + # return (val << 6) / 1e6 + + # for i in range(ANALOG_SAMPLES_PER_PACKET): + + # data["Analog"][0][i] = valToV(int.from_bytes(payload[(4*i):(4*i+2)], ENDIAN, signed=False)) + + # data["Analog"][1][i] = valToV(int.from_bytes(payload[(4*i+2):(4*i+4)], ENDIAN, signed=False)) + + # print(data["Analog"][0][0]) + + elif handles[type] == "Constat": + + print(f"Node: {node}, Constat packet: %d" % (t / (2 ** 16))) + + elif handles[type] == "Cmd Decline": + + reason_index = int.from_bytes(payload, ENDIAN, signed=False) + + print("Command declined, " + decline_reason[reason_index]) + + elif handles[type] == "Sleep State": + + state_index = int.from_bytes(payload, ENDIAN, signed=False) + + print("\n" + sleep_state[state_index] + "\n") + + # elif handles[type] == "Info Message": + + # info_index = int.from_bytes(payload[0:1], ENDIAN, signed=False) + + # print(info_index) + + # if info_type[info_index] == "Battery info": + + # voltage = int.from_bytes(payload[1:5], ENDIAN, signed=False) + + # cycle = int.from_bytes(payload[5:9], ENDIAN, signed=False) + + # stateOfCharge = int.from_bytes(payload[9:13], ENDIAN, signed=False) + + # print(f"Node: {node} \n Voltage : {voltage/1000000} v \n Cycle count: {cycle/100} \n State of charge: {stateOfCharge/256}%") + + ####################################################################################### + + elif handles[type] == "Remote Info Message": + + info_index = int.from_bytes(payload[0:1], ENDIAN, signed=False) + + print(remote_info[info_index]) + + if remote_info[info_index] == "Battery info": + + voltage = int.from_bytes(payload[1:5], ENDIAN, signed=False) + + cycle = int.from_bytes(payload[5:9], ENDIAN, signed=False) + + stateOfCharge = int.from_bytes(payload[9:13], ENDIAN, signed=False) + + print( + f"Node: {node} \n Voltage : {voltage/1000000} v \n Cycle count: {cycle/100} \n State of charge: {stateOfCharge/256}%" + ) + + elif handles[type] == "Local Info Message": + + info_index = int.from_bytes(payload[0:1], ENDIAN, signed=False) + + print(local_info[info_index]) + + if info_index == 130: + + print(int.from_bytes(payload[1:3], ENDIAN, signed=False)) + + if local_info[info_index] == "Time synchronization info": + + info_type = int.from_bytes(payload[1:5], ENDIAN, signed=False) + + if info_type == 0: + + print("seq data") + + for i in range(15): + + seqDataFile.write(str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], 
ENDIAN, signed=False)) + ",") + + for i in range(15, 18): + + seqDataFile.write(str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=True)) + ",") + + seqDataFile.close() + + elif info_type == 1: + + print("central data") + + for i in range(60): + + centralDataFile.write( + str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + ) + + centralCnt = centralCnt + 1 + + if centralCnt == 187: + + centralDataFile.close() + + break + + elif info_type == 2: + + print("perif 0 data") + + for i in range(61): + + perif0DataFile.write( + str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + ) + + perif0DataFile.close() + + elif info_type == 3: + + print("perif 1 data") + + for i in range(61): + + perif1DataFile.write( + str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + ) + + perif1DataFile.close() + + elif info_type == 4: + + print("perif 2 data") + + for i in range(61): + + perif2DataFile.write( + str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + ) + + perif2DataFile.close() + + elif handles[type] == "Timestamp Packet 0": + + print("timestamp packet", int(len / 4), len) + + for i in range(int(len / 4)): + + files["ts" + str(packet_source)].write( + str(int.from_bytes(payload[i * 4 : (i + 1) * 4], ENDIAN, signed=False)) + "," + ) + + # files["sampleElapse"+str(packet_source)].close() + + elif handles[type] == "Timestamp Packet 1": + + print("time elapse packet", int(len / 4), len) + + for i in range(int(len / 4)): + + files["sampleElapse" + str(packet_source)].write( + str(int.from_bytes(payload[i * 4 : (i + 1) * 4], ENDIAN, signed=False)) + "," + ) + + +# else: + +# print("unknown handle %d", type) + +####################################################################################### + + +stop = False + + +def read_packets(ser): + + global stop + + while not stop: + + r = ser.read() + + if len(r) == 0: + + continue + + # print(f"Got packet key {r[0]}, key-PACKET_KEY_OFFSET = {r[0]-PACKET_KEY_OFFSET}") + + if (r[0] == PACKET_KEY) or (((r[0] - PACKET_KEY_OFFSET) <= 5) & ((r[0] - PACKET_KEY_OFFSET) >= 0)): + + pack_type = int.from_bytes(ser.read(), ENDIAN) + + length = int.from_bytes(ser.read(), ENDIAN) + + payload = ser.read(length) + + # print(f"{time.time()}: Got packet type {pack_type}") + + if pack_type == TYPE_HANDLE_DEF: + + parseHandleDef(payload) + + nextPacketStart = 0 + + packetCnt = 0 + + else: + + parseSensorPacket(pack_type, length, payload, r[0] - PACKET_KEY_OFFSET) + + for type in files: + + for i in range(NUM_NODES): + + files[i][type].close() + + +def writeHeaders(): + + for i in range(NUM_NODES): + + files[i]["Mics"].write("Time (s),x,y,z\n") + + files[i]["Baros_P"].write("time,x,y,z\n") + + files[i]["Baros_T"].write("time,x,y,z\n") + + files[i]["Diff_Baros"].write('"time","baro0","baro1","baro2","baro3","baro4"\n') + + files[i]["Acc"].write('"time","x","y","z"\n') + + files[i]["Gyro"].write('"time","x","y","z"\n') + + files[i]["Mag"].write('"time","x","y","z"\n') + + files[i]["Analog"].write("time,x,y,z\n") + + +folderString = datetime.now().strftime("%Y_%m_%d__%H_%M_%S") + +os.mkdir(folderString) + +for i in range(NUM_NODES): + + files[i] = {} + + files[i]["Mics"] = open(folderString + "/" + str(i) + "_mics.csv", "w") + + files["ts" + str(i)] = open(folderString + "/ts" + str(i) + ".csv", "w") + + files["sampleElapse" + str(i)] = open(folderString + "/sampleElapse" + str(i) + ".csv", "w") + + files[i]["Baros_P"] = open(folderString + "/" + str(i) + 
"_baros_p.csv", "w") + + files[i]["Baros_T"] = open(folderString + "/" + str(i) + "_baros_T.csv", "w") + + files[i]["Diff_Baros"] = open(folderString + "/" + str(i) + "_diff_baros.csv", "w") + + files[i]["Acc"] = open(folderString + "/" + str(i) + "_acc.csv", "w") + + files[i]["Gyro"] = open(folderString + "/" + str(i) + "_gyro.csv", "w") + + files[i]["Mag"] = open(folderString + "/" + str(i) + "_mag.csv", "w") + + files[i]["Analog"] = open(folderString + "/" + str(i) + "_analog.csv", "w") + + +seqDataFile = open(folderString + "/seqData.csv", "w") + +centralDataFile = open(folderString + "/centralData.csv", "w") + +perif0DataFile = open(folderString + "/perif0Data.csv", "w") + +perif1DataFile = open(folderString + "/perif1Data.csv", "w") + +perif2DataFile = open(folderString + "/perif2Data.csv", "w") + + +writeHeaders() + + +ser = serial.Serial(PORT, BAUDRATE) # open serial port + +# ser.set_buffer_size(rx_size = 100000, tx_size = 1280) + + +start_new_thread(read_packets, (ser,)) + + +""" + +time.sleep(1) + +ser.write(("configMics " + str(MICS_FREQ) + " " + str(MICS_BM) + "\n").encode('utf_8')) + +time.sleep(1) + +ser.write(("configBaros " + str(BAROS_FREQ) + " " + str(BAROS_BM) + "\n").encode('utf_8')) + +time.sleep(1) + +ser.write(("configAccel " + str(ACC_FREQ) + " " + str(ACC_RANGE) + "\n").encode('utf_8')) + +time.sleep(1) + +ser.write(("configGyro " + str(GYRO_FREQ) + " " + str(GYRO_RANGE) + "\n").encode('utf_8')) + +""" + + +for line in sys.stdin: + + if line == "saveFinish\n": + + ser.write("syncSensorFinish\n".encode("utf_8")) + + print("----command syncSensorFinish issued----") + + for i in range(NUM_NODES): + + files["ts" + str(i)].close() + + files["sampleElapse" + str(i)].close() + + if line == "stop\n": + + stop = True + + break + + else: + + ser.write(line.encode("utf_8")) + + print("----command " + line[:-1] + " issued----") diff --git a/data_gateway/packet_reader.py b/data_gateway/packet_reader.py index 02748bd3..d26646b2 100644 --- a/data_gateway/packet_reader.py +++ b/data_gateway/packet_reader.py @@ -8,14 +8,15 @@ from octue.cloud import storage from octue.log_handlers import apply_log_handler -from data_gateway import MICROPHONE_SENSOR_NAME, exceptions, stop_gateway -from data_gateway.configuration import Configuration +from data_gateway import exceptions, stop_gateway +from data_gateway.configuration import BASE_STATION_ID, DEFAULT_LOCAL_INFO_TYPES, DEFAULT_SENSOR_NAMES, Configuration from data_gateway.persistence import ( DEFAULT_OUTPUT_DIRECTORY, BatchingFileWriter, BatchingUploader, NoOperationContextManager, ) +from data_gateway.serial_port import get_serial_port logger = multiprocessing.get_logger() @@ -61,39 +62,62 @@ def __init__( self.uploader = None self.writer = None - self.handles = self.config.default_handles + self.handles = {node_id: node_config.default_handles for node_id, node_config in self.config.nodes.items()} self.sleep = False self.sensor_time_offset = None - def read_packets(self, serial_port, packet_queue, stop_signal): + def read_packets(self, serial_port_name, packet_queue, stop_signal, use_dummy_serial_port=False): """Read packets from a serial port and send them to the parser thread for processing and persistence. 
- :param serial.Serial serial_port: name of serial port to read from + :param str serial_port_name: the name of the serial port to read from :param queue.Queue packet_queue: a thread-safe queue to put packets on to for the parser thread to pick up + :param multiprocessing.Value stop_signal: a value of 0 means don't stop; a value of 1 means stop + :param bool use_dummy_serial_port: if `True` use a dummy serial port for testing :return None: """ try: logger.info("Packet reader process started.") + serial_port = get_serial_port( + serial_port=serial_port_name, + configuration=self.config, + use_dummy_serial_port=use_dummy_serial_port, + ) + while stop_signal.value == 0: - serial_data = serial_port.read() - if len(serial_data) == 0: + # Check the leading byte of the packet + leading_byte = serial_port.read() + + # Handle no data on the serial port. + if len(leading_byte) == 0: continue - if serial_data[0] != self.config.packet_key: + # Get the ID of the packet origin + if leading_byte in self.config.leading_bytes_map: + packet_origin = self.config.leading_bytes_map[leading_byte] + else: + # logger.warning( + # "Unknown leading byte (packet key) %s (%s) . Allowable values are %s", + # int.from_bytes(leading_byte, self.config.gateway.endian), + # leading_byte, + # self.config.leading_bytes_map, + # ) continue - packet_type = str(int.from_bytes(serial_port.read(), self.config.endian)) - length = int.from_bytes(serial_port.read(), self.config.endian) + # Read the packet from the serial port. + packet_type = str(int.from_bytes(serial_port.read(), self.config.gateway.endian)) + length = int.from_bytes(serial_port.read(), self.config.gateway.endian) packet = serial_port.read(length) # Check for bytes in serial input buffer. A full buffer results in overflow. - if serial_port.in_waiting == self.config.serial_buffer_rx_size: + if serial_port.in_waiting == self.config.gateway.serial_buffer_rx_size: logger.warning("Serial port buffer is full - buffer overflow may occur, resulting in data loss.") continue - packet_queue.put({"packet_type": packet_type, "packet": packet}) + logger.debug("Received packet_type %s from packet_origin %s", packet_type, packet_origin) + + packet_queue.put({"packet_origin": packet_origin, "packet_type": packet_type, "packet": packet}) except KeyboardInterrupt: pass @@ -115,7 +139,7 @@ def parse_packets(self, packet_queue, stop_signal, stop_when_no_more_data_after= if self.upload_to_cloud: self.uploader = BatchingUploader( - sensor_names=self.config.sensor_names, + node_ids=self.config.node_ids, bucket_name=self.bucket_name, window_size=self.window_size, output_directory=self.cloud_output_directory, @@ -126,7 +150,7 @@ def parse_packets(self, packet_queue, stop_signal, stop_when_no_more_data_after= if self.save_locally: self.writer = BatchingFileWriter( - sensor_names=self.config.sensor_names, + node_ids=self.config.node_ids, window_size=self.window_size, output_directory=self.local_output_directory, save_csv_files=self.save_csv_files, @@ -140,12 +164,18 @@ def parse_packets(self, packet_queue, stop_signal, stop_when_no_more_data_after= previous_timestamp = {} data = {} - for sensor_name in self.config.sensor_names: - previous_timestamp[sensor_name] = -1 - data[sensor_name] = [ - ([0] * self.config.samples_per_packet[sensor_name]) - for _ in range(self.config.number_of_sensors[sensor_name]) - ] + for node_id in self.config.node_ids: + node_config = self.config.nodes[node_id] + data[node_id] = {} + previous_timestamp[node_id] = {} + + for sensor_name in node_config.sensor_names: + 
previous_timestamp[node_id][sensor_name] = -1 + + data[node_id][sensor_name] = [ + ([0] * node_config.samples_per_packet[sensor_name]) + for _ in range(node_config.number_of_sensors[sensor_name]) + ] if stop_when_no_more_data_after is False: timeout = 5 @@ -157,48 +187,133 @@ def parse_packets(self, packet_queue, stop_signal, stop_when_no_more_data_after= with self.writer: while stop_signal.value == 0: try: - packet_type, packet = packet_queue.get(timeout=timeout).values() + packet_origin, packet_type, packet = packet_queue.get(timeout=timeout).values() except queue.Empty: if stop_when_no_more_data_after is not False: break continue - if packet_type == str(self.config.type_handle_def): - self.update_handles(packet) - continue - - if packet_type not in self.handles: - logger.error("Received packet with unknown type: %s", packet_type) - continue + if packet_origin == BASE_STATION_ID: - if len(packet) == 244: # If the full data payload is received, proceed parsing it - timestamp = int.from_bytes(packet[240:244], self.config.endian, signed=False) / (2 ** 16) - - data, sensor_names = self._parse_sensor_packet_data( - packet_type=self.handles[packet_type], - payload=packet, - data=data, + local_info_key = int.from_bytes(packet[0:1], self.config.gateway.endian, signed=False) + logger.info( + "Received local (base-station) info packet: %s", + DEFAULT_LOCAL_INFO_TYPES[local_info_key], ) - for sensor_name in sensor_names: - self._check_for_packet_loss(sensor_name, timestamp, previous_timestamp) + # TODO Store local info packets + # if info_index == 130: + # print(int.from_bytes(payload[1:3], ENDIAN, signed=False)) + # if local_info[info_index] == "Time synchronization info": + # info_type = int.from_bytes(payload[1:5], ENDIAN, signed=False) + # if info_type == 0: + # print("seq data") + # for i in range(15): + # seqDataFile.write(str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + ",") + # for i in range(15, 18): + # seqDataFile.write(str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=True)) + ",") + # seqDataFile.close() + # elif info_type == 1: + # print("central data") + # for i in range(60): + # centralDataFile.write( + # str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + # ) + # centralCnt = centralCnt + 1 + # if centralCnt == 187: + # centralDataFile.close() + # break + # elif info_type == 2: + # print("perif 0 data") + # for i in range(61): + # perif0DataFile.write( + # str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + # ) + # perif0DataFile.close() + # elif info_type == 3: + # print("perif 1 data") + # for i in range(61): + # perif1DataFile.write( + # str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + # ) + # perif1DataFile.close() + # elif info_type == 4: + # print("perif 2 data") + # for i in range(61): + # perif2DataFile.write( + # str(int.from_bytes(payload[5 + i * 4 : 9 + i * 4], ENDIAN, signed=False)) + "," + # ) + # perif2DataFile.close() + + else: + node_id = packet_origin + node_config = self.config.nodes[node_id] + + if packet_type == str(node_config.type_handle_def): + logger.warning("Node %s (re)connected, updating handles.", node_id) + self.update_handles(packet, node_id) + continue + + if packet_type not in self.handles[node_id]: + logger.error( + "Received packet from node %s with unknown type: %s ", node_id, packet_type + ) + continue + else: + logger.debug( + "Processing packet from node %s with type %s (%s)", + node_id, + 
self.handles[node_id][packet_type], + packet_type, + ) - self._timestamp_and_persist_data( - data=data, - sensor_name=sensor_name, - timestamp=timestamp, - period=self.config.period[sensor_name], + if len(packet) == 244: # If the full data payload is received, proceed parsing it + timestamp = ( + int.from_bytes( + packet[240:244], + self.config.gateway.endian, + signed=False, + ) + / 2**16 ) - continue + data, sensor_names = self._parse_sensor_packet_data( + node_id=node_id, + packet_type=self.handles[node_id][packet_type], + payload=packet, + data=data, + ) - if self.handles[packet_type] in [ - "Mic 1", - "Cmd Decline", - "Sleep State", - "Info Message", - ]: - self._parse_info_packet(self.handles[packet_type], packet, previous_timestamp) + for sensor_name in sensor_names: + # self._check_for_packet_loss( + # node_id=node_id, + # sensor_name=sensor_name, + # timestamp=timestamp, + # previous_timestamp=previous_timestamp, + # ) + + self._timestamp_and_persist_data( + data=data, + node_id=node_id, + sensor_name=sensor_name, + timestamp=timestamp, + period=node_config.periods[sensor_name], + ) + + continue + + if self.handles[node_id][packet_type] in [ + "Mic 1", + "Cmd Decline", + "Sleep State", + "Remote Info Message", + ]: + self._parse_remote_info_packet( + node_id=node_id, + packet_type=self.handles[node_id][packet_type], + packet=packet, + previous_timestamp=previous_timestamp, + ) except KeyboardInterrupt: pass @@ -206,18 +321,19 @@ def parse_packets(self, packet_queue, stop_signal, stop_when_no_more_data_after= finally: stop_gateway(logger, stop_signal) - def update_handles(self, payload): + def update_handles(self, payload, node_id): """Update the Bluetooth handles object. Handles are updated every time a new Bluetooth connection is established. :param iter payload: + :param str node_id: the ID of the node the packet is from :return None: """ - start_handle = int.from_bytes(payload[0:1], self.config.endian) - end_handle = int.from_bytes(payload[2:3], self.config.endian) + start_handle = int.from_bytes(payload[0:1], self.config.gateway.endian) + end_handle = int.from_bytes(payload[2:3], self.config.gateway.endian) if end_handle - start_handle == 26: - self.handles = { + self.handles[node_id] = { str(start_handle + 2): "Abs. baros", str(start_handle + 4): "Diff. baros", str(start_handle + 6): "Mic 0", @@ -235,10 +351,15 @@ def update_handles(self, payload): self.sensor_time_offset = None - logger.info("Successfully updated handles.") + logger.info("Successfully updated handles for node %s.", node_id) return - logger.error("Handle error: start handle is %s, end handle is %s.", start_handle, end_handle) + logger.error( + "Error while updating handles for node %s: start handle is %s, end handle is %s.", + node_id, + start_handle, + end_handle, + ) def _save_configuration_to_disk(self): """Save the configuration to disk as a JSON file. @@ -248,29 +369,32 @@ def _save_configuration_to_disk(self): with open(os.path.join(self.local_output_directory, "configuration.json"), "w") as f: json.dump(self.config.to_dict(), f) - def _parse_sensor_packet_data(self, packet_type, payload, data): + def _parse_sensor_packet_data(self, node_id, packet_type, payload, data): """Parse sensor data type payloads. 
+ :param str node_id: the ID of the node the packet is from :param str packet_type: Type of the packet :param iter payload: Raw payload to be parsed :param dict data: Initialised data dict to be completed with parsed data :return dict data: """ + node_config = self.config.nodes[node_id] + if packet_type == "Abs. baros": # Write the received payload to the data field # TODO bytes_per_sample should probably be in the configuration bytes_per_sample = 6 - for i in range(self.config.baros_samples_per_packet): - for j in range(self.config.number_of_sensors["Baros_P"]): - data["Baros_P"][j][i] = int.from_bytes( + for i in range(node_config.samples_per_packet["Baros_P"]): + for j in range(node_config.number_of_sensors["Baros_P"]): + data[node_id]["Baros_P"][j][i] = int.from_bytes( payload[(bytes_per_sample * j) : (bytes_per_sample * j + 4)], - self.config.endian, + self.config.gateway.endian, signed=False, ) - data["Baros_T"][j][i] = int.from_bytes( + data[node_id]["Baros_T"][j][i] = int.from_bytes( payload[(bytes_per_sample * j + 4) : (bytes_per_sample * j + 6)], - self.config.endian, + self.config.gateway.endian, signed=True, ) @@ -278,15 +402,17 @@ def _parse_sensor_packet_data(self, packet_type, payload, data): if packet_type == "Diff. baros": bytes_per_sample = 2 - for i in range(self.config.diff_baros_samples_per_packet): - for j in range(self.config.number_of_sensors["Diff_Baros"]): - data["Diff_Baros"][j][i] = int.from_bytes( + number_of_diff_baros_sensors = node_config.number_of_sensors["Diff_Baros"] + + for i in range(node_config.samples_per_packet["Diff_Baros"]): + for j in range(number_of_diff_baros_sensors): + data[node_id]["Diff_Baros"][j][i] = int.from_bytes( payload[ - (bytes_per_sample * (self.config.number_of_sensors["Diff_Baros"] * i + j)) : ( - bytes_per_sample * (self.config.number_of_sensors["Diff_Baros"] * i + j + 1) + (bytes_per_sample * (number_of_diff_baros_sensors * i + j)) : ( + bytes_per_sample * (number_of_diff_baros_sensors * i + j + 1) ) ], - self.config.endian, + self.config.gateway.endian, signed=False, ) @@ -296,57 +422,57 @@ def _parse_sensor_packet_data(self, packet_type, payload, data): # Write the received payload to the data field bytes_per_sample = 3 - for i in range(self.config.mics_samples_per_packet // 2): - for j in range(self.config.number_of_sensors[MICROPHONE_SENSOR_NAME] // 2): + for i in range(node_config.samples_per_packet["Mics"] // 2): + for j in range(node_config.number_of_sensors[DEFAULT_SENSOR_NAMES[0]] // 2): index = j + 20 * i - data[MICROPHONE_SENSOR_NAME][j][2 * i] = int.from_bytes( + data[node_id][DEFAULT_SENSOR_NAMES[0]][j][2 * i] = int.from_bytes( payload[(bytes_per_sample * index) : (bytes_per_sample * index + 3)], "big", # Unlike the other sensors, the microphone data come in big-endian signed=True, ) - data[MICROPHONE_SENSOR_NAME][j][2 * i + 1] = int.from_bytes( + data[node_id][DEFAULT_SENSOR_NAMES[0]][j][2 * i + 1] = int.from_bytes( payload[(bytes_per_sample * (index + 5)) : (bytes_per_sample * (index + 5) + 3)], "big", # Unlike the other sensors, the microphone data come in big-endian signed=True, ) - data[MICROPHONE_SENSOR_NAME][j + 5][2 * i] = int.from_bytes( + data[node_id][DEFAULT_SENSOR_NAMES[0]][j + 5][2 * i] = int.from_bytes( payload[(bytes_per_sample * (index + 10)) : (bytes_per_sample * (index + 10) + 3)], "big", # Unlike the other sensors, the microphone data come in big-endian signed=True, ) - data[MICROPHONE_SENSOR_NAME][j + 5][2 * i + 1] = int.from_bytes( + data[node_id][DEFAULT_SENSOR_NAMES[0]][j + 5][2 * i + 1] = 
int.from_bytes( payload[(bytes_per_sample * (index + 15)) : (bytes_per_sample * (index + 15) + 3)], "big", # Unlike the other sensors, the microphone data come in big-endian signed=True, ) - return data, [MICROPHONE_SENSOR_NAME] + return data, [DEFAULT_SENSOR_NAMES[0]] if packet_type.startswith("IMU"): - imu_sensor_names = {"IMU Accel": "Acc", "IMU Gyro": "Gyro", "IMU Magnetometer": "Mag"} - imu_sensor = imu_sensor_names[packet_type] # Write the received payload to the data field - for i in range(self.config.imu_samples_per_packet): + for i in range(node_config.samples_per_packet["Acc"]): index = 6 * i - data[imu_sensor][0][i] = int.from_bytes(payload[index : (index + 2)], self.config.endian, signed=True) - data[imu_sensor][1][i] = int.from_bytes( - payload[(index + 2) : (index + 4)], self.config.endian, signed=True + data[node_id][imu_sensor][0][i] = int.from_bytes( + payload[index : (index + 2)], self.config.gateway.endian, signed=True ) - data[imu_sensor][2][i] = int.from_bytes( - payload[(index + 4) : (index + 6)], self.config.endian, signed=True + data[node_id][imu_sensor][1][i] = int.from_bytes( + payload[(index + 2) : (index + 4)], self.config.gateway.endian, signed=True + ) + data[node_id][imu_sensor][2][i] = int.from_bytes( + payload[(index + 4) : (index + 6)], self.config.gateway.endian, signed=True ) return data, [imu_sensor] # TODO Analog sensor definitions if packet_type in {"Analog Kinetron", "Analog1", "Analog2"}: - logger.error("Received Analog packet. Not supported atm") + logger.error("Received Analog Kinetron, Analog1 or Analog2 packet. Not supported atm") raise exceptions.UnknownPacketTypeError(f"Packet of type {packet_type!r} is unknown.") if packet_type == "Analog Vbat": @@ -354,37 +480,45 @@ def _parse_sensor_packet_data(self, packet_type, payload, data): def val_to_v(val): return val / 1e6 - for i in range(self.config.analog_samples_per_packet): + for i in range(node_config.samples_per_packet["Analog Vbat"]): index = 4 * i - data["Analog Vbat"][0][i] = val_to_v( - int.from_bytes(payload[index : (index + 4)], self.config.endian, signed=False) + data[node_id]["Analog Vbat"][0][i] = val_to_v( + int.from_bytes(payload[index : (index + 4)], self.config.gateway.endian, signed=False) ) return data, ["Analog Vbat"] if packet_type == "Constat": bytes_per_sample = 10 - for i in range(self.config.constat_samples_per_packet): - data["Constat"][0][i] = struct.unpack( - "f", + for i in range(node_config.samples_per_packet["Constat"]): + data[node_id]["Constat"][0][i] = struct.unpack( + "f", payload[(bytes_per_sample * i) : (bytes_per_sample * i + 4)], )[0] - data["Constat"][1][i] = int.from_bytes( + data[node_id]["Constat"][1][i] = int.from_bytes( payload[(bytes_per_sample * i + 4) : (bytes_per_sample * i + 5)], - self.config.endian, + self.config.gateway.endian, signed=True, ) - data["Constat"][2][i] = int.from_bytes( + data[node_id]["Constat"][2][i] = int.from_bytes( payload[(bytes_per_sample * i + 5) : (bytes_per_sample * i + 6)], - self.config.endian, + self.config.gateway.endian, signed=True, ) - data["Constat"][3][i] = int.from_bytes( + data[node_id]["Constat"][3][i] = int.from_bytes( payload[(bytes_per_sample * i + 6) : (bytes_per_sample * i + 10)], - self.config.endian, + self.config.gateway.endian, signed=False, ) + logger.debug( + "Constats received from node %s: filtered_rssi=%s, raw_rssi=%s, tx_power=%s, allocated_heap_memory=%s", + node_id, + data[node_id]["Constat"][0][i], + data[node_id]["Constat"][1][i], + data[node_id]["Constat"][2][i], + 
data[node_id]["Constat"][3][i], + ) return data, ["Constat"] @@ -392,49 +526,55 @@ def val_to_v(val): logger.error("Sensor of type %r is unknown.", packet_type) raise exceptions.UnknownPacketTypeError(f"Sensor of type {packet_type!r} is unknown.") - def _parse_info_packet(self, information_type, payload, previous_timestamp): + def _parse_remote_info_packet(self, node_id, packet_type, packet, previous_timestamp): """Parse information type packet and send the information to logger. - :param str information_type: From packet handles, defines what information is stored in payload. - :param iter payload: + :param str node_id: the ID of the node the packet is from + :param str packet_type: From packet handles, defines what information is stored in the packet. + :param iter packet: The packet :return None: """ - if information_type == "Mic 1": - if payload[0] == 1: + node_config = self.config.nodes[node_id] + + if packet_type == "Mic 1": + if packet[0] == 1: logger.info("Microphone data reading done") - elif payload[0] == 2: + elif packet[0] == 2: logger.info("Microphone data erasing done") - elif payload[0] == 3: + elif packet[0] == 3: logger.info("Microphones started ") return - if information_type == "Cmd Decline": - reason_index = str(int.from_bytes(payload, self.config.endian, signed=False)) - logger.info("Command declined, %s", self.config.decline_reason[reason_index]) + if packet_type == "Cmd Decline": + reason_index = str(int.from_bytes(packet, self.config.gateway.endian, signed=False)) + logger.info("Command declined, %s", node_config.decline_reason[reason_index]) return - if information_type == "Sleep State": - state_index = str(int.from_bytes(payload, self.config.endian, signed=False)) - logger.info("\n%s\n", self.config.sleep_state[state_index]) + if packet_type == "Sleep State": + state_index = str(int.from_bytes(packet, self.config.gateway.endian, signed=False)) + logger.info("Sleep state updated on node %s: %s", node_id, node_config.sleep_state[state_index]) + # TODO make this node-specific if bool(int(state_index)): self.sleep = True else: self.sleep = False # Reset previous timestamp on wake up - for sensor_name in self.config.sensor_names: - previous_timestamp[sensor_name] = -1 + for sensor_name in node_config.sensor_names: + previous_timestamp[node_id][sensor_name] = -1 return - if information_type == "Info Message": - info_index = str(int.from_bytes(payload[0:1], self.config.endian, signed=False)) - logger.info(self.config.info_type[info_index]) + if packet_type == "Remote Info Message": + remote_info_key = str(int.from_bytes(packet[0:1], self.config.gateway.endian, signed=False)) + info_subtype = node_config.remote_info_type[remote_info_key] + logger.info("Received remote info packet from node %s: %s", node_id, info_subtype) - if self.config.info_type[info_index] == "Battery info": - voltage = int.from_bytes(payload[1:5], self.config.endian, signed=False) / 1000000 - cycle = int.from_bytes(payload[5:9], self.config.endian, signed=False) / 100 - state_of_charge = int.from_bytes(payload[9:13], self.config.endian, signed=False) / 256 + # TODO store the voltage in results so that we'll be able to display it in the dashboard + if info_subtype == "Battery info": + voltage = int.from_bytes(packet[1:5], self.config.gateway.endian, signed=False) / 1000000 + cycle = int.from_bytes(packet[5:9], self.config.gateway.endian, signed=False) / 100 + state_of_charge = int.from_bytes(packet[9:13], self.config.gateway.endian, signed=False) / 256 logger.info( "Voltage : %fV\n Cycle count: 
%f\nState of charge: %f%%", @@ -445,7 +585,23 @@ def _parse_info_packet(self, information_type, payload, previous_timestamp): return - def _check_for_packet_loss(self, sensor_name, timestamp, previous_timestamp): + if packet_type == "Timestamp Packet 0": + logger.warning("Received Timestamp Packet 0, handling not implemented yet") + # print("timestamp packet", int(len / 4), len) + # for i in range(int(len / 4)): + # files["ts" + str(packet_source)].write( + # str(int.from_bytes(payload[i * 4 : (i + 1) * 4], ENDIAN, signed=False)) + "," + # ) + + if packet_type == "Timestamp Packet 1": + logger.warning("Received Timestamp Packet 1, handling not implemented yet") + # print("time elapse packet", int(len / 4), len) + # for i in range(int(len / 4)): + # files["sampleElapse" + str(packet_source)].write( + # str(int.from_bytes(payload[i * 4 : (i + 1) * 4], ENDIAN, signed=False)) + "," + # ) + + def _check_for_packet_loss(self, node_id, sensor_name, timestamp, previous_timestamp): """Check if a packet was lost by looking at the time interval between previous_timestamp and timestamp for the sensor_name. @@ -454,32 +610,41 @@ def _check_for_packet_loss(self, sensor_name, timestamp, previous_timestamp): timestamps in two consecutive packets is expected to be approximately equal to the number of samples in the packet times sampling period. + :param str node_id: the ID of the node the packet is from :param str sensor_name: :param float timestamp: Current timestamp for the first sample in the packet Unit: s :param dict previous_timestamp: Timestamp for the first sample in the previous packet. Must be initialized with -1. Unit: s :return None: """ - if previous_timestamp[sensor_name] == -1: + node_config = self.config.nodes[node_id] + + if previous_timestamp[node_id][sensor_name] == -1: logger.info("Received first %s packet." % sensor_name) else: expected_current_timestamp = ( - previous_timestamp[sensor_name] - + self.config.samples_per_packet[sensor_name] * self.config.period[sensor_name] + previous_timestamp[node_id][sensor_name] + + node_config.samples_per_packet[sensor_name] * node_config.periods[sensor_name] ) timestamp_deviation = timestamp - expected_current_timestamp - if abs(timestamp_deviation) > self.config.max_timestamp_slack: + if abs(timestamp_deviation) > node_config.max_timestamp_slack: if self.sleep: - # Only Constat (Connections statistics) comes during sleep + # Only Constat (Connections statistics) comes during sleep. return if sensor_name in ["Acc", "Gyro", "Mag"]: - # IMU sensors are not synchronised to CPU, so their actual periods might differ - self.config.period[sensor_name] = ( - timestamp - previous_timestamp[sensor_name] - ) / self.config.samples_per_packet[sensor_name] - logger.debug("Updated %s period to %f ms.", sensor_name, self.config.period[sensor_name] * 1000) + # IMU sensors are not synchronised to CPU, so their actual periods might differ. + node_config.periods[sensor_name] = ( + timestamp - previous_timestamp[node_id][sensor_name] + ) / node_config.samples_per_packet[sensor_name] + + logger.debug( + "Updated %s period to %f ms.", + sensor_name, + node_config.periods[sensor_name] * 1000, + ) + else: logger.warning( "Possible packet loss. 
%s sensor packet is timestamped %d ms later than expected", @@ -487,66 +652,41 @@ def _check_for_packet_loss(self, sensor_name, timestamp, previous_timestamp): timestamp_deviation * 1000, ) - previous_timestamp[sensor_name] = timestamp + previous_timestamp[node_id][sensor_name] = timestamp - def _timestamp_and_persist_data(self, data, sensor_name, timestamp, period): - """Persist data to the required storage media. - Since timestamps only come at a packet level, this function assumes constant period for - the within-packet-timestamps + def _timestamp_and_persist_data(self, data, node_id, sensor_name, timestamp, period): + """Persist data to the required storage media. Since timestamps only come at a packet level, this function + assumes constant period for the within-packet-timestamps. :param dict data: data to persist + :param str node_id: the ID of the node the data is from :param str sensor_name: sensor type to persist data from :param float timestamp: timestamp in s :param float period: :return None: """ - number_of_samples = len(data[sensor_name][0]) - time = None + number_of_samples = len(data[node_id][sensor_name][0]) # Iterate through all sample times. for i in range(number_of_samples): time = timestamp + i * period sample = [time] - for meas in data[sensor_name]: + for meas in data[node_id][sensor_name]: sample.append(meas[i]) - self._add_data_to_current_window(sensor_name, data=sample) - - # The first time this method runs, calculate the offset between the last timestamp of the first sample and the - # UTC time now. Store it as the `sensor_time_offset` metadata in the windows. - if sensor_name == "Constat": - logger.debug("Constat packet: %d" % timestamp) - if time and self.sensor_time_offset is None: - self._calculate_and_store_sensor_timestamp_offset(time) - - def _calculate_and_store_sensor_timestamp_offset(self, timestamp): - """Calculate the offset between the given timestamp and the UTC time now, storing it in the metadata of the - windows in the uploader and/or writer. - - :param float timestamp: posix timestamp from sensor - :return None: - """ - now = datetime.datetime.now().replace(tzinfo=datetime.timezone.utc).timestamp() - self.sensor_time_offset = now - timestamp - - if hasattr(self.writer, "current_window"): - self.writer.current_window["sensor_time_offset"] = self.sensor_time_offset - self.writer.ready_window["sensor_time_offset"] = self.sensor_time_offset - - if hasattr(self.uploader, "current_window"): - self.uploader.current_window["sensor_time_offset"] = self.sensor_time_offset - self.uploader.ready_window["sensor_time_offset"] = self.sensor_time_offset + self._add_data_to_current_window(node_id, sensor_name, data=sample) - def _add_data_to_current_window(self, sensor_name, data): + def _add_data_to_current_window(self, node_id, sensor_name, data): """Add data to the current window. 
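# Sketch of the per-sample timestamping above (values hypothetical): each persisted row is
# [time, quantity_0, quantity_1, ...], with times spaced at the sensor's assumed-constant period.
timestamp, period = 100.0, 0.25  # First-sample time in seconds; a 4 Hz sensor.
data = [[1, 2, 3], [4, 5, 6]]  # Two measured quantities, three samples each.

rows = []
for i in range(len(data[0])):
    rows.append([timestamp + i * period] + [measurement[i] for measurement in data])

assert rows[1] == [100.25, 2, 5]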
+        :param str node_id: the ID of the node the data is from
         :param str sensor_name: sensor type to persist data from
         :param iter data: data to persist
         :return None:
         """
         if self.save_locally:
-            self.writer.add_to_current_window(sensor_name, data)
+            self.writer.add_to_current_window(node_id, sensor_name, data)
 
         if self.upload_to_cloud:
-            self.uploader.add_to_current_window(sensor_name, data)
+            self.uploader.add_to_current_window(node_id, sensor_name, data)
diff --git a/data_gateway/persistence.py b/data_gateway/persistence.py
index 7d4e6842..4f89639d 100644
--- a/data_gateway/persistence.py
+++ b/data_gateway/persistence.py
@@ -5,6 +5,7 @@ import multiprocessing
 import os
 import time
 
+from collections import defaultdict
 from octue.cloud import storage
 from octue.cloud.storage.client import GoogleCloudStorageClient
@@ -40,7 +41,7 @@ def force_persist(self):
 class TimeBatcher:
     """A batcher that groups the given data into time windows.
 
-    :param iter(str) sensor_names: names of sensors to group data for
+    :param iter(str) node_ids: the IDs of the nodes that data is being persisted for
     :param float window_size: length of time window in seconds
     :param str output_directory: directory to write windows to
     :return None:
     """
@@ -48,11 +49,12 @@
 
     _file_prefix = "window"
 
-    def __init__(self, sensor_names, window_size, output_directory=DEFAULT_OUTPUT_DIRECTORY):
-        self.current_window = {"sensor_time_offset": None, "sensor_data": {name: [] for name in sensor_names}}
+    def __init__(self, node_ids, window_size, output_directory=DEFAULT_OUTPUT_DIRECTORY):
+        self._node_ids = node_ids
+        self.current_window = {node_id: defaultdict(list) for node_id in self._node_ids}
+        self.ready_window = {node_id: defaultdict(list) for node_id in self._node_ids}
         self.window_size = window_size
         self.output_directory = output_directory
-        self.ready_window = {"sensor_time_offset": None, "sensor_data": {}}
         self._start_time = time.perf_counter()
         self._window_number = 0
@@ -62,9 +64,10 @@ def __enter__(self):
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         self.force_persist()
 
-    def add_to_current_window(self, sensor_name, data):
+    def add_to_current_window(self, node_id, sensor_name, data):
         """Add data to the current window for the given sensor name.
 
+        :param str node_id: the ID of the node the data is from
        :param str sensor_name: name of sensor
         :param iter data: data to add to window
         :return None:
@@ -76,7 +79,7 @@ def add_to_current_window(self, sensor_name, data):
             self._prepare_for_next_window()
 
         # Then add data to the current/new window.
-        self.current_window["sensor_data"][sensor_name].append(data)
+        self.current_window[node_id][sensor_name].append(data)
 
     def finalise_current_window(self):
         """Finalise the current window for the given sensor name. This puts the current window into the queue of ready
@@ -84,10 +87,11 @@
 
         :return None:
         """
-        for sensor_name, data in self.current_window["sensor_data"].items():
-            if data:
-                self.ready_window["sensor_data"][sensor_name] = copy.deepcopy(data)
-                data.clear()
+        for node_id in self.current_window:
+            for sensor_name, data in self.current_window[node_id].items():
+                if data:
+                    self.ready_window[node_id][sensor_name] = copy.deepcopy(data)
+                    data.clear()
 
     def force_persist(self):
         """Persist all current windows, regardless of whether a complete time window has passed.
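The reworked windows above are now keyed by node first and sensor second. A minimal sketch of how `current_window` and `ready_window` behave under this layout (node IDs, sensor name and sample values are illustrative):

    import copy
    from collections import defaultdict

    node_ids = ["2", "3"]

    # Each window maps node ID -> sensor name -> list of sample rows.
    current_window = {node_id: defaultdict(list) for node_id in node_ids}
    ready_window = {node_id: defaultdict(list) for node_id in node_ids}

    # A row is [time, *measurements]; appending mirrors add_to_current_window.
    current_window["2"]["Baros"].append([0.025, 99726.4, 99728.1])

    # Finalising copies non-empty sensor lists into the ready window and clears
    # them, mirroring finalise_current_window.
    for node_id in current_window:
        for sensor_name, data in current_window[node_id].items():
            if data:
                ready_window[node_id][sensor_name] = copy.deepcopy(data)
                data.clear()

    assert ready_window["2"]["Baros"] and not current_window["2"]["Baros"]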
@@ -112,7 +116,7 @@ def _prepare_for_next_window(self):
         :return None:
         """
         self._window_number += 1
-        self.ready_window["sensor_data"] = {}
+        self.ready_window = {node_id: defaultdict(list) for node_id in self._node_ids}
         self._start_time = time.perf_counter()
 
     @abc.abstractmethod
@@ -128,8 +132,9 @@ def _generate_window_path(self):
 class BatchingFileWriter(TimeBatcher):
     """A file writer that groups the given into time windows, saving each window to disk.
 
-    :param iter(str) sensor_names: names of sensors to make windows for
+    :param iter(str) node_ids: the IDs of the nodes that data is being persisted for
     :param float window_size: length of time window in seconds
+    :param bool save_csv_files: if `True`, also save each window's sensor data to CSV files
     :param str output_directory: directory to write windows to
     :param int storage_limit: storage limit in bytes (default is 1 GB)
     :return None:
     """
@@ -137,15 +142,15 @@
 
     def __init__(
         self,
-        sensor_names,
+        node_ids,
         window_size,
         save_csv_files=False,
         output_directory=DEFAULT_OUTPUT_DIRECTORY,
-        storage_limit=1024 ** 3,
+        storage_limit=1024**3,
     ):
         self._save_csv_files = save_csv_files
         self.storage_limit = storage_limit
-        super().__init__(sensor_names, window_size, output_directory)
+        super().__init__(node_ids, window_size, output_directory)
 
         os.makedirs(self.output_directory, exist_ok=True)
         logger.info("Windows will be saved to %r at intervals of %s seconds.", self.output_directory, self.window_size)
@@ -166,14 +171,15 @@ def _persist_window(self, window=None):
         logger.info("%s %d written to disk.", self._file_prefix.capitalize(), self._window_number)
 
         if self._save_csv_files:
-            for sensor in window["sensor_data"]:
-                csv_path = os.path.join(os.path.dirname(window_path), f"{sensor}.csv")
-                logger.info("Saving %s data to csv file.", sensor)
+            for node_id in window:
+                for sensor in window[node_id]:
+                    csv_path = os.path.join(os.path.dirname(window_path), f"{sensor}.csv")
+                    logger.info("Saving %s data to csv file.", sensor)
 
-                with open(csv_path, "w", newline="") as f:
-                    writer = csv.writer(f, delimiter=",")
-                    for row in self.ready_window["sensor_data"][sensor]:
-                        writer.writerow(row)
+                    with open(csv_path, "w", newline="") as f:
+                        writer = csv.writer(f, delimiter=",")
+                        for row in self.ready_window[node_id][sensor]:
+                            writer.writerow(row)
@@ -181,7 +187,7 @@ def _manage_storage(self):
         """Check if the output directory has reached its storage limit and, if it has, delete the oldest window.
 
         :return None:
         """
         filter = lambda path: os.path.split(path)[-1].startswith("window")  # noqa
-        storage_limit_in_mb = self.storage_limit / 1024 ** 2
+        storage_limit_in_mb = self.storage_limit / 1024**2
 
         if calculate_disk_usage(self.output_directory, filter) >= self.storage_limit:
             oldest_window = get_oldest_file_in_directory(self.output_directory, filter)
@@ -211,7 +217,7 @@ class BatchingUploader(TimeBatcher):
     Storage. If upload fails for a window, it will be written to the backup directory. If the `upload_backup_files`
     flag is `True`, its upload will then be reattempted after the upload of each subsequent window.
-    :param iter(str) sensor_names: names of sensors to group data for
+    :param iter(str) node_ids: the IDs of the nodes that data is being persisted for
     :param str bucket_name: name of Google Cloud bucket to upload to
     :param float window_size: length of time window in seconds
     :param str output_directory: directory to write windows to
@@ -222,7 +228,7 @@
 
     def __init__(
         self,
-        sensor_names,
+        node_ids,
         bucket_name,
         window_size,
         output_directory=DEFAULT_OUTPUT_DIRECTORY,
@@ -235,10 +241,10 @@ def __init__(
         self.metadata = metadata or {}
         self.upload_timeout = upload_timeout
         self.upload_backup_files = upload_backup_files
-        super().__init__(sensor_names, window_size, output_directory)
+        super().__init__(node_ids, window_size, output_directory)
 
         self._backup_directory = os.path.join(self.output_directory, ".backup")
-        self._backup_writer = BatchingFileWriter(sensor_names, window_size, output_directory=self._backup_directory)
+        self._backup_writer = BatchingFileWriter(node_ids, window_size, output_directory=self._backup_directory)
 
         logger.info(
             "Windows will be uploaded to %r at intervals of %s seconds.", self.output_directory, self.window_size
@@ -250,6 +256,9 @@ def _persist_window(self):
 
         :return None:
         """
         try:
+            logger.info(
+                "Uploading window to bucket %s at path %s", self.bucket_name, self._generate_window_path()
+            )
             self.client.upload_from_string(
                 string=json.dumps(self.ready_window),
                 cloud_path=storage.path.generate_gs_path(self.bucket_name, self._generate_window_path()),
diff --git a/data_gateway/serial_port.py b/data_gateway/serial_port.py
new file mode 100644
index 00000000..e0aa856c
--- /dev/null
+++ b/data_gateway/serial_port.py
@@ -0,0 +1,40 @@
+import logging
+import os
+
+import serial
+
+from data_gateway.dummy_serial import DummySerial
+
+
+logger = logging.getLogger(__name__)
+
+
+def get_serial_port(serial_port, configuration, use_dummy_serial_port=False):
+    """Get the serial port or a dummy serial port if specified. If a `serial.Serial` instance is provided, return that
+    as the serial port to use.
+
+    :param str|serial.Serial serial_port: the name of a serial port or a `serial.Serial` instance
+    :param data_gateway.configuration.Configuration configuration: the packet reader configuration
+    :param bool use_dummy_serial_port: if `True`, use a dummy serial port instead
+    :return serial.Serial|data_gateway.dummy_serial.DummySerial:
+    """
+    if isinstance(serial_port, str):
+        serial_port_name = serial_port
+
+        if use_dummy_serial_port:
+            serial_port = DummySerial(port=serial_port_name, baudrate=configuration.gateway.baudrate)
+        else:
+            serial_port = serial.Serial(port=serial_port_name, baudrate=configuration.gateway.baudrate)
+
+        logger.info("Serial port %r found.", serial_port_name)
+
+    # The buffer size can only be set on Windows.
+    if os.name == "nt":
+        serial_port.set_buffer_size(
+            rx_size=configuration.gateway.serial_buffer_rx_size,
+            tx_size=configuration.gateway.serial_buffer_tx_size,
+        )
+    else:
+        logger.debug("Serial port buffer size can only be set on Windows.")
+
+    return serial_port
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..260853df
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,9 @@
+version: "2"
+volumes:
+  resin-data:
+services:
+  main:
+    build: .
+    privileged: true
+    volumes:
+      - "resin-data:/data"
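The `get_serial_port` helper added in data_gateway/serial_port.py above might be used along these lines. This is a sketch only: the `Configuration` object is stubbed here and the baudrate is an assumed value, not necessarily the gateway's default.

    from types import SimpleNamespace

    from data_gateway.serial_port import get_serial_port

    # Stub standing in for data_gateway.configuration.Configuration. On Windows
    # it would also need serial_buffer_rx_size and serial_buffer_tx_size
    # attributes, since the helper sets the buffer sizes there.
    configuration = SimpleNamespace(gateway=SimpleNamespace(baudrate=2300000))  # assumed baudrate

    # A dummy serial port is handy for local development without gateway
    # hardware attached; use_dummy_serial_port=False opens a real serial.Serial.
    serial_port = get_serial_port("/dev/ttyUSB0", configuration, use_dummy_serial_port=True)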
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
index e8bb6992..87ec84e6 100644
--- a/docs/source/installation.rst
+++ b/docs/source/installation.rst
@@ -23,9 +23,12 @@ You'll need to install Raspberry Pi OS (formerly "Raspbian", which was a much be
 When booted into your **pi**, use the following commands to install...
 
 .. code-block:: shell
 
+   sudo apt-get update
+   sudo apt-get install libhdf5-dev libhdf5-serial-dev
-   export GATEWAY_VERSION="0.11.8" # Or whatever release number you aim to use, check the latest available on GitHub
-   pip install git+https://github.com/aerosense-ai/data-gateway.git@${GATEWAY_VERSION}
+   git clone https://github.com/aerosense-ai/data-gateway.git
+   cd data-gateway
+   pip install -r requirements-pi.txt
 
 This installs the CLI :ref:`gateway_cli`, which enables you to start the gateway.
diff --git a/docs/source/routines.rst b/docs/source/routines.rst
index b9817865..1229f7fc 100644
--- a/docs/source/routines.rst
+++ b/docs/source/routines.rst
@@ -43,3 +43,28 @@ Routine file schema
   send to the sensors and a delay in seconds from the gateway starting to run the command.
 - An optional ``period`` in seconds can be provided to repeat the routine. If none is provided, the routine
   is run once only. The period must be greater than each of the commands' delays.
+
+
+Example routine files
+---------------------
+
+The following routine file instructs the gateway to start the differential barometers and the IMU, check the battery
+level, swap from the differential to the absolute barometers after ten minutes, take a short microphone sample, and
+then stop all of the sensors. With a ``period`` of 3600 seconds, the routine is repeated every hour.
+
+.. code-block:: json
+
+    {
+        "commands": [
+            ["startDiffBaros", 60],
+            ["startIMU", 65],
+            ["getBattery", 70],
+            ["stopDiffBaros", 660],
+            ["startBaros", 670],
+            ["startMics", 1265],
+            ["stopBaros", 1270],
+            ["stopIMU", 1275],
+            ["stopMics", 1280]
+        ],
+        "period": 3600
+    }
diff --git a/examples/configurations/default.json b/examples/configurations/default.json
new file mode 100644
index 00000000..0967ef42
--- /dev/null
+++ b/examples/configurations/default.json
@@ -0,0 +1 @@
+{}
diff --git a/examples/configurations/manual-test.json b/examples/configurations/manual-test.json
new file mode 100644
index 00000000..fc468c3c
--- /dev/null
+++ b/examples/configurations/manual-test.json
@@ -0,0 +1,12 @@
+{
+    "gateway": {
+        "packet_key": 254,
+        "packet_key_offset": 245
+    },
+    "nodes": {
+        "4": {}
+    },
+    "session": {
+        "notes": "A basic session for integration testing. Data is likely totally meaningless."
+    }
+}
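As a cross-check of the routine file schema documented in docs/source/routines.rst above (each command is a `[name, delay]` pair, and any `period` must exceed every delay), a short validation sketch for the example routine files that follow; the file path is illustrative:

    import json

    # Validate a routine file against the schema described in docs/source/routines.rst.
    with open("examples/routines/manual-test.json") as f:
        routine = json.load(f)

    delays = [delay for _, delay in routine["commands"]]

    # The optional period must be greater than each of the commands' delays.
    if "period" in routine:
        assert routine["period"] > max(delays), "period must exceed every command delay"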
diff --git a/examples/configurations/multi-node-aventa-tests.json b/examples/configurations/multi-node-aventa-tests.json
new file mode 100644
index 00000000..bc44f2bb
--- /dev/null
+++ b/examples/configurations/multi-node-aventa-tests.json
@@ -0,0 +1,13 @@
+{
+    "gateway": {
+        "installation_reference": "aventa-turbine-test"
+    },
+    "nodes": {
+        "2": {},
+        "3": {}
+    },
+    "session": {
+        "label": "aventa-test-setup",
+        "notes": "Aventa wind turbine test setup (in the lab)"
+    }
+}
diff --git a/examples/routines/manual-test.json b/examples/routines/manual-test.json
new file mode 100644
index 00000000..fc50db45
--- /dev/null
+++ b/examples/routines/manual-test.json
@@ -0,0 +1,11 @@
+{
+    "commands": [
+        ["selMsgDest 4", 0.1],
+        ["wakeUp", 0.2],
+        ["getBattery", 30],
+        ["startBaros", 35],
+        ["stopBaros", 65],
+        ["readSens", 70],
+        ["sleep", 140]
+    ]
+}
diff --git a/examples/routines/multi-node-aventa-tests.json b/examples/routines/multi-node-aventa-tests.json
new file mode 100644
index 00000000..f1a34eb0
--- /dev/null
+++ b/examples/routines/multi-node-aventa-tests.json
@@ -0,0 +1,48 @@
+{
+    "commands": [
+        ["selMsgDest 2", 5],
+        ["wakeUp", 5.1],
+        ["selMsgDest 3", 5.2],
+        ["wakeUp", 5.3],
+
+        ["startSync", 30],
+
+        ["selMsgDest 2", 60],
+        ["getBattery", 60.1],
+        ["startBaros", 60.2],
+        ["startIMU", 60.3],
+
+        ["selMsgDest 3", 60.4],
+        ["startBaros", 60.6],
+        ["startIMU", 60.7],
+
+        ["selMsgDest 2", 180],
+        ["startMics", 180.1],
+        ["selMsgDest 3", 180.2],
+        ["startDiffBaros", 180.3],
+
+        ["selMsgDest 2", 240],
+        ["stopMics", 240.1],
+        ["selMsgDest 3", 240.2],
+        ["stopDiffBaros", 240.3],
+
+        ["selMsgDest 2", 360],
+        ["stopBaros", 360.1],
+        ["stopIMU", 360.2],
+        ["selMsgDest 3", 360.3],
+        ["stopBaros", 360.4],
+        ["stopIMU", 360.5],
+
+        ["stopSync", 365],
+
+        ["selMsgDest 3", 370],
+        ["readSens", 370.1],
+        ["sleep", 670],
+
+        ["selMsgDest 2", 675],
+        ["readSens", 675.1],
+        ["readMics", 735.0],
+        ["sleep", 1075]
+    ],
+    "period": 1200
+}
diff --git a/examples/routines/multi-node-get-battery.json b/examples/routines/multi-node-get-battery.json
new file mode 100644
index 00000000..a7a4a4d5
--- /dev/null
+++ b/examples/routines/multi-node-get-battery.json
@@ -0,0 +1,10 @@
+{
+    "commands": [
+        ["selMsgDest 3", 0.1],
+        ["getBattery", 0.2],
+        ["selMsgDest 4", 1.1],
+        ["getBattery", 1.2],
+        ["selMsgDest 5", 2.1],
+        ["getBattery", 2.2]
+    ]
+}
diff --git a/examples/routines/multi-node-quick-test.json b/examples/routines/multi-node-quick-test.json
new file mode 100644
index 00000000..3cf47c71
--- /dev/null
+++ b/examples/routines/multi-node-quick-test.json
@@ -0,0 +1,33 @@
+{
+    "commands": [
+        ["selMsgDest 2", 5],
+        ["wakeUp", 6],
+        ["selMsgDest 3", 7],
+        ["wakeUp", 8],
+
+        ["startSync", 60],
+
+        ["selMsgDest 2", 70],
+        ["getBattery", 71],
+        ["startBaros", 72],
+
+        ["selMsgDest 3", 73],
+        ["startBaros", 74],
+
+        ["selMsgDest 2", 75],
+        ["stopBaros", 76],
+
+        ["selMsgDest 3", 78],
+        ["stopBaros", 79],
+
+        ["stopSync", 85],
+
+        ["selMsgDest 2", 90],
+        ["readSens", 91],
+        ["sleep", 120],
+
+        ["selMsgDest 3", 121],
+        ["readSens", 125],
+        ["sleep", 155]
+    ]
+}
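A comment in the multi-node-time-synced routine below warns that `readSens` can hang indefinitely if a node is powered off, so a timeout is needed. A minimal sketch of the kind of guard that comment calls for (illustrative only; not the gateway's actual read loop):

    import time

    def read_with_timeout(read_packet, timeout=60):
        """Poll read_packet() until it returns data or the timeout elapses."""
        deadline = time.monotonic() + timeout

        while time.monotonic() < deadline:
            packet = read_packet()
            if packet:
                return packet

        raise TimeoutError("No data received before the timeout; is the node powered on?")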
diff --git a/examples/routines/multi-node-time-synced.json b/examples/routines/multi-node-time-synced.json
new file mode 100644
index 00000000..d59d3cab
--- /dev/null
+++ b/examples/routines/multi-node-time-synced.json
@@ -0,0 +1,43 @@
+{
+    "commands": [
+        ["selMsgDest 2", 0.005], // get feedback
+        ["wakeUp", 0.01], // Exiting sleep (message received after up to 30s)
+        ["selMsgDest 3", 0.02],
+        ["wakeUp", 0.03],
+        ["selMsgDest 4", 0.04],
+        ["wakeUp", 0.05], // command declined if already awake, or exit from sleep
+        ["startSync", 0.06], // time synchronisation success should be returned
+        ["selMsgDest 3", 0.1], // select message destination successful, or Device not connected
+        ["getBattery", 0.2], // battery info displayed
+        ["startBaros", 0.3],
+        ["selMsgDest 4", 1.1],
+        ["getBattery", 1.2],
+        ["startBaros", 1.3],
+        ["selMsgDest 5", 2.1],
+        ["getBattery", 2.2],
+        ["startBaros", 2.3],
+        ["selMsgDest 3", 60.1],
+        ["stopBaros", 60.2],
+        ["selMsgDest 4", 61.1],
+        ["stopBaros", 61.2],
+        ["selMsgDest 5", 62.1],
+        ["stopBaros", 62.2], // no feedback
+        ["stopSync", 65], // no feedback
+        ["enterHighSpeed", 65.1], // no feedback
+        ["selDevice 3", 70], // no feedback
+        ["readSens", 75], // received first packet
+        ["selDevice 4", 135],
+        ["readSens", 140], // if the node is turned off this will throw us into an infinite loop; make sure there's a timeout
+        ["selDevice 5", 200],
+        ["readSens", 205], // sensor reading from flash done
+        // flash erasing done is a result of turning the sensor on and off again
+        ["exitHighSpeed", 265], // no feedback
+        ["selMsgDest 3", 270.0],
+        ["sleep", 270.1], // Entering sleep
+        ["selMsgDest 4", 271.0],
+        ["sleep", 271.1],
+        ["selMsgDest 5", 272.0],
+        ["sleep", 272.1]
+    ],
+    "period": 360
+}
diff --git a/poetry.lock b/poetry.lock
index b7612c8a..7419c571 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -6,6 +6,14 @@ category = "dev"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "atomicwrites"
+version = "1.4.0"
+description = "Atomic file writes."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
 [[package]]
 name = "attrs"
 version = "21.4.0"
@@ -22,47 +30,30 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
 
 [[package]]
 name = "black"
-version = "21.6b0"
+version = "22.3.0"
 description = "The uncompromising code formatter."
 category = "dev"
 optional = false
 python-versions = ">=3.6.2"
 
 [package.dependencies]
-appdirs = "*"
-click = ">=7.1.2"
+click = ">=8.0.0"
 mypy-extensions = ">=0.4.3"
-pathspec = ">=0.8.1,<1"
-regex = ">=2020.1.8"
-toml = ">=0.10.1"
-typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""}
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
+pathspec = ">=0.9.0"
+platformdirs = ">=2"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
+typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
 
 [package.extras]
 colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
-python2 = ["typed-ast (>=1.4.2)"]
+d = ["aiohttp (>=3.7.4)"]
+jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
 uvloop = ["uvloop (>=0.15.2)"]
 
-[[package]]
-name = "blake3"
-version = "0.2.1"
-description = "Python bindings for the Rust blake3 crate"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "cached-property"
-version = "1.5.2"
-description = "A decorator for caching properties in classes."
-category = "main" -optional = false -python-versions = "*" - [[package]] name = "cachetools" -version = "5.0.0" +version = "5.2.0" description = "Extensible memoizing collections and decorators" category = "main" optional = false @@ -70,11 +61,11 @@ python-versions = "~=3.7" [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.5.18.1" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "cffi" @@ -108,17 +99,21 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "7.1.2" +version = "8.1.3" description = "Composable command line interface toolkit" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -132,7 +127,7 @@ python-versions = "*" [[package]] name = "coverage" -version = "6.3.2" +version = "6.4.1" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -167,8 +162,8 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] [[package]] name = "email-validator" -version = "1.1.3" -description = "A robust email syntax and deliverability validation library for Python 2.x/3.x." +version = "1.2.1" +description = "A robust email syntax and deliverability validation library." category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" @@ -179,7 +174,7 @@ idna = ">=2.0.0" [[package]] name = "filelock" -version = "3.6.0" +version = "3.7.1" description = "A platform independent file lock." category = "dev" optional = false @@ -223,7 +218,7 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-wtf" -version = "1.0.0" +version = "1.0.1" description = "Form rendering, validation, and CSRF protection for Flask with WTForms." 
category = "dev" optional = false @@ -267,7 +262,7 @@ google-crc32c = "1.1.2" [[package]] name = "google-api-core" -version = "2.8.0" +version = "2.8.2" description = "Google API client core library" category = "main" optional = false @@ -275,20 +270,18 @@ python-versions = ">=3.6" [package.dependencies] google-auth = ">=1.25.0,<3.0dev" -googleapis-common-protos = ">=1.52.0,<2.0dev" +googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.12.0" +protobuf = ">=3.15.0,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" [package.extras] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] name = "google-auth" -version = "2.6.2" +version = "2.7.0" description = "Google Authentication Library" category = "main" optional = false @@ -302,6 +295,7 @@ six = ">=1.9.0" [package.extras] aiohttp = ["requests (>=2.20.0,<3.0.0dev)", "aiohttp (>=3.6.2,<4.0.0dev)"] +enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] @@ -336,7 +330,7 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] name = "google-cloud-core" -version = "2.2.3" +version = "2.3.0" description = "Google Cloud API client core library" category = "main" optional = false @@ -351,7 +345,7 @@ grpc = ["grpcio (>=1.8.2,<2.0dev)"] [[package]] name = "google-cloud-pubsub" -version = "2.11.0" +version = "2.13.0" description = "Google Cloud Pub/Sub API client library" category = "main" optional = false @@ -359,17 +353,18 @@ python-versions = ">=3.6" [package.dependencies] google-api-core = {version = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} -grpc-google-iam-v1 = ">=0.12.3,<0.13dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" grpcio = ">=1.38.1,<2.0dev" grpcio-status = ">=1.16.0" -proto-plus = ">=1.15.0" +proto-plus = ">=1.15.0,<2.0.0dev" +protobuf = ">=3.19.0,<4.0.0dev" [package.extras] libcst = ["libcst (>=0.3.10)"] [[package]] name = "google-cloud-secret-manager" -version = "2.9.2" +version = "2.11.1" description = "Secret Manager API API client library" category = "main" optional = false @@ -377,8 +372,9 @@ python-versions = ">=3.6" [package.dependencies] google-api-core = {version = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev", extras = ["grpc"]} -grpc-google-iam-v1 = ">=0.12.3,<0.13dev" -proto-plus = ">=1.15.0" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.15.0,<2.0.0dev" +protobuf = ">=3.19.0,<4.0.0dev" [package.extras] libcst = ["libcst (>=0.2.5)"] @@ -416,7 +412,7 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.3.2" +version = "2.3.3" description = "Utilities for Google Media Downloads and Resumable Uploads" category = "main" optional = false @@ -431,29 +427,29 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.56.0" +version = "1.56.2" description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -grpcio = {version = ">=1.0.0", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.12.0" +grpcio = {version = ">=1.0.0,<2.0.0dev", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.15.0,<4.0.0dev" [package.extras] -grpc = ["grpcio (>=1.0.0)"] +grpc = ["grpcio 
(>=1.0.0,<2.0.0dev)"] [[package]] name = "grpc-google-iam-v1" -version = "0.12.3" -description = "GRPC library for the google-iam-v1 service" +version = "0.12.4" +description = "IAM API client library" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -googleapis-common-protos = {version = ">=1.5.2,<2.0.0dev", extras = ["grpc"]} +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} grpcio = ">=1.0.0,<2.0.0dev" [[package]] @@ -499,19 +495,18 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "h5py" -version = "3.6.0" +version = "3.7.0" description = "Read and write HDF5 files from Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -cached-property = {version = "*", markers = "python_version < \"3.8\""} numpy = ">=1.14.5" [[package]] name = "identify" -version = "2.4.12" +version = "2.5.1" description = "File identification library for Python" category = "dev" optional = false @@ -546,7 +541,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytes [[package]] name = "importlib-resources" -version = "5.6.0" +version = "5.8.0" description = "Read resources from Python packages" category = "main" optional = false @@ -559,6 +554,14 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "itsdangerous" version = "2.1.2" @@ -569,7 +572,7 @@ python-versions = ">=3.7" [[package]] name = "jinja2" -version = "3.1.1" +version = "3.1.2" description = "A very fast and expressive template engine." category = "main" optional = false @@ -634,15 +637,15 @@ python-versions = "*" [[package]] name = "numpy" -version = "1.21.5" +version = "1.21.6" description = "NumPy is the fundamental package for array computing with Python." -category = "main" +category = "dev" optional = false python-versions = ">=3.7,<3.11" [[package]] name = "octue" -version = "0.27.0" +version = "0.29.2" description = "A package providing template applications for data services, and a python SDK to the Octue API." 
category = "main" optional = false @@ -658,7 +661,7 @@ google-cloud-secret-manager = ">=2.3,<3.0" google-cloud-storage = ">=1.35.1,<3" google-crc32c = ">=1.1,<2.0" gunicorn = ">=20.1,<21.0" -h5py = {version = ">=3.6,<4.0", optional = true, markers = "extra == \"hdf5\""} +packaging = ">=21.3,<22.0" python-dateutil = ">=2.8,<3.0" pyyaml = ">=6,<7" twined = ">=0.5.0,<0.6.0" @@ -671,34 +674,13 @@ dataflow = ["apache-beam[gcp] (>=2.37,<3.0)"] name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" -[[package]] -name = "pandas" -version = "1.3.5" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "dev" -optional = false -python-versions = ">=3.7.1" - -[package.dependencies] -numpy = [ - {version = ">=1.17.3", markers = "platform_machine != \"aarch64\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, - {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and python_version < \"3.10\""}, - {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, -] -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" - -[package.extras] -test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] - [[package]] name = "pathspec" version = "0.9.0" @@ -709,15 +691,15 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "platformdirs" -version = "2.5.1" +version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] [[package]] name = "pluggy" @@ -736,11 +718,11 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "2.17.0" +version = "2.19.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] cfgv = ">=2.0.0" @@ -753,25 +735,25 @@ virtualenv = ">=20.0.8" [[package]] name = "proto-plus" -version = "1.20.3" +version = "1.20.5" description = "Beautiful, Pythonic protocol buffers." 
category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -protobuf = ">=3.19.0" +protobuf = ">=3.19.0,<4.0.0dev" [package.extras] -testing = ["google-api-core[grpc] (>=1.22.2)"] +testing = ["google-api-core[grpc] (>=1.31.5)"] [[package]] name = "protobuf" -version = "3.19.4" +version = "3.20.1" description = "Protocol Buffers" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "py" @@ -826,14 +808,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" -category = "dev" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.6.8" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +diagrams = ["railroad-diagrams", "jinja2"] [[package]] name = "pyrsistent" @@ -854,6 +836,28 @@ python-versions = "*" [package.extras] cp2110 = ["hidapi"] +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -906,14 +910,6 @@ category = "main" optional = false python-versions = ">=3.6" -[[package]] -name = "regex" -version = "2022.3.15" -description = "Alternative regular expression module, to replace re." 
-category = "dev" -optional = false -python-versions = ">=3.6" - [[package]] name = "requests" version = "2.27.1" @@ -943,20 +939,9 @@ python-versions = ">=3.6,<4" [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "scipy" -version = "1.7.3" -description = "SciPy: Scientific Library for Python" -category = "dev" -optional = false -python-versions = ">=3.7,<3.11" - -[package.dependencies] -numpy = ">=1.16.5,<1.23.0" - [[package]] name = "shapely" -version = "1.8.1.post1" +version = "1.8.2" description = "Geometric objects, predicates, and operations" category = "dev" optional = false @@ -991,9 +976,17 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "tox" -version = "3.24.5" +version = "3.25.0" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -1028,7 +1021,7 @@ python-dotenv = "*" [[package]] name = "typed-ast" -version = "1.5.2" +version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -1036,11 +1029,11 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.2.0" +description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "urllib3" @@ -1057,7 +1050,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.14.0" +version = "20.14.1" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -1076,7 +1069,7 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", [[package]] name = "werkzeug" -version = "2.1.0" +version = "2.1.2" description = "The comprehensive WSGI web application library." 
category = "main" optional = false @@ -1102,71 +1095,66 @@ email = ["email-validator"] [[package]] name = "zipp" -version = "3.7.0" +version = "3.8.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" -python-versions = ">=3.7.1,<3.11" -content-hash = "0628d4c89093a7bf71441e7515b2433857a6772c8e1d3d45d2fd5921e66d939e" +python-versions = ">=3.7.1,<3.10" +content-hash = "7364bc05e1f2282bec99abc254554cc4bfe3d730ef30f9e525fc96c3ec3e9bf7" [metadata.files] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] black = [ - {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, - {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, -] -blake3 = [ - {file = "blake3-0.2.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:228fd623d69ab67d82a420ce3b0ab5fd575ed9db215ca7e0a10d9417bbaedbcf"}, - {file = "blake3-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500d20efef13bcd7974240341eada92b1c640b31a51973e5166c51e00220fe32"}, - {file = "blake3-0.2.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c4d1a317c1937e7ddaba4c8d5316f2c08b56ef7591662496f523a96848969022"}, - {file = "blake3-0.2.1-cp310-none-win32.whl", hash = "sha256:0500592524e2180c094aca77fd505cf727ce77a4d50ddd816a58c3def31aa7c3"}, - {file = "blake3-0.2.1-cp310-none-win_amd64.whl", hash = "sha256:df90be81df4ac76e9cdbc4cc327caad08502b3cd61bcf3f6323445ac91d0abbe"}, - {file = "blake3-0.2.1-cp36-cp36m-macosx_10_7_x86_64.whl", hash = "sha256:de1c8eaa0f17628869e4fa3a2fdd119845950d6c786c67b723267c1988d6b25c"}, - {file = "blake3-0.2.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:868bff669115ffbeec6e759a89c228648aa58a6fcef8589342de6cb6b9ed8ca9"}, - {file = "blake3-0.2.1-cp36-none-win32.whl", hash = "sha256:6ea193219f38a37e838a61485df5f549ac151d3a8dbd41d9e3ebd894f5296b19"}, - {file = "blake3-0.2.1-cp36-none-win_amd64.whl", hash = "sha256:1311f0091cb152c9ec1c2273d64b1c3d166bb0b513734ebcb24329370c4fc08a"}, - {file = "blake3-0.2.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = 
"sha256:abab5b261f32b02a0b42bb5474a7268fe199391961952c38195fd4f357a6e8b0"}, - {file = "blake3-0.2.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:34b04ef213b78014cef91d9360e29fcaab130dee7fc1e5450add2e4a1fd3e907"}, - {file = "blake3-0.2.1-cp37-none-win32.whl", hash = "sha256:233eb4125a17908e6d0c733f76c0e4b6cf01ec59aaa1d7810853173749fa9d33"}, - {file = "blake3-0.2.1-cp37-none-win_amd64.whl", hash = "sha256:136d02fa9c8a14e1894cc4e0865bd98d9f5a41baf2b9b500b3d6690134f951fc"}, - {file = "blake3-0.2.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:41789806873d196be059ace27ab0f5565db20dc74c6da53d29fbd424b87ee271"}, - {file = "blake3-0.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3715da23b9555429e15f6b26718a70456f089456e6e1929949808caa383d0cb4"}, - {file = "blake3-0.2.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:16352ab67bdf9da2ba1c92232aabc56a9fd7bfc06ae2ad4d550c777400cfdbc0"}, - {file = "blake3-0.2.1-cp38-none-win32.whl", hash = "sha256:990631b705a069e485409cc6376495893786b202e80c8bd5e75a936445106654"}, - {file = "blake3-0.2.1-cp38-none-win_amd64.whl", hash = "sha256:ac01bd4541df763fce1ba3ffa1af1a0fdb39eba73cfbbff02a4108736a7fbc27"}, - {file = "blake3-0.2.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:ebb98db430271fa9814063636f3cbcaf964223d05a1134da8eb8d0af8868ff16"}, - {file = "blake3-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19f4e5c8eb0660c48b3f0dc70138ab26a7373d083dbdf27e701b81e2096d0cb4"}, - {file = "blake3-0.2.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5dd0bff49a63839dc649872368b93a51d09098efc9485c84ae910d74058a5dc4"}, - {file = "blake3-0.2.1-cp39-none-win32.whl", hash = "sha256:381e8ea6403f4c300abb19a009ca643016f57951cc72ec2f74be49642b209a1b"}, - {file = "blake3-0.2.1-cp39-none-win_amd64.whl", hash = "sha256:355e0a89a569d7f2148a421117a2fb611f3c2e977a9772d46f56ed8e0b75ca84"}, - {file = "blake3-0.2.1.tar.gz", hash = "sha256:e298e7c8e56ab37a1942b9c595f15f72695b5a31c4e8ac9957fc8e4df14a3109"}, -] -cached-property = [ - {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, - {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file = 
"black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, ] cachetools = [ - {file = "cachetools-5.0.0-py3-none-any.whl", hash = "sha256:8fecd4203a38af17928be7b90689d8083603073622229ca7077b72d8e5a976e4"}, - {file = "cachetools-5.0.0.tar.gz", hash = "sha256:486471dfa8799eb7ec503a8059e263db000cdda20075ce5e48903087f79d5fd6"}, + {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, ] cffi = [ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, @@ -1229,8 +1217,8 @@ charset-normalizer = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = 
"click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -1241,47 +1229,47 @@ coolname = [ {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, ] coverage = [ - {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, - {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, - {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, - {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, - {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, - {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, - {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, - {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, - {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, - {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, - {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, - {file = 
"coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, - {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, - {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, - {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, - {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, - {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, - {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, - {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, - {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, - {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, - {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, - {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, - {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, - {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, - {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, - {file = 
"coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, + {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, + {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, + {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, + {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, + {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, + {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, + {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, + {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, + {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, + {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, + {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, ] distlib = [ {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, @@ -1292,12 +1280,12 @@ dnspython = [ {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"}, ] email-validator = [ - {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"}, - {file = "email_validator-1.1.3.tar.gz", hash = 
"sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"}, + {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"}, + {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"}, ] filelock = [ - {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"}, - {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"}, + {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, + {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -1308,8 +1296,8 @@ flask = [ {file = "Flask-2.0.3.tar.gz", hash = "sha256:e1120c228ca2f553b470df4a5fa927ab66258467526069981b3eb0a91902687d"}, ] flask-wtf = [ - {file = "Flask-WTF-1.0.0.tar.gz", hash = "sha256:872fbb17b5888bfc734edbdcf45bc08fb365ca39f69d25dc752465a455517b28"}, - {file = "Flask_WTF-1.0.0-py3-none-any.whl", hash = "sha256:01feccfc395405cea48a3f36c23f0d766e2cc6fd2a5a065ad50ad3e5827ec797"}, + {file = "Flask-WTF-1.0.1.tar.gz", hash = "sha256:34fe5c6fee0f69b50e30f81a3b7ea16aa1492a771fe9ad0974d164610c09a6c9"}, + {file = "Flask_WTF-1.0.1-py3-none-any.whl", hash = "sha256:9d733658c80be551ce7d5bc13c7a7ac0d80df509be1e23827c847d9520f4359a"}, ] fs = [ {file = "fs-2.4.13-py2.py3-none-any.whl", hash = "sha256:1d10cc8f9c55fbcf7b23775289a13f6796dca7acd5a135c379f49e87a56a7230"}, @@ -1320,28 +1308,28 @@ gcp-storage-emulator = [ {file = "gcp_storage_emulator-2021.6.2-py3-none-any.whl", hash = "sha256:ed27cefe4d510ec5c9a744f713e9c5cac1576dd9cc6df1847650868727db70df"}, ] google-api-core = [ - {file = "google-api-core-2.8.0.tar.gz", hash = "sha256:065bb8e11c605fd232707ae50963dc1c8af5b3c95b4568887515985e6c1156b3"}, - {file = "google_api_core-2.8.0-py3-none-any.whl", hash = "sha256:1b9f59236ce1bae9a687c1d4f22957e79a2669e53d032893f6bf0fca54f6931d"}, + {file = "google-api-core-2.8.2.tar.gz", hash = "sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc"}, + {file = "google_api_core-2.8.2-py3-none-any.whl", hash = "sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50"}, ] google-auth = [ - {file = "google-auth-2.6.2.tar.gz", hash = "sha256:60d449f8142c742db760f4c0be39121bc8d9be855555d784c252deaca1ced3f5"}, - {file = "google_auth-2.6.2-py2.py3-none-any.whl", hash = "sha256:3ba4d63cb29c1e6d5ffcc1c0623c03cf02ede6240a072f213084749574e691ab"}, + {file = "google-auth-2.7.0.tar.gz", hash = "sha256:8a954960f852d5f19e6af14dd8e75c20159609e85d8db37e4013cc8c3824a7e1"}, + {file = "google_auth-2.7.0-py2.py3-none-any.whl", hash = "sha256:df549a1433108801b11bdcc0e312eaf0d5f0500db42f0523e4d65c78722e8475"}, ] google-cloud-bigquery = [ {file = "google-cloud-bigquery-2.34.3.tar.gz", hash = "sha256:0ab6362a86a29f17e379e886b49544bc0b75626902a48d12c13a0b47f821bf4a"}, {file = "google_cloud_bigquery-2.34.3-py2.py3-none-any.whl", hash = "sha256:d702c609e57a3d7d7fbd37e4913d8d0e0e77eabaf7119037ceaa33e2370d7dcb"}, ] google-cloud-core = [ - {file = "google-cloud-core-2.2.3.tar.gz", hash = "sha256:89d2f7189bc6dc74de128d423ea52cc8719f0a5dbccd9ca80433f6504a20255c"}, - {file = "google_cloud_core-2.2.3-py2.py3-none-any.whl", 
hash = "sha256:a423852f4c36622376c8f0be509b67533690e061062368b763b92694c4ee06a7"}, + {file = "google-cloud-core-2.3.0.tar.gz", hash = "sha256:fdaa629e6174b4177c2d56eb8ab1ddd87661064d0a3e9bb06b62e4d7e2344669"}, + {file = "google_cloud_core-2.3.0-py2.py3-none-any.whl", hash = "sha256:35900f614045a33d5208e1d50f0d7945df98ce088388ce7237e7a2db12d5656e"}, ] google-cloud-pubsub = [ - {file = "google-cloud-pubsub-2.11.0.tar.gz", hash = "sha256:6ed3c9d97f88d1ff02dd80c1f5b6703efec75a7aacabd9285ac4fe0b22b50203"}, - {file = "google_cloud_pubsub-2.11.0-py2.py3-none-any.whl", hash = "sha256:4cef16ef78b533f5177aaafc85c9ceac29a61afdbd2efad017999859c7ea5ed4"}, + {file = "google-cloud-pubsub-2.13.0.tar.gz", hash = "sha256:a5c2e05c83d60bb17a152e599e7f4324c9ffb6c8cda44ffb6250b16280c383e6"}, + {file = "google_cloud_pubsub-2.13.0-py2.py3-none-any.whl", hash = "sha256:555f792093c32a8611dbec0419884e83a77827b45246cf991506122a06084748"}, ] google-cloud-secret-manager = [ - {file = "google-cloud-secret-manager-2.9.2.tar.gz", hash = "sha256:5bddd20c947c6d43d05fd2b3a19e98bd5f6453e4f077d86e1f4f5299aa78f358"}, - {file = "google_cloud_secret_manager-2.9.2-py2.py3-none-any.whl", hash = "sha256:befa099b43f559c44f198fcb900ef1a9dde13bc802b58c451b0c64f6e3fa1366"}, + {file = "google-cloud-secret-manager-2.11.1.tar.gz", hash = "sha256:b52cb477c91dc83484fbf81c83807e7e99672c9e22a686be4d9bd4a04c5a6155"}, + {file = "google_cloud_secret_manager-2.11.1-py2.py3-none-any.whl", hash = "sha256:41c837a583b904a134e65c2347b60009a10ead00dc040db8570b73bc78a6777f"}, ] google-cloud-storage = [ {file = "google-cloud-storage-1.44.0.tar.gz", hash = "sha256:29edbfeedd157d853049302bf5d104055c6f0cb7ef283537da3ce3f730073001"}, @@ -1379,15 +1367,16 @@ google-crc32c = [ {file = "google_crc32c-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:78cf5b1bd30f3a6033b41aa4ce8c796870bc4645a15d3ef47a4b05d31b0a6dc1"}, ] google-resumable-media = [ - {file = "google-resumable-media-2.3.2.tar.gz", hash = "sha256:06924e8b1e79f158f0202e7dd151ad75b0ea9d59b997c850f56bdd4a5a361513"}, - {file = "google_resumable_media-2.3.2-py2.py3-none-any.whl", hash = "sha256:3c13f84813861ac8f5b6371254bdd437076bf1f3bac527a9f3fd123a70166f52"}, + {file = "google-resumable-media-2.3.3.tar.gz", hash = "sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c"}, + {file = "google_resumable_media-2.3.3-py2.py3-none-any.whl", hash = "sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5"}, ] googleapis-common-protos = [ - {file = "googleapis-common-protos-1.56.0.tar.gz", hash = "sha256:4007500795bcfc269d279f0f7d253ae18d6dc1ff5d5a73613ffe452038b1ec5f"}, - {file = "googleapis_common_protos-1.56.0-py2.py3-none-any.whl", hash = "sha256:60220c89b8bd5272159bed4929ecdc1243ae1f73437883a499a44a1cbc084086"}, + {file = "googleapis-common-protos-1.56.2.tar.gz", hash = "sha256:b09b56f5463070c2153753ef123f07d2e49235e89148e9b2459ec8ed2f68d7d3"}, + {file = "googleapis_common_protos-1.56.2-py2.py3-none-any.whl", hash = "sha256:023eaea9d8c1cceccd9587c6af6c20f33eeeb05d4148670f2b0322dc1511700c"}, ] grpc-google-iam-v1 = [ - {file = "grpc-google-iam-v1-0.12.3.tar.gz", hash = "sha256:0bfb5b56f648f457021a91c0df0db4934b6e0c300bd0f2de2333383fe958aa72"}, + {file = "grpc-google-iam-v1-0.12.4.tar.gz", hash = "sha256:3f0ac2c940b9a855d7ce7e31fde28bddb0d9ac362d32d07c67148306931a0e30"}, + {file = "grpc_google_iam_v1-0.12.4-py2.py3-none-any.whl", hash = "sha256:312801ae848aeb8408c099ea372b96d253077e7851aae1a9e745df984f81f20c"}, ] grpcio = [ {file = "grpcio-1.45.0-cp310-cp310-linux_armv7l.whl", hash 
= "sha256:0d74a159df9401747e57960f0772f4371486e3281919004efa9df8a82985abee"}, @@ -1454,26 +1443,30 @@ gunicorn = [ {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] h5py = [ - {file = "h5py-3.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a5320837c60870911645e9a935099bdb2be6a786fcf0dac5c860f3b679e2de55"}, - {file = "h5py-3.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98646e659bf8591a2177e12a4461dced2cad72da0ba4247643fd118db88880d2"}, - {file = "h5py-3.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:5996ff5adefd2d68c330a4265b6ef92e51b2fc674834a5990add5033bf109e20"}, - {file = "h5py-3.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c9a5529343a619fea777b7caa27d493595b28b5af8b005e8d1817559fcccf493"}, - {file = "h5py-3.6.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e2b49c48df05e19bb20b400b7ff7dc6f1ee36b84dc717c3771c468b33697b466"}, - {file = "h5py-3.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd9447633b0bafaf82190d9a8d56f3cb2e8d30169483aee67d800816e028190a"}, - {file = "h5py-3.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1c5acc660c458421e88c4c5fe092ce15923adfac4c732af1ac4fced683a5ea97"}, - {file = "h5py-3.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:35ab552c6f0a93365b3cb5664a5305f3920daa0a43deb5b2c547c52815ec46b9"}, - {file = "h5py-3.6.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:542781d50e1182b8fb619b1265dfe1c765e18215f818b0ab28b2983c28471325"}, - {file = "h5py-3.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f39242960b8d7f86f3056cc2546aa3047ff4835985f6483229af8f029e9c8db"}, - {file = "h5py-3.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ecedf16c613973622a334701f67edcc0249469f9daa0576e994fb20ac0405db"}, - {file = "h5py-3.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d8cacad89aa7daf3626fce106f7f2662ac35b14849df22d252d0d8fab9dc1c0b"}, - {file = "h5py-3.6.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dbaa1ed9768bf9ff04af0919acc55746e62b28333644f0251f38768313f31745"}, - {file = "h5py-3.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954c5c39a09b5302f69f752c3bbf165d368a65c8d200f7d5655e0fa6368a75e6"}, - {file = "h5py-3.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:9fd8a14236fdd092a20c0bdf25c3aba3777718d266fabb0fdded4fcf252d1630"}, - {file = "h5py-3.6.0.tar.gz", hash = "sha256:8752d2814a92aba4e2b2a5922d2782d0029102d99caaf3c201a566bc0b40db29"}, + {file = "h5py-3.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d77af42cb751ad6cc44f11bae73075a07429a5cf2094dfde2b1e716e059b3911"}, + {file = "h5py-3.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63beb8b7b47d0896c50de6efb9a1eaa81dbe211f3767e7dd7db159cea51ba37a"}, + {file = "h5py-3.7.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:04e2e1e2fc51b8873e972a08d2f89625ef999b1f2d276199011af57bb9fc7851"}, + {file = "h5py-3.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73307c876af49aa869ec5df1818e9bb0bdcfcf8a5ba773cc45a4fba5a286a5c"}, + {file = "h5py-3.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:f514b24cacdd983e61f8d371edac8c1b780c279d0acb8485639e97339c866073"}, + {file = "h5py-3.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:43fed4d13743cf02798a9a03a360a88e589d81285e72b83f47d37bb64ed44881"}, + {file = 
"h5py-3.7.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c038399ce09a58ff8d89ec3e62f00aa7cb82d14f34e24735b920e2a811a3a426"}, + {file = "h5py-3.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03d64fb86bb86b978928bad923b64419a23e836499ec6363e305ad28afd9d287"}, + {file = "h5py-3.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e5b7820b75f9519499d76cc708e27242ccfdd9dfb511d6deb98701961d0445aa"}, + {file = "h5py-3.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a9351d729ea754db36d175098361b920573fdad334125f86ac1dd3a083355e20"}, + {file = "h5py-3.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6776d896fb90c5938de8acb925e057e2f9f28755f67ec3edcbc8344832616c38"}, + {file = "h5py-3.7.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a047fddbe6951bce40e9cde63373c838a978c5e05a011a682db9ba6334b8e85"}, + {file = "h5py-3.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0798a9c0ff45f17d0192e4d7114d734cac9f8b2b2c76dd1d923c4d0923f27bb6"}, + {file = "h5py-3.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:0d8de8cb619fc597da7cf8cdcbf3b7ff8c5f6db836568afc7dc16d21f59b2b49"}, + {file = "h5py-3.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f084bbe816907dfe59006756f8f2d16d352faff2d107f4ffeb1d8de126fc5dc7"}, + {file = "h5py-3.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1fcb11a2dc8eb7ddcae08afd8fae02ba10467753a857fa07a404d700a93f3d53"}, + {file = "h5py-3.7.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ed43e2cc4f511756fd664fb45d6b66c3cbed4e3bd0f70e29c37809b2ae013c44"}, + {file = "h5py-3.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e7535df5ee3dc3e5d1f408fdfc0b33b46bc9b34db82743c82cd674d8239b9ad"}, + {file = "h5py-3.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:9e2ad2aa000f5b1e73b5dfe22f358ca46bf1a2b6ca394d9659874d7fc251731a"}, + {file = "h5py-3.7.0.tar.gz", hash = "sha256:3fcf37884383c5da64846ab510190720027dca0768def34dd8dcb659dbe5cbf3"}, ] identify = [ - {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"}, - {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"}, + {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, + {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -1484,16 +1477,20 @@ importlib-metadata = [ {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] importlib-resources = [ - {file = "importlib_resources-5.6.0-py3-none-any.whl", hash = "sha256:a9dd72f6cc106aeb50f6e66b86b69b454766dd6e39b69ac68450253058706bcc"}, - {file = "importlib_resources-5.6.0.tar.gz", hash = "sha256:1b93238cbf23b4cde34240dd8321d99e9bf2eb4bc91c0c99b2886283e7baad85"}, + {file = "importlib_resources-5.8.0-py3-none-any.whl", hash = "sha256:7952325ffd516c05a8ad0858c74dff2c3343f136fe66a6002b2623dd1d43f223"}, + {file = "importlib_resources-5.8.0.tar.gz", hash = "sha256:568c9f16cb204f9decc8d6d24a572eeea27dacbb4cee9e6b03a8025736769751"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = 
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] itsdangerous = [ {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] jinja2 = [ - {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, - {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jsonschema = [ {file = "jsonschema-4.4.0-py3-none-any.whl", hash = "sha256:77281a1f71684953ee8b3d488371b162419767973789272434bbc3f29d9c8823"}, @@ -1554,119 +1551,91 @@ nodeenv = [ {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] numpy = [ - {file = "numpy-1.21.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:301e408a052fdcda5cdcf03021ebafc3c6ea093021bf9d1aa47c54d48bdad166"}, - {file = "numpy-1.21.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7e8f6216f180f3fd4efb73de5d1eaefb5f5a1ee5b645c67333033e39440e63a"}, - {file = "numpy-1.21.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc7a7d7b0ed72589fd8b8486b9b42a564f10b8762be8bd4d9df94b807af4a089"}, - {file = "numpy-1.21.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ca1d7c8aef6e996112d0ce873ac9dfa1eaf4a1196b4ff7ff73880a09923ba7"}, - {file = "numpy-1.21.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4b2fb01f1b4ddbe2453468ea0719f4dbb1f5caa712c8b21bb3dd1480cd30d9"}, - {file = "numpy-1.21.5-cp310-cp310-win_amd64.whl", hash = "sha256:cc1b30205d138d1005adb52087ff45708febbef0e420386f58664f984ef56954"}, - {file = "numpy-1.21.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:08de8472d9f7571f9d51b27b75e827f5296295fa78817032e84464be8bb905bc"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4fe6a006557b87b352c04596a6e3f12a57d6e5f401d804947bd3188e6b0e0e76"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3d893b0871322eaa2f8c7072cdb552d8e2b27645b7875a70833c31e9274d4611"}, - {file = "numpy-1.21.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341dddcfe3b7b6427a28a27baa59af5ad51baa59bfec3264f1ab287aa3b30b13"}, - {file = "numpy-1.21.5-cp37-cp37m-win32.whl", hash = "sha256:ca9c23848292c6fe0a19d212790e62f398fd9609aaa838859be8459bfbe558aa"}, - {file = "numpy-1.21.5-cp37-cp37m-win_amd64.whl", hash = "sha256:025b497014bc33fc23897859350f284323f32a2fff7654697f5a5fc2a19e9939"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a5098df115340fb17fc93867317a947e1dcd978c3888c5ddb118366095851f8"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:311283acf880cfcc20369201bd75da907909afc4666966c7895cbed6f9d2c640"}, - {file = "numpy-1.21.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b545ebadaa2b878c8630e5bcdb97fc4096e779f335fc0f943547c1c91540c815"}, - {file = 
"numpy-1.21.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c5562bcc1a9b61960fc8950ade44d00e3de28f891af0acc96307c73613d18f6e"}, - {file = "numpy-1.21.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eed2afaa97ec33b4411995be12f8bdb95c87984eaa28d76cf628970c8a2d689a"}, - {file = "numpy-1.21.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61bada43d494515d5b122f4532af226fdb5ee08fe5b5918b111279843dc6836a"}, - {file = "numpy-1.21.5-cp38-cp38-win32.whl", hash = "sha256:7b9d6b14fc9a4864b08d1ba57d732b248f0e482c7b2ff55c313137e3ed4d8449"}, - {file = "numpy-1.21.5-cp38-cp38-win_amd64.whl", hash = "sha256:dbce7adeb66b895c6aaa1fad796aaefc299ced597f6fbd9ceddb0dd735245354"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:507c05c7a37b3683eb08a3ff993bd1ee1e6c752f77c2f275260533b265ecdb6c"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:00c9fa73a6989895b8815d98300a20ac993c49ac36c8277e8ffeaa3631c0dbbb"}, - {file = "numpy-1.21.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69a5a8d71c308d7ef33ef72371c2388a90e3495dbb7993430e674006f94797d5"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2d8adfca843bc46ac199a4645233f13abf2011a0b2f4affc5c37cd552626f27b"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c293d3c0321996cd8ffe84215ffe5d269fd9d1d12c6f4ffe2b597a7c30d3e593"}, - {file = "numpy-1.21.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c978544be9e04ed12016dd295a74283773149b48f507d69b36f91aa90a643e5"}, - {file = "numpy-1.21.5-cp39-cp39-win32.whl", hash = "sha256:2a9add27d7fc0fdb572abc3b2486eb3b1395da71e0254c5552b2aad2a18b5441"}, - {file = "numpy-1.21.5-cp39-cp39-win_amd64.whl", hash = "sha256:1964db2d4a00348b7a60ee9d013c8cb0c566644a589eaa80995126eac3b99ced"}, - {file = "numpy-1.21.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a7c4b701ca418cd39e28ec3b496e6388fe06de83f5f0cb74794fa31cfa384c02"}, - {file = "numpy-1.21.5.zip", hash = "sha256:6a5928bc6241264dce5ed509e66f33676fc97f464e7a919edc672fb5532221ee"}, + {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25"}, + {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e"}, + {file = "numpy-1.21.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6"}, + {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb"}, + {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1"}, + {file = "numpy-1.21.6-cp310-cp310-win32.whl", hash = "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c"}, + {file = "numpy-1.21.6-cp310-cp310-win_amd64.whl", hash = "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f"}, + {file = "numpy-1.21.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db"}, + {file = "numpy-1.21.6-cp37-cp37m-win32.whl", hash = "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e"}, + {file = "numpy-1.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4"}, + {file = "numpy-1.21.6-cp38-cp38-win32.whl", hash = "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470"}, + {file = "numpy-1.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b"}, + {file = "numpy-1.21.6-cp39-cp39-win32.whl", hash = "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786"}, + {file = "numpy-1.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3"}, + {file = "numpy-1.21.6-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0"}, + {file = "numpy-1.21.6.zip", hash = "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656"}, ] octue = [ - {file = "octue-0.27.0-py3-none-any.whl", hash = "sha256:08f406fbe775e2733e0635f8f96c7c9964523fe8aeb07279771881b358cabecd"}, - {file = "octue-0.27.0.tar.gz", hash = 
"sha256:ee7ac545aa41924f6bce8309392af308e84b1128af81e7a2ca6c0dea99ec60b0"}, + {file = "octue-0.29.2-py3-none-any.whl", hash = "sha256:8331538b751fd124dce1bbc83417bf18759a0100240141e7bd9209f83aee9ccf"}, + {file = "octue-0.29.2.tar.gz", hash = "sha256:a38bc418c949bef3ee5f7bd8f3217c96251ee185a19d16a2682adb80b8767695"}, ] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -pandas = [ - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62d5b5ce965bae78f12c1c0df0d387899dd4211ec0bdc52822373f13a3a022b9"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adfeb11be2d54f275142c8ba9bf67acee771b7186a5745249c7d5a06c670136b"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a8c055d58873ad81cae290d974d13dd479b82cbb975c3e1fa2cf1920715296"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd541ab09e1f80a2a1760032d665f6e032d8e44055d602d65eeea6e6e85498cb"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2651d75b9a167cc8cc572cf787ab512d16e316ae00ba81874b560586fa1325e0"}, - {file = "pandas-1.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:aaf183a615ad790801fa3cf2fa450e5b6d23a54684fe386f7e3208f8b9bfbef6"}, - {file = "pandas-1.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:344295811e67f8200de2390093aeb3c8309f5648951b684d8db7eee7d1c81fb7"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552020bf83b7f9033b57cbae65589c01e7ef1544416122da0c79140c93288f56"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cce0c6bbeb266b0e39e35176ee615ce3585233092f685b6a82362523e59e5b4"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d28a3c65463fd0d0ba8bbb7696b23073efee0510783340a44b08f5e96ffce0c"}, - {file = "pandas-1.3.5-cp37-cp37m-win32.whl", hash = "sha256:a62949c626dd0ef7de11de34b44c6475db76995c2064e2d99c6498c3dba7fe58"}, - {file = "pandas-1.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:8025750767e138320b15ca16d70d5cdc1886e8f9cc56652d89735c016cd8aea6"}, - {file = "pandas-1.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe95bae4e2d579812865db2212bb733144e34d0c6785c0685329e5b60fcb85dd"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f261553a1e9c65b7a310302b9dbac31cf0049a51695c14ebe04e4bfd4a96f02"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6dbec5f3e6d5dc80dcfee250e0a2a652b3f28663492f7dab9a24416a48ac39"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3bc49af96cd6285030a64779de5b3688633a07eb75c124b0747134a63f4c05f"}, - {file = "pandas-1.3.5-cp38-cp38-win32.whl", hash = "sha256:b6b87b2fb39e6383ca28e2829cddef1d9fc9e27e55ad91ca9c435572cdba51bf"}, - {file = "pandas-1.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:a395692046fd8ce1edb4c6295c35184ae0c2bbe787ecbe384251da609e27edcb"}, - {file = "pandas-1.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd971a3f08b745a75a86c00b97f3007c2ea175951286cdda6abe543e687e5f2f"}, - {file = 
"pandas-1.3.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37f06b59e5bc05711a518aa10beaec10942188dccb48918bb5ae602ccbc9f1a0"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c21778a688d3712d35710501f8001cdbf96eb70a7c587a3d5613573299fdca6"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3345343206546545bc26a05b4602b6a24385b5ec7c75cb6059599e3d56831da2"}, - {file = "pandas-1.3.5-cp39-cp39-win32.whl", hash = "sha256:c69406a2808ba6cf580c2255bcf260b3f214d2664a3a4197d0e640f573b46fd3"}, - {file = "pandas-1.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:32e1a26d5ade11b547721a72f9bfc4bd113396947606e00d5b4a5b79b3dcb006"}, - {file = "pandas-1.3.5.tar.gz", hash = "sha256:1e4285f5de1012de20ca46b188ccf33521bff61ba5c5ebd78b4fb28e5416a9f1"}, -] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] platformdirs = [ - {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, - {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, - {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, + {file = "pre_commit-2.19.0-py2.py3-none-any.whl", hash = "sha256:10c62741aa5704faea2ad69cb550ca78082efe5697d6f04e5710c3c229afdd10"}, + {file = "pre_commit-2.19.0.tar.gz", hash = "sha256:4233a1e38621c87d9dda9808c6606d7e7ba0e087cd56d3fe03202a01d2919615"}, ] proto-plus = [ - {file = "proto-plus-1.20.3.tar.gz", hash = "sha256:f28b225bc9e6c14e206fb7f8e996a46fb2ccd902648e512d496abb6a716a4ae5"}, - {file = "proto_plus-1.20.3-py3-none-any.whl", hash = "sha256:b06be21c3848fbc20387d1d6891a9b97dfa1cdd0f10d3d42ef70b5700ec0f423"}, + {file = "proto-plus-1.20.5.tar.gz", hash = "sha256:81794eb1be333c67986333948df70ebb8cdf538e039f8cfa92fd2a9d7176d405"}, + {file = "proto_plus-1.20.5-py3-none-any.whl", hash = "sha256:fa29fec8a91cf178bc1d8bf9263769421d2dba7787eae42b67235676e211c158"}, ] protobuf = [ - {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, - {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, - {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = 
"sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, - {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, - {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, - {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, - {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, - {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, - {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, - {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, - {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, - {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, - {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, - {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, - {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, - {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, - {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, - {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, - {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, - {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, - {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, - {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, + {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, + {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, + 
{file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, + {file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"}, + {file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"}, + {file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"}, + {file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"}, + {file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"}, + {file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"}, + {file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"}, + {file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"}, + {file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"}, + {file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"}, + {file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"}, + {file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"}, + {file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"}, + {file = "protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"}, + {file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"}, + {file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"}, + {file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"}, + {file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"}, + {file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"}, + {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, + {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -1715,8 +1684,8 @@ pyflakes = [ {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = 
"sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pyrsistent = [ {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, @@ -1745,6 +1714,10 @@ pyserial = [ {file = "pyserial-3.5-py2.py3-none-any.whl", hash = "sha256:c4451db6ba391ca6ca299fb3ec7bae67a5c55dde170964c7a14ceefec02f2cf0"}, {file = "pyserial-3.5.tar.gz", hash = "sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb"}, ] +pytest = [ + {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, + {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1796,82 +1769,6 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] -regex = [ - {file = "regex-2022.3.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:42eb13b93765c6698a5ab3bcd318d8c39bb42e5fa8a7fcf7d8d98923f3babdb1"}, - {file = "regex-2022.3.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9beb03ff6fe509d6455971c2489dceb31687b38781206bcec8e68bdfcf5f1db2"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a5a1fdc9f148a8827d55b05425801acebeeefc9e86065c7ac8b8cc740a91ff"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb374a2a4dba7c4be0b19dc7b1adc50e6c2c26c3369ac629f50f3c198f3743a4"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c33ce0c665dd325200209340a88438ba7a470bd5f09f7424e520e1a3ff835b52"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04c09b9651fa814eeeb38e029dc1ae83149203e4eeb94e52bb868fadf64852bc"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab5d89cfaf71807da93c131bb7a19c3e19eaefd613d14f3bce4e97de830b15df"}, - {file = "regex-2022.3.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e2630ae470d6a9f8e4967388c1eda4762706f5750ecf387785e0df63a4cc5af"}, - {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:df037c01d68d1958dad3463e2881d3638a0d6693483f58ad41001aa53a83fcea"}, - {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:940570c1a305bac10e8b2bc934b85a7709c649317dd16520471e85660275083a"}, - {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7f63877c87552992894ea1444378b9c3a1d80819880ae226bb30b04789c0828c"}, 
- {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3e265b388cc80c7c9c01bb4f26c9e536c40b2c05b7231fbb347381a2e1c8bf43"}, - {file = "regex-2022.3.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:058054c7a54428d5c3e3739ac1e363dc9347d15e64833817797dc4f01fb94bb8"}, - {file = "regex-2022.3.15-cp310-cp310-win32.whl", hash = "sha256:76435a92e444e5b8f346aed76801db1c1e5176c4c7e17daba074fbb46cb8d783"}, - {file = "regex-2022.3.15-cp310-cp310-win_amd64.whl", hash = "sha256:174d964bc683b1e8b0970e1325f75e6242786a92a22cedb2a6ec3e4ae25358bd"}, - {file = "regex-2022.3.15-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6e1d8ed9e61f37881c8db383a124829a6e8114a69bd3377a25aecaeb9b3538f8"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b52771f05cff7517f7067fef19ffe545b1f05959e440d42247a17cd9bddae11b"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:673f5a393d603c34477dbad70db30025ccd23996a2d0916e942aac91cc42b31a"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8923e1c5231549fee78ff9b2914fad25f2e3517572bb34bfaa3aea682a758683"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764e66a0e382829f6ad3bbce0987153080a511c19eb3d2f8ead3f766d14433ac"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd00859291658fe1fda48a99559fb34da891c50385b0bfb35b808f98956ef1e7"}, - {file = "regex-2022.3.15-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa2ce79f3889720b46e0aaba338148a1069aea55fda2c29e0626b4db20d9fcb7"}, - {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:34bb30c095342797608727baf5c8aa122406aa5edfa12107b8e08eb432d4c5d7"}, - {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:25ecb1dffc5e409ca42f01a2b2437f93024ff1612c1e7983bad9ee191a5e8828"}, - {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:aa5eedfc2461c16a092a2fabc5895f159915f25731740c9152a1b00f4bcf629a"}, - {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:7d1a6e403ac8f1d91d8f51c441c3f99367488ed822bda2b40836690d5d0059f5"}, - {file = "regex-2022.3.15-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3e4d710ff6539026e49f15a3797c6b1053573c2b65210373ef0eec24480b900b"}, - {file = "regex-2022.3.15-cp36-cp36m-win32.whl", hash = "sha256:0100f0ded953b6b17f18207907159ba9be3159649ad2d9b15535a74de70359d3"}, - {file = "regex-2022.3.15-cp36-cp36m-win_amd64.whl", hash = "sha256:f320c070dea3f20c11213e56dbbd7294c05743417cde01392148964b7bc2d31a"}, - {file = "regex-2022.3.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fc8c7958d14e8270171b3d72792b609c057ec0fa17d507729835b5cff6b7f69a"}, - {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ca6dcd17f537e9f3793cdde20ac6076af51b2bd8ad5fe69fa54373b17b48d3c"}, - {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0214ff6dff1b5a4b4740cfe6e47f2c4c92ba2938fca7abbea1359036305c132f"}, - {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a98ae493e4e80b3ded6503ff087a8492db058e9c68de371ac3df78e88360b374"}, - {file = 
"regex-2022.3.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b1cc70e31aacc152a12b39245974c8fccf313187eead559ee5966d50e1b5817"}, - {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4829db3737480a9d5bfb1c0320c4ee13736f555f53a056aacc874f140e98f64"}, - {file = "regex-2022.3.15-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:303b15a3d32bf5fe5a73288c316bac5807587f193ceee4eb6d96ee38663789fa"}, - {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:dc7b7c16a519d924c50876fb152af661a20749dcbf653c8759e715c1a7a95b18"}, - {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ce3057777a14a9a1399b81eca6a6bfc9612047811234398b84c54aeff6d536ea"}, - {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:48081b6bff550fe10bcc20c01cf6c83dbca2ccf74eeacbfac240264775fd7ecf"}, - {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dcbb7665a9db9f8d7642171152c45da60e16c4f706191d66a1dc47ec9f820aed"}, - {file = "regex-2022.3.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c155a1a80c5e7a8fa1d9bb1bf3c8a953532b53ab1196092749bafb9d3a7cbb60"}, - {file = "regex-2022.3.15-cp37-cp37m-win32.whl", hash = "sha256:04b5ee2b6d29b4a99d38a6469aa1db65bb79d283186e8460542c517da195a8f6"}, - {file = "regex-2022.3.15-cp37-cp37m-win_amd64.whl", hash = "sha256:797437e6024dc1589163675ae82f303103063a0a580c6fd8d0b9a0a6708da29e"}, - {file = "regex-2022.3.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8afcd1c2297bc989dceaa0379ba15a6df16da69493635e53431d2d0c30356086"}, - {file = "regex-2022.3.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0066a6631c92774391f2ea0f90268f0d82fffe39cb946f0f9c6b382a1c61a5e5"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8248f19a878c72d8c0a785a2cd45d69432e443c9f10ab924c29adda77b324ae"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d1f3ea0d1924feb4cf6afb2699259f658a08ac6f8f3a4a806661c2dfcd66db1"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:794a6bc66c43db8ed06698fc32aaeaac5c4812d9f825e9589e56f311da7becd9"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d1445824944e642ffa54c4f512da17a953699c563a356d8b8cbdad26d3b7598"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f553a1190ae6cd26e553a79f6b6cfba7b8f304da2071052fa33469da075ea625"}, - {file = "regex-2022.3.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:75a5e6ce18982f0713c4bac0704bf3f65eed9b277edd3fb9d2b0ff1815943327"}, - {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f16cf7e4e1bf88fecf7f41da4061f181a6170e179d956420f84e700fb8a3fd6b"}, - {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dad3991f0678facca1a0831ec1ddece2eb4d1dd0f5150acb9440f73a3b863907"}, - {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:491fc754428514750ab21c2d294486223ce7385446f2c2f5df87ddbed32979ae"}, - {file = "regex-2022.3.15-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6504c22c173bb74075d7479852356bb7ca80e28c8e548d4d630a104f231e04fb"}, - {file = 
"regex-2022.3.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01c913cf573d1da0b34c9001a94977273b5ee2fe4cb222a5d5b320f3a9d1a835"}, - {file = "regex-2022.3.15-cp38-cp38-win32.whl", hash = "sha256:029e9e7e0d4d7c3446aa92474cbb07dafb0b2ef1d5ca8365f059998c010600e6"}, - {file = "regex-2022.3.15-cp38-cp38-win_amd64.whl", hash = "sha256:947a8525c0a95ba8dc873191f9017d1b1e3024d4dc757f694e0af3026e34044a"}, - {file = "regex-2022.3.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:591d4fba554f24bfa0421ba040cd199210a24301f923ed4b628e1e15a1001ff4"}, - {file = "regex-2022.3.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9809404528a999cf02a400ee5677c81959bc5cb938fdc696b62eb40214e3632"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f08a7e4d62ea2a45557f561eea87c907222575ca2134180b6974f8ac81e24f06"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a86cac984da35377ca9ac5e2e0589bd11b3aebb61801204bd99c41fac516f0d"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:286908cbe86b1a0240a867aecfe26a439b16a1f585d2de133540549831f8e774"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b7494df3fdcc95a1f76cf134d00b54962dd83189520fd35b8fcd474c0aa616d"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b1ceede92400b3acfebc1425937454aaf2c62cd5261a3fabd560c61e74f6da3"}, - {file = "regex-2022.3.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0317eb6331146c524751354ebef76a7a531853d7207a4d760dfb5f553137a2a4"}, - {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c144405220c5ad3f5deab4c77f3e80d52e83804a6b48b6bed3d81a9a0238e4c"}, - {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5b2e24f3ae03af3d8e8e6d824c891fea0ca9035c5d06ac194a2700373861a15c"}, - {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f2c53f3af011393ab5ed9ab640fa0876757498aac188f782a0c620e33faa2a3d"}, - {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:060f9066d2177905203516c62c8ea0066c16c7342971d54204d4e51b13dfbe2e"}, - {file = "regex-2022.3.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:530a3a16e57bd3ea0dff5ec2695c09632c9d6c549f5869d6cf639f5f7153fb9c"}, - {file = "regex-2022.3.15-cp39-cp39-win32.whl", hash = "sha256:78ce90c50d0ec970bd0002462430e00d1ecfd1255218d52d08b3a143fe4bde18"}, - {file = "regex-2022.3.15-cp39-cp39-win_amd64.whl", hash = "sha256:c5adc854764732dbd95a713f2e6c3e914e17f2ccdc331b9ecb777484c31f73b6"}, - {file = "regex-2022.3.15.tar.gz", hash = "sha256:0a7b75cc7bb4cc0334380053e4671c560e31272c9d2d5a6c4b8e9ae2c9bd0f82"}, -] requests = [ {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, @@ -1880,64 +1777,41 @@ rsa = [ {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"}, {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"}, ] -scipy = [ - {file = "scipy-1.7.3-1-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:c9e04d7e9b03a8a6ac2045f7c5ef741be86727d8f49c45db45f244bdd2bcff17"}, - {file = "scipy-1.7.3-1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b0e0aeb061a1d7dcd2ed59ea57ee56c9b23dd60100825f98238c06ee5cc4467e"}, - {file = "scipy-1.7.3-1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b78a35c5c74d336f42f44106174b9851c783184a85a3fe3e68857259b37b9ffb"}, - {file = "scipy-1.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:173308efba2270dcd61cd45a30dfded6ec0085b4b6eb33b5eb11ab443005e088"}, - {file = "scipy-1.7.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:21b66200cf44b1c3e86495e3a436fc7a26608f92b8d43d344457c54f1c024cbc"}, - {file = "scipy-1.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceebc3c4f6a109777c0053dfa0282fddb8893eddfb0d598574acfb734a926168"}, - {file = "scipy-1.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7eaea089345a35130bc9a39b89ec1ff69c208efa97b3f8b25ea5d4c41d88094"}, - {file = "scipy-1.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:304dfaa7146cffdb75fbf6bb7c190fd7688795389ad060b970269c8576d038e9"}, - {file = "scipy-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:033ce76ed4e9f62923e1f8124f7e2b0800db533828c853b402c7eec6e9465d80"}, - {file = "scipy-1.7.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4d242d13206ca4302d83d8a6388c9dfce49fc48fdd3c20efad89ba12f785bf9e"}, - {file = "scipy-1.7.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8499d9dd1459dc0d0fe68db0832c3d5fc1361ae8e13d05e6849b358dc3f2c279"}, - {file = "scipy-1.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca36e7d9430f7481fc7d11e015ae16fbd5575615a8e9060538104778be84addf"}, - {file = "scipy-1.7.3-cp37-cp37m-win32.whl", hash = "sha256:e2c036492e673aad1b7b0d0ccdc0cb30a968353d2c4bf92ac8e73509e1bf212c"}, - {file = "scipy-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:866ada14a95b083dd727a845a764cf95dd13ba3dc69a16b99038001b05439709"}, - {file = "scipy-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:65bd52bf55f9a1071398557394203d881384d27b9c2cad7df9a027170aeaef93"}, - {file = "scipy-1.7.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:f99d206db1f1ae735a8192ab93bd6028f3a42f6fa08467d37a14eb96c9dd34a3"}, - {file = "scipy-1.7.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5f2cfc359379c56b3a41b17ebd024109b2049f878badc1e454f31418c3a18436"}, - {file = "scipy-1.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb7ae2c4dbdb3c9247e07acc532f91077ae6dbc40ad5bd5dca0bb5a176ee9bda"}, - {file = "scipy-1.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c2d250074cfa76715d58830579c64dff7354484b284c2b8b87e5a38321672c"}, - {file = "scipy-1.7.3-cp38-cp38-win32.whl", hash = "sha256:87069cf875f0262a6e3187ab0f419f5b4280d3dcf4811ef9613c605f6e4dca95"}, - {file = "scipy-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:7edd9a311299a61e9919ea4192dd477395b50c014cdc1a1ac572d7c27e2207fa"}, - {file = "scipy-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eef93a446114ac0193a7b714ce67659db80caf940f3232bad63f4c7a81bc18df"}, - {file = "scipy-1.7.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:eb326658f9b73c07081300daba90a8746543b5ea177184daed26528273157294"}, - {file = "scipy-1.7.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:93378f3d14fff07572392ce6a6a2ceb3a1f237733bd6dcb9eb6a2b29b0d19085"}, - {file = 
"scipy-1.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edad1cf5b2ce1912c4d8ddad20e11d333165552aba262c882e28c78bbc09dbf6"}, - {file = "scipy-1.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d1cc2c19afe3b5a546ede7e6a44ce1ff52e443d12b231823268019f608b9b12"}, - {file = "scipy-1.7.3-cp39-cp39-win32.whl", hash = "sha256:2c56b820d304dffcadbbb6cbfbc2e2c79ee46ea291db17e288e73cd3c64fefa9"}, - {file = "scipy-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f78181a153fa21c018d346f595edd648344751d7f03ab94b398be2ad083ed3e"}, - {file = "scipy-1.7.3.tar.gz", hash = "sha256:ab5875facfdef77e0a47d5fd39ea178b58e60e454a4c85aa1e52fcb80db7babf"}, -] shapely = [ - {file = "Shapely-1.8.1.post1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0ca96a3314b7a38a3bb385531469de1fcf2b2c2979ec2aa4f37b4c70632cf1ad"}, - {file = "Shapely-1.8.1.post1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:493902923fdd135316161a4ece5294ba3ce81accaa54540d2af3b93f7231143a"}, - {file = "Shapely-1.8.1.post1-cp310-cp310-win_amd64.whl", hash = "sha256:b82fc74d5efb11a71283c4ed69b4e036997cc70db4b73c646207ddf0476ade44"}, - {file = "Shapely-1.8.1.post1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:89bc5f3abc1ccbc7682c2e1664153c4f8f125fa9c24bff4abca48685739d5636"}, - {file = "Shapely-1.8.1.post1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:44cb895b1710f7559c28d69dfa08cafe4f58cd4b7a87091a55bdf6711ad9ad66"}, - {file = "Shapely-1.8.1.post1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:437fff3b6274be26ffa3e450de711ee01e436324b5a405952add2146227e3eb5"}, - {file = "Shapely-1.8.1.post1-cp36-cp36m-win32.whl", hash = "sha256:dc0f46212f84c57d13189fc33cf61e13eee292704d7652e931e4b51c54b0c73c"}, - {file = "Shapely-1.8.1.post1-cp36-cp36m-win_amd64.whl", hash = "sha256:9248aad099ecf228fbdd877b0c668823dd83c48798cf04d49a1be75167e3a7ce"}, - {file = "Shapely-1.8.1.post1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bab5ff7c576588acccd665ecce2a0fe7b47d4ce0398f2d5c1e5b2e27d09398d2"}, - {file = "Shapely-1.8.1.post1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2381ce0aff67d569eb509bcc051264aa5fbdc1fdd54f4c09963d0e09f16a8f1b"}, - {file = "Shapely-1.8.1.post1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b4d35e72022b2dbf152d476b0362596011c674ff68be9fc8f2e68e71d86502ca"}, - {file = "Shapely-1.8.1.post1-cp37-cp37m-win32.whl", hash = "sha256:5a420e7112b55a1587412a5b03ebf59e302ddd354da68516d3721718f6b8a7c5"}, - {file = "Shapely-1.8.1.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:c4c366e18edf91196a399f8f0f046f93516002e6d8af0b57c23e7c7d91944b16"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2020fda37c708d44a613c020cea09e81e476f96866f348afc2601e66c0e71db1"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69d5352fb977655c85d2f40a05ae24fc5053cccee77d0a8b1f773e54804e723e"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:83f3c8191d30ae0e3dd557434c48ca591d75342d5a3f42fc5148ec42796be624"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e792635e92c9aacd1452a589a4fa2970114b6a9b1165e09655481f6e58970f5"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-win32.whl", hash = "sha256:8cf7331f61780506976fe2175e069d898e1b04ace73be21aad55c3ee92e58e3a"}, - {file = "Shapely-1.8.1.post1-cp38-cp38-win_amd64.whl", 
hash = "sha256:f109064bdb0753a6bac6238538cfeeb4a09739e2d556036b343b2eabeb9520b2"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aea1e87450adffba3d04ccbaa790df719bb7aa23b05ac797ad16be236a5d0db8"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a3602ba2e7715ddd5d4114173dec83d3181bfb2497e8589676c284aa739fd67"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:679789d774cfe09ca05118cab78c0a6a42985b3ed23bc93606272a4509b4df28"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:363df36370f28fdc7789857929f6ff27e659f64087b4c89f7a47ed43bd3bfe4d"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-win32.whl", hash = "sha256:bc6063875182515d3888180cc4cbdbaa6443e4a4386c4bb25499e9875b75dcac"}, - {file = "Shapely-1.8.1.post1-cp39-cp39-win_amd64.whl", hash = "sha256:54aeb2a57978ce731fd52289d0e1deee7c232d41aed53091f38776378f644184"}, - {file = "Shapely-1.8.1.post1.tar.gz", hash = "sha256:93ff06ff05fbe2be843b93c7b1ad8292e56e665ba01b4708f75ae8a757972e9f"}, + {file = "Shapely-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c9e3400b716c51ba43eea1678c28272580114e009b6c78cdd00c44df3e325fa"}, + {file = "Shapely-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce0b5c5f7acbccf98b3460eecaa40e9b18272b2a734f74fcddf1d7696e047e95"}, + {file = "Shapely-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a40bf497b57a6625b83996aed10ce2233bca0e5471b8af771b186d681433ac5"}, + {file = "Shapely-1.8.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6bdc7728f1e5df430d8c588661f79f1eed4a2728c8b689e12707cfec217f68f8"}, + {file = "Shapely-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a60861b5ca2c488ebcdc706eca94d325c26d1567921c74acc83df5e6913590c7"}, + {file = "Shapely-1.8.2-cp310-cp310-win32.whl", hash = "sha256:840be3f27a1152851c54b968f2e12d718c9f13b7acd51c482e58a70f60f29e31"}, + {file = "Shapely-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:c60f3758212ec480675b820b13035dda8af8f7cc560d2cc67999b2717fb8faef"}, + {file = "Shapely-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:56413f7d32c70b63f239eb0865b24c0c61029e38757de456cc4ab3c416559a0b"}, + {file = "Shapely-1.8.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:256bdf8080bb7bb504d47b2c76919ecebab9708cc1b26266b3ec32b42448f642"}, + {file = "Shapely-1.8.2-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0a0d7752b145343838bd36ed09382d85f5befe426832d7384c5b051c147acbd"}, + {file = "Shapely-1.8.2-cp36-cp36m-win32.whl", hash = "sha256:62056e64b12b6d483d79f8e34bf058d2fe734d51c9227c1713705399434eff3b"}, + {file = "Shapely-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:8e3ed52a081da58eb4a885c157c594876633dbd4eb283f13ba5bf39c82322d76"}, + {file = "Shapely-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7c8eda45085ccdd7f9805ea4a93fdd5eb0b6039a61d5f0cefb960487e6dc17a1"}, + {file = "Shapely-1.8.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:beee3949ddf381735049cfa6532fb234d5d20a5be910c4f2fb7c7295fd7960e3"}, + {file = "Shapely-1.8.2-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e07b0bd2a0e61a8afd4d1c1bd23f3550b711f01274ffb53de99358fd781eefd8"}, + {file = "Shapely-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:78966332a89813b237de357a03f612fd451a871fe6e26c12b6b71645fe8eee39"}, + {file = 
"Shapely-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8fe641f1f61b3d43dd61b5a85d2ef023e6e19bf8f204a5160a1cb1ec645cbc09"}, + {file = "Shapely-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cec89a5617c0137f4678282e983c3d63bf838fb00cdf318cc555b4d8409f7130"}, + {file = "Shapely-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:68c8e18dc9dc8a198c3addc8c9596f64137101f566f04b96ecfca0b214cb8b12"}, + {file = "Shapely-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f12695662c3ad1e6031b3de98f191963d0f09de6d1a4988acd907405644032ba"}, + {file = "Shapely-1.8.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:15a856fbb588ad5d042784e00918c662902776452008c771ecba2ff615cd197a"}, + {file = "Shapely-1.8.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d74de394684d66e25e780b0359fda85be7766af85940fa2dfad728b1a815c71f"}, + {file = "Shapely-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f3fac625690f01f35af665649e993f15f924e740b5c0ac0376900655815521"}, + {file = "Shapely-1.8.2-cp38-cp38-win32.whl", hash = "sha256:1d95842cc6bbbeab673061b63e70b07be9a375c15a60f4098f8fbd29f43af1b4"}, + {file = "Shapely-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:a58e1f362f2091743e5e13212f5d5d16251a4bb63dd0ed587c652d3be9620d3a"}, + {file = "Shapely-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5254240eefc44139ab0d128faf671635d8bdd9c23955ee063d4d6b8f20073ae0"}, + {file = "Shapely-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75042e8039c79dd01f102bb288beace9dc2f49fc44a2dea875f9b697aa8cd30d"}, + {file = "Shapely-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c0fd457ce477b1dced507a72f1e2084c9191bfcb8a1e09886990ebd02acf024"}, + {file = "Shapely-1.8.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6fcb28836ae93809de1dde73c03c9c24bab0ba2b2bf419ddb2aeb72c96d110e9"}, + {file = "Shapely-1.8.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44d2832c1b706bf43101fda92831a083467cc4b4923a7ed17319ab599c1025d8"}, + {file = "Shapely-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:137f1369630408024a62ff79a437a5657e6c5b76b9cd352dde704b425acdb298"}, + {file = "Shapely-1.8.2-cp39-cp39-win32.whl", hash = "sha256:2e02da2e988e74d61f15c720f9f613fab51942aae2dfeacdcb78eadece00e1f3"}, + {file = "Shapely-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:3423299254deec075e79fb7dc7909d702104e4167149de7f45510c3a6342eeea"}, + {file = "Shapely-1.8.2.tar.gz", hash = "sha256:572af9d5006fd5e3213e37ee548912b0341fb26724d6dc8a4e3950c10197ebb6"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1951,61 +1825,65 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] tox = [ - {file = "tox-3.24.5-py2.py3-none-any.whl", hash = "sha256:be3362472a33094bce26727f5f771ca0facf6dafa217f65875314e9a6600c95c"}, - {file = "tox-3.24.5.tar.gz", hash = "sha256:67e0e32c90e278251fea45b696d0fef3879089ccbe979b0c556d35d5a70e2993"}, + {file 
= "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"}, + {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"}, ] twined = [ {file = "twined-0.5.0-py3-none-any.whl", hash = "sha256:446b753e7dc7e7561822759eb601233edb5d1ca7122a63dc9cf9363145e1ba7d"}, {file = "twined-0.5.0.tar.gz", hash = "sha256:f6ef8390142b1d064972f07bcbc51e6357094a9b3f841604a8dc442905cf7d31"}, ] typed-ast = [ - {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, - {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, - {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, - {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, - {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, - {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, - {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, - {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, - {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, - {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, - {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, - {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, - {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, - {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, - {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, - {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, - {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = 
"typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, + {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, + {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] urllib3 = [ {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, ] virtualenv = [ - {file = "virtualenv-20.14.0-py2.py3-none-any.whl", hash = "sha256:1e8588f35e8b42c6ec6841a13c5e88239de1e6e4e4cedfd3916b306dc826ec66"}, - {file = "virtualenv-20.14.0.tar.gz", hash = "sha256:8e5b402037287126e81ccde9432b95a8be5b19d36584f64957060a3488c11ca8"}, + {file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"}, + {file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"}, ] werkzeug = [ - {file = "Werkzeug-2.1.0-py3-none-any.whl", hash = "sha256:094ecfc981948f228b30ee09dbfe250e474823b69b9b1292658301b5894bbf08"}, - {file = "Werkzeug-2.1.0.tar.gz", hash = "sha256:9b55466a3e99e13b1f0686a66117d39bda85a992166e0a79aedfcf3586328f7a"}, + {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, + {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] wtforms = [ {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, ] zipp = [ - {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, - {file = "zipp-3.7.0.tar.gz", hash = 
"sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, + {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, + {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, ] diff --git a/poll.py b/poll.py new file mode 100644 index 00000000..645b3ac8 --- /dev/null +++ b/poll.py @@ -0,0 +1,16 @@ +# On an rPi, you get many addresses because it has lots of ports. Run this address to find which port is connected to +# the gateway receiver. +import os + + +input("Make sure device is removed, then press a key...") +devices_before = os.listdir("/dev") + +input("Now plug in the device, then hit any key...") +devices_after = os.listdir("/dev") + +print("Newly plugged in devices:\n") + +for line in devices_after: + if line not in devices_before: + print(line) diff --git a/pyproject.toml b/pyproject.toml index b90a9f28..c88f92ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "data-gateway" -version = "0.11.10" +version = "0.12.0" repository = "https://github.com/aerosense-ai/data-gateway" description = "A data gateway that runs on-nacelle for relaying data streams from aerosense nodes to cloud." readme = "README.md" @@ -18,13 +18,22 @@ classifiers = [ "Operating System :: OS Independent", ] + [tool.poetry.dependencies] -python = ">=3.7.1,<3.11" +python = ">=3.7.1,<3.10" click = ">=7.1.2,<9" google-cloud-storage = "^1.35" pyserial = "^3.5" python-slugify = "^5" -octue = {version = "0.27.0", extras = ["hdf5"]} +octue = "0.29.2" + +# On raspberry pi, using the pypi binary (which is built against glibc) fails with: +# https://stackoverflow.com/questions/72185264/glibc-2-29-not-found +# The solution is to use the wheel that's built specifically for raspberry pi: https://www.piwheels.org/project/grpcio/ +# However, piwheels only has a built wheel for 1.45.0; so unless we pin that exact version, poetry specifies the latest (currently 1.46.3) +# meaning it gets installed from pypi by default. +grpcio = "1.45.0" + [tool.poetry.dev-dependencies] # Testing. @@ -32,22 +41,23 @@ gcp-storage-emulator = "2021.6.2" tox = "^3.23" # Packages for testing the cloud functions, excluding octue (the requirements for the cloud function are specified in # `cloud_functions/requirements.txt`). -blake3 = "0.2.1" flask_wtf = "^1" google-cloud-bigquery = "^2.29" -pandas = "^1.2" -scipy = "^1.6" shapely = "^1.8" wtforms = { extras = ["email"], version = "^3" } # Code quality. 
flake8 = "^4" pre-commit = "^2.17" coverage = "^6.3" -black = "21.6b0" +h5py = "^3.7.0" +pytest = "^7.1.2" +black = "22.3.0" + [tool.poetry.scripts] gateway = "data_gateway.cli:gateway_cli" + [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" diff --git a/requirements-pi-dev.txt b/requirements-pi-dev.txt new file mode 100644 index 00000000..6e002918 --- /dev/null +++ b/requirements-pi-dev.txt @@ -0,0 +1,89 @@ +appdirs==1.4.4; python_version >= "3.6" +atomicwrites==1.4.0; python_version >= "3.7" and python_full_version < "3.0.0" and sys_platform == "win32" or sys_platform == "win32" and python_version >= "3.7" and python_full_version >= "3.4.0" +attrs==21.4.0; python_full_version >= "3.7.1" and python_version >= "3.7" and python_full_version < "4.0.0" +black==22.3.0; python_full_version >= "3.6.2" +cachetools==5.2.0; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +certifi==2022.5.18.1; python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_version >= "3.6" and python_version < "3.11" and python_full_version >= "3.6.0" +cffi==1.15.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +cfgv==3.3.1; python_full_version >= "3.6.1" and python_version >= "3.7" +charset-normalizer==2.0.12; python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11" +click==8.1.3; python_version >= "3.7" +colorama==0.4.4; platform_system == "Windows" and python_version >= "3.7" and python_full_version >= "3.7.1" and sys_platform == "win32" and python_full_version < "4.0.0" +coolname==1.1.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" +coverage==6.4.1; python_version >= "3.7" +distlib==0.3.4; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +dnspython==2.2.1; python_version >= "3.6" and python_version < "4.0" and (python_version >= "3.7" and python_full_version < "3.0.0" or python_version >= "3.7" and python_full_version >= "3.5.0") +email-validator==1.2.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_version >= "3.7" and python_full_version >= "3.5.0" +filelock==3.7.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +flake8==4.0.1; python_version >= "3.6" +flask-wtf==1.0.1; python_version >= "3.6" +flask==2.0.3; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +fs==2.4.13; python_version >= "3.6" +gcp-storage-emulator==2021.6.2; python_version >= "3.6" +google-api-core==2.8.2; python_full_version >= "3.7.1" and python_version >= "3.6" and python_version < "3.11" and python_full_version < "4.0.0" +google-auth==2.7.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") 
+google-cloud-bigquery==2.34.3; python_version >= "3.6" and python_version < "3.11" +google-cloud-core==2.3.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_version < "3.11" and python_full_version < "4.0.0" +google-cloud-pubsub==2.13.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +google-cloud-secret-manager==2.11.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +google-cloud-storage==1.44.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0") +google-crc32c==1.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +google-resumable-media==2.3.3; python_full_version >= "3.7.1" and python_version >= "3.6" and python_version < "3.11" and python_full_version < "4.0.0" +googleapis-common-protos==1.56.2; python_full_version >= "3.7.1" and python_version >= "3.6" and python_version < "3.11" and python_full_version < "4.0.0" +grpc-google-iam-v1==0.12.4; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +grpcio-status==1.45.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_version < "3.11" and python_full_version < "4.0.0" +grpcio==1.45.0; python_version >= "3.6" +gunicorn==20.1.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.5" +h5py==3.7.0; python_version >= "3.7" +identify==2.5.1; python_version >= "3.7" +idna==3.3; python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.7" and python_version < "3.11" +importlib-metadata==4.2.0; python_version < "3.8" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.6") and python_full_version >= "3.7.1" and python_full_version < "4.0.0" and (python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version >= "3.7" and python_version < "3.8") +importlib-resources==5.8.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" and python_version < "3.9" +iniconfig==1.1.1; python_version >= "3.7" +itsdangerous==2.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +jinja2==3.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +jsonschema==4.4.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +markupsafe==2.1.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +mccabe==0.6.1; python_version >= "3.6" +mypy-extensions==0.4.3; python_full_version >= "3.6.2" +nodeenv==1.6.0; python_version >= "3.7" +numpy==1.21.6; python_version >= "3.7" and python_version < "3.11" +octue==0.29.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" +packaging==21.3; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" 
and python_version >= "3.6") and python_version < "3.11" +pathspec==0.9.0; python_full_version >= "3.6.2" +platformdirs==2.5.2; python_version >= "3.7" and python_full_version >= "3.6.2" and (python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7") +pluggy==1.0.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +pre-commit==2.19.0; python_version >= "3.7" +proto-plus==1.20.5; python_version >= "3.6" and python_version < "3.11" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" +protobuf==3.20.1; python_full_version >= "3.7.1" and python_version >= "3.7" and python_version < "3.11" and python_full_version < "4.0.0" and (python_version >= "3.7" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.7" and python_version < "3.11") +py==1.11.0; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +pyasn1-modules==0.2.8; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +pyasn1==0.4.8; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") and python_version < "4" +pycodestyle==2.8.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6" +pycparser==2.21; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +pyflakes==2.4.0; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.4.0" and python_version >= "3.6" +pyparsing==3.0.9; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.6") and python_version < "3.11" +pyrsistent==0.18.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +pyserial==3.5 +pytest==7.1.2; python_version >= "3.7" +python-dateutil==2.8.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_version >= "3.6" and python_version < "3.11" and python_full_version >= "3.3.0") +python-dotenv==0.20.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +python-slugify==5.0.2; python_version >= "3.6" +pytz==2022.1; python_version >= "3.6" +pyyaml==6.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +requests==2.27.1; python_version >= "3.6" and python_version < "3.11" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" +rsa==4.8; python_version >= "3.6" and python_version < "4" and python_full_version >= "3.7.1" and python_full_version 
< "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") +shapely==1.8.2; python_version >= "3.6" +six==1.16.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version >= "3.6" and python_version < "3.11") and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_version >= "3.6" and python_version < "3.11" and python_full_version >= "3.3.0") and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.6") and (python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7") and python_version < "3.11" +text-unidecode==1.3; python_version >= "3.6" +toml==0.10.2; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +tomli==2.0.1; python_version < "3.11" and python_full_version >= "3.6.2" and python_version >= "3.7" +tox==3.25.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") +twined==0.5.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +typed-ast==1.5.4; python_version < "3.8" and implementation_name == "cpython" and python_full_version >= "3.6.2" and python_version >= "3.6" +typing-extensions==4.2.0; python_version < "3.8" and python_full_version >= "3.7.1" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.6") and python_full_version < "4.0.0" +urllib3==1.26.9; python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.11" or python_full_version >= "3.6.0" and python_version < "3.11" and python_version >= "3.6" +virtualenv==20.14.1; python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version >= "3.7" +werkzeug==2.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +wtforms==3.0.1; python_version >= "3.7" +zipp==3.8.0; python_version < "3.8" and python_version >= "3.7" and (python_version >= "3.6" and python_full_version < "3.0.0" and python_version < "3.8" or python_full_version >= "3.5.0" and python_version < "3.8" and python_version >= "3.6") and python_full_version >= "3.7.1" and python_full_version < "4.0.0" diff --git a/requirements-pi.txt b/requirements-pi.txt new file mode 100644 index 00000000..9271f83a --- /dev/null +++ b/requirements-pi.txt @@ -0,0 +1,52 @@ +attrs==21.4.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +cachetools==5.2.0; python_version >= "3.7" and python_version < "4.0" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +certifi==2022.5.18.1; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6" +cffi==1.15.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and 
(python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +charset-normalizer==2.0.12; python_full_version >= "3.6.0" and python_version >= "3.6" +click==8.1.3; python_version >= "3.7" +colorama==0.4.4; platform_system == "Windows" and python_version >= "3.7" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" +coolname==1.1.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" +flask==2.0.3; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +google-api-core==2.8.2; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +google-auth==2.7.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +google-cloud-core==2.3.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +google-cloud-pubsub==2.13.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +google-cloud-secret-manager==2.11.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +google-cloud-storage==1.44.0; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.6.0") +google-crc32c==1.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +google-resumable-media==2.3.3; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +googleapis-common-protos==1.56.2; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +grpc-google-iam-v1==0.12.4; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +grpcio-status==1.45.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +grpcio==1.45.0; python_version >= "3.6" +gunicorn==20.1.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.5" +idna==3.3; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6" +importlib-metadata==4.2.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" +importlib-resources==5.8.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" and python_version < "3.9" +itsdangerous==2.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +jinja2==3.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +jsonschema==4.4.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +markupsafe==2.1.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +octue==0.29.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" +packaging==21.3; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +proto-plus==1.20.5; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +protobuf==3.20.1; python_full_version 
>= "3.7.1" and python_version >= "3.7" and python_full_version < "4.0.0" and (python_version >= "3.7" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.7") +pyasn1-modules==0.2.8; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +pyasn1==0.4.8; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") and python_version < "4" +pycparser==2.21; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +pyparsing==3.0.9; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +pyrsistent==0.18.1; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +pyserial==3.5 +python-dateutil==2.8.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" +python-dotenv==0.20.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +python-slugify==5.0.2; python_version >= "3.6" +pyyaml==6.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +requests==2.27.1; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" +rsa==4.8; python_version >= "3.6" and python_version < "4" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +six==1.16.0; python_full_version >= "3.7.1" and python_version >= "3.6" and python_full_version < "4.0.0" and (python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version >= "3.6") +text-unidecode==1.3; python_version >= "3.6" +twined==0.5.0; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.6" +typing-extensions==4.2.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" +urllib3==1.26.9; python_version >= "3.6" and python_full_version < "3.0.0" or python_full_version >= "3.6.0" and python_version < "4" and python_version >= "3.6" +werkzeug==2.1.2; python_full_version >= "3.7.1" and python_full_version < "4.0.0" and python_version >= "3.7" +zipp==3.8.0; python_version < "3.8" and python_version >= "3.7" and python_full_version >= "3.7.1" and python_full_version < "4.0.0" diff --git a/tests/__init__.py b/tests/__init__.py index 40c120c5..a971758c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,3 +1,6 @@ +import multiprocessing +import os + from octue.log_handlers import apply_log_handler from data_gateway.configuration import Configuration @@ -5,10 +8,13 @@ apply_log_handler(include_process_name=True) +if os.name != "nt": + # Set the multiprocessing start method to "fork" so that patching works across different processes. 
+ multiprocessing.set_start_method("fork") -TEST_BUCKET_NAME = "a-bucket-name" -PACKET_KEY = Configuration().packet_key.to_bytes(1, "little") +TEST_BUCKET_NAME = "a-bucket-name" +ZEROTH_NODE_LEADING_BYTE = Configuration().get_leading_byte(0) LENGTH = bytes([244]) RANDOM_BYTES = [ diff --git a/tests/base.py b/tests/base.py index 50ce4609..811cc769 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,4 +1,3 @@ -import datetime import json import os import unittest @@ -32,7 +31,7 @@ def random_sensor_data(self, rows, cols, first_sample_time, last_sample_time): return random_data def random_window(self, sensors=None, window_duration=None): - """Generate a window dict. with random data for given sensors and duration of window_duration [sec.] + """Generate a window dict with random data for given sensors and duration of window_duration [sec.] :param list sensors: List with sensor names :param float window_duration: Unit: s @@ -41,14 +40,16 @@ def random_window(self, sensors=None, window_duration=None): """ sensors = sensors or ["Constat"] window_duration = window_duration or 1 - window = {"sensor_time_offset": datetime.datetime.now().timestamp(), "sensor_data": {}} + window = {"0": {}} + + node_configuration = self.VALID_CONFIGURATION["nodes"]["0"] for sensor in sensors: - rows = int(window_duration // self.VALID_CONFIGURATION["period"][sensor]) + 1 - cols = self.VALID_CONFIGURATION["number_of_sensors"][sensor] + rows = int(window_duration // node_configuration["periods"][sensor]) + 1 + cols = node_configuration["number_of_sensors"][sensor] # Compute last sample time within the window duration - last_sample_time = (rows - 1) * self.VALID_CONFIGURATION["period"][sensor] - window["sensor_data"][sensor] = self.random_sensor_data(rows, cols, 0, last_sample_time) + last_sample_time = (rows - 1) * node_configuration["periods"][sensor] + window["0"][sensor] = self.random_sensor_data(rows, cols, 0, last_sample_time) return window diff --git a/tests/test_cloud_functions/test_big_query.py b/tests/test_cloud_functions/test_big_query.py index 44d93dc1..33b069db 100644 --- a/tests/test_cloud_functions/test_big_query.py +++ b/tests/test_cloud_functions/test_big_query.py @@ -39,6 +39,7 @@ def test_insert_sensor_data(self): with patch("big_query.bigquery.Client", return_value=mock_big_query_client): BigQueryDataset(project_name="my-project", dataset_name="my-dataset").add_sensor_data( data=data, + node_id="0", configuration_id="dbfed555-1b70-4191-96cb-c22071464b90", installation_reference="turbine-1", label="my-test", @@ -49,6 +50,7 @@ def test_insert_sensor_data(self): expected_rows = [ { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "microphone", "sensor_value": [1, 2, 3, 4], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -57,6 +59,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "barometer", "sensor_value": [6, 7, 8, 9], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -65,6 +68,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "barometer_thermometer", "sensor_value": [11, 12, 13, 14], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -73,6 +77,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": 
"accelerometer", "sensor_value": [16, 17, 18, 19], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -81,6 +86,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "gyroscope", "sensor_value": [21, 22, 23, 24], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -89,6 +95,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "magnetometer", "sensor_value": [26, 27, 28, 29], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -97,6 +104,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "battery_voltmeter", "sensor_value": [31, 32, 33, 34], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -105,6 +113,7 @@ def test_insert_sensor_data(self): }, { "datetime": datetime.datetime(2021, 11, 10, 15, 55, 20, 639327), + "node_id": "0", "sensor_type_reference": "connection_statistics", "sensor_value": [36, 37, 38, 39], "configuration_id": "dbfed555-1b70-4191-96cb-c22071464b90", @@ -156,9 +165,7 @@ def test_add_installation(self): BigQueryDataset(project_name="my-project", dataset_name="my-dataset").add_installation( reference="my-installation", turbine_id="my-turbine", - blade_id="my-blade", - hardware_version="1.0.0", - sensor_coordinates={"my-sensor": [[0, 1, 2], [3, 8, 7]]}, + receiver_firmware_version="1.0.0", ) self.assertEqual( @@ -166,9 +173,7 @@ def test_add_installation(self): { "reference": "my-installation", "turbine_id": "my-turbine", - "blade_id": "my-blade", - "hardware_version": "1.0.0", - "sensor_coordinates": '{"my-sensor": [[0, 1, 2], [3, 8, 7]]}', + "receiver_firmware_version": "1.0.0", "location": None, }, ) @@ -186,18 +191,18 @@ def test_add_installation_raises_error_if_installation_already_exists(self): dataset.add_installation( reference="my-installation", turbine_id="my-turbine", - blade_id="my-blade", - hardware_version="1.0.0", - sensor_coordinates={"my-sensor": [[0, 1, 2], [3, 8, 7]]}, + receiver_firmware_version="1.0.0", ) def test_add_configuration(self): - """Test that a configuration can be added.""" + """Test that a configuration can be added. The sha256 hash is used in the tests but blake3 is used in + production. This is to avoid the need to install rust to install blake3 as a development dependency. 
+ """ mock_big_query_client = MockBigQueryClient(expected_query_result=[]) with patch("big_query.bigquery.Client", return_value=mock_big_query_client): BigQueryDataset(project_name="my-project", dataset_name="my-dataset").add_configuration( - configuration={"blah": "blah", "installation_data": {"stuff": "data"}} + configuration={"nodes": {"0": {"blah": "blah"}}, "gateway": {"stuff": "data"}} ) del mock_big_query_client.rows[0][0]["id"] @@ -205,10 +210,10 @@ def test_add_configuration(self): self.assertEqual( mock_big_query_client.rows[0][0], { - "software_configuration": '{"blah": "blah"}', - "software_configuration_hash": "a9a553b17102e3f08a1ca32486086cdb8699f8f50c358b0fed8071b1d4c11bb2", - "installation_data": '{"stuff": "data"}', - "installation_data_hash": "9fe86ad8340043710ee7c69ac555ad2e6981dc7a2c6d18f1757b899de5de1240", + "nodes_configuration": '{"0": {"blah": "blah"}}', + "nodes_configuration_hash": "1aea08f4603f76a55d3267dd40c310e14787a8d64663a72cfc62f58152e44504", + "gateway_configuration": '{"stuff": "data"}', + "gateway_configuration_hash": "6076cf0f824bcf1a887a96c75c1a33ec720ea271776f03e8168df3feed983c91", }, ) @@ -224,7 +229,7 @@ def test_add_configuration_raises_error_if_installation_already_exists(self): with self.assertRaises(ConfigurationAlreadyExists): configuration_id = dataset.add_configuration( - configuration={"blah": "blah", "installation_data": {"stuff": "data"}} + configuration={"nodes": {"0": {"blah": "blah"}}, "gateway": {"stuff": "data"}} ) self.assertEqual(configuration_id, existing_configuration_id) diff --git a/tests/test_cloud_functions/test_deployment.py b/tests/test_cloud_functions/test_deployment.py index 57ffbaf1..961776b0 100644 --- a/tests/test_cloud_functions/test_deployment.py +++ b/tests/test_cloud_functions/test_deployment.py @@ -10,7 +10,7 @@ from octue.cloud.storage.client import GoogleCloudStorageClient from octue.utils.encoders import OctueJSONEncoder -from data_gateway import MICROPHONE_SENSOR_NAME +from data_gateway.configuration import DEFAULT_SENSOR_NAMES from tests.base import DatasetMixin @@ -27,12 +27,12 @@ def test_upload_window(self): """Test that a window can be uploaded to a cloud bucket, its data processed by the test cloud function, and the results uploaded to a test BigQuery instance. """ - window = self.random_window(sensors=["Constat", MICROPHONE_SENSOR_NAME], window_duration=1) + window = self.random_window(sensors=["Constat", DEFAULT_SENSOR_NAMES[0]], window_duration=1) upload_path = storage.path.generate_gs_path(os.environ["TEST_BUCKET_NAME"], "window-0.json") test_label = f"test-{uuid.uuid4()}" configuration = copy.deepcopy(self.VALID_CONFIGURATION) - configuration["session_data"]["label"] = test_label + configuration["session"]["label"] = test_label self.storage_client.upload_from_string( string=json.dumps(window, cls=OctueJSONEncoder), diff --git a/tests/test_cloud_functions/test_main.py b/tests/test_cloud_functions/test_main.py index fc88c97e..ea185975 100644 --- a/tests/test_cloud_functions/test_main.py +++ b/tests/test_cloud_functions/test_main.py @@ -77,7 +77,7 @@ def test_upload_window(self): # Check configuration without user data was added. 
expected_configuration = copy.deepcopy(self.VALID_CONFIGURATION) - del expected_configuration["session_data"] + del expected_configuration["session"] self.assertIn("add_configuration", mock_dataset.mock_calls[1][0]) self.assertEqual(mock_dataset.mock_calls[1].args[0], expected_configuration) @@ -120,7 +120,7 @@ def test_upload_window_with_microphone_data(self): mock_big_query_client.rows[0][0], { "path": expected_microphone_cloud_path, - "project_name": "destination-project", + "node_id": "0", "configuration_id": configuration_id, "installation_reference": "aventa_turbine", "label": "my-test-1", @@ -133,14 +133,14 @@ def test_upload_window_with_microphone_data(self): self.assertTrue( np.equal( np.array(f["dataset"])[:, 1:], - window["sensor_data"][MICROPHONE_SENSOR_NAME][:, 1:], + window["0"][MICROPHONE_SENSOR_NAME][:, 1:], ).all() ) # Check non-microphone sensor data was added to BigQuery. self.assertEqual(mock_big_query_client.rows[1][0]["sensor_type_reference"], "connection_statistics") self.assertEqual(mock_big_query_client.rows[1][0]["configuration_id"], configuration_id) - self.assertEqual(len(mock_big_query_client.rows[1]), len(window["sensor_data"]["Constat"])) + self.assertEqual(len(mock_big_query_client.rows[1]), len(window["0"]["Constat"])) def test_upload_window_for_existing_configuration(self): """Test that uploading a window with a configuration that already exists in BigQuery does not fail.""" @@ -338,16 +338,20 @@ def test_create_installation_with_invalid_data(self): with self.app.test_client() as client: for expected_error_field, data in ( - ("reference", {"reference": "not slugified", "hardware_version": "0.0.1"}), - ("reference", {"reference": None, "hardware_version": "0.0.1"}), - ("hardware_version", {"reference": "is-slugified", "hardware_version": None}), + ("reference", {"reference": "not slugified", "receiver_firmware_version": "0.0.1"}), + ("reference", {"reference": None, "receiver_firmware_version": "0.0.1"}), + ("receiver_firmware_version", {"reference": "is-slugified", "receiver_firmware_version": None}), ( "longitude", - {"reference": "is-slugified", "hardware_version": "0.0.1", "longitude": "not-a-number"}, + { + "reference": "is-slugified", + "receiver_firmware_version": "0.0.1", + "longitude": "not-a-number", + }, ), ( "latitude", - {"reference": "is-slugified", "hardware_version": "0.0.1", "latitude": "not-a-number"}, + {"reference": "is-slugified", "receiver_firmware_version": "0.0.1", "latitude": "not-a-number"}, ), ): with self.subTest(expected_error_field=expected_error_field, data=data): @@ -368,9 +372,8 @@ def test_error_raised_if_installation_reference_already_exists(self): response = client.post( json={ "reference": "hello", - "hardware_version": "0.0.1", + "receiver_firmware_version": "0.0.1", "turbine_id": "0", - "blade_id": "0", "sensor_coordinates": {"blah_sensor": [[0, 0, 0]]}, } ) @@ -387,9 +390,8 @@ def test_error_raised_if_internal_server_error_occurs(self): response = client.post( json={ "reference": "hello", - "hardware_version": "0.0.1", + "receiver_firmware_version": "0.0.1", "turbine_id": "0", - "blade_id": "0", "sensor_coordinates": {"blah_sensor": [[0, 0, 0]]}, } ) @@ -402,10 +404,8 @@ def test_create_installation_with_valid_data_for_all_fields(self): """ data = { "reference": "hello", - "hardware_version": "0.0.1", + "receiver_firmware_version": "0.0.1", "turbine_id": "0", - "blade_id": "0", - "sensor_coordinates": {"blah_sensor": [[0, 0, 0]]}, "latitude": 0, "longitude": 1, } @@ -426,9 +426,8 @@ def 
test_create_installation_with_only_required_inputs(self):
         response = client.post(
             json={
                 "reference": "hello",
-                "hardware_version": "0.0.1",
+                "receiver_firmware_version": "0.0.1",
                 "turbine_id": "0",
-                "blade_id": "0",
                 "sensor_coordinates": {"blah_sensor": [[0, 0, 0]]},
             }
         )
diff --git a/tests/test_data_gateway/test_cli.py b/tests/test_data_gateway/test_cli.py
index 60e7389a..2eb403d9 100644
--- a/tests/test_data_gateway/test_cli.py
+++ b/tests/test_data_gateway/test_cli.py
@@ -2,6 +2,7 @@
 import os
 import tempfile
 import time
+import unittest
 from unittest import mock
 from unittest.mock import call
@@ -12,7 +13,7 @@
 from data_gateway.configuration import Configuration
 from data_gateway.dummy_serial import DummySerial
 from data_gateway.exceptions import DataMustBeSavedError
-from tests import LENGTH, PACKET_KEY, RANDOM_BYTES
+from tests import LENGTH, RANDOM_BYTES, ZEROTH_NODE_LEADING_BYTE
 from tests.base import BaseTestCase
@@ -193,13 +194,18 @@ def test_start_and_stop_in_interactive_mode(self):
         self.assertIsNone(result.exception)
         self.assertEqual(result.exit_code, 0)

+    @unittest.skipIf(
+        condition=os.name == "nt",
+        reason="Unittest mock patches are needed across processes for this test to work. For that to happen, the "
+        "'fork' multiprocessing start method is needed, which isn't available on Windows.",
+    )
     def test_save_locally(self):
         """Ensure `--save-locally` mode writes data to disk."""
         with EnvironmentVariableRemover("GOOGLE_APPLICATION_CREDENTIALS"):
             serial_port = DummySerial(port="test")
             sensor_type = bytes([34])
-            serial_port.write(data=b"".join((PACKET_KEY, sensor_type, LENGTH, RANDOM_BYTES[0])))
-            serial_port.write(data=b"".join((PACKET_KEY, sensor_type, LENGTH, RANDOM_BYTES[1])))
+            serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, sensor_type, LENGTH, RANDOM_BYTES[0])))
+            serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, sensor_type, LENGTH, RANDOM_BYTES[1])))

             with tempfile.TemporaryDirectory() as temporary_directory:
                 with mock.patch("serial.Serial", return_value=serial_port):
@@ -223,9 +229,9 @@ def test_save_locally(self):
                 with open(os.path.join(temporary_directory, session_subdirectory, "window-0.json")) as f:
                     data = json.loads(f.read())

-                self.assertEqual(len(data), 2)
-                self.assertTrue(len(data["sensor_data"]["Baros_P"][0]) > 1)
-                self.assertTrue(len(data["sensor_data"]["Baros_T"][0]) > 1)
+                self.assertEqual(len(data["0"]), 2)
+                self.assertTrue(len(data["0"]["Baros_P"][0]) > 1)
+                self.assertTrue(len(data["0"]["Baros_T"][0]) > 1)

         self.assertIsNone(result.exception)
         self.assertEqual(result.exit_code, 0)
@@ -264,12 +270,10 @@ def test_create_installation_slugifies_and_lowercases_names(self):
             with open(temporary_file.name, "w") as f:
                 json.dump(
                     {
-                        "installation_data": {
+                        "gateway": {
                             "installation_reference": "My Installation_1",
                             "turbine_id": 0,
-                            "blade_id": 0,
-                            "hardware_version": "1.7.19",
-                            "sensor_coordinates": {},
+                            "receiver_firmware_version": "1.7.19",
                         }
                     },
                     f,
@@ -290,9 +294,7 @@ def test_create_installation_slugifies_and_lowercases_names(self):
                 json={
                     "reference": "my-installation-1",
                     "turbine_id": 0,
-                    "blade_id": 0,
-                    "hardware_version": "1.7.19",
-                    "sensor_coordinates": "{}",
+                    "receiver_firmware_version": "1.7.19",
                 },
             )
@@ -303,12 +305,10 @@ def test_create_installation_with_longitude_and_latitude(self):
             with open(temporary_file.name, "w") as f:
                 json.dump(
                     {
-                        "installation_data": {
+                        "gateway": {
                             "installation_reference": "My Installation_1",
                             "turbine_id": 0,
-                            "blade_id": 0,
-                            "hardware_version": "1.7.19",
-                            "sensor_coordinates": {},
+                            "receiver_firmware_version": "1.7.19",
                             "longitude": 3.25604,
                             "latitude": 178.24833,
                         }
@@ -318,7 +318,9 @@ def test_create_installation_with_longitude_and_latitude(self):
             with mock.patch("requests.post", return_value=mock.Mock(status_code=200)) as mock_post:
                 result = CliRunner().invoke(
-                    gateway_cli, ["create-installation", f"--config-file={temporary_file.name}"], input="Y"
+                    gateway_cli,
+                    ["create-installation", f"--config-file={temporary_file.name}"],
+                    input="Y",
                 )

             self.assertIsNone(result.exception)
@@ -329,9 +331,7 @@ def test_create_installation_with_longitude_and_latitude(self):
                 json={
                     "reference": "my-installation-1",
                     "turbine_id": 0,
-                    "blade_id": 0,
-                    "hardware_version": "1.7.19",
-                    "sensor_coordinates": "{}",
+                    "receiver_firmware_version": "1.7.19",
                     "longitude": 3.25604,
                     "latitude": 178.24833,
                 },
             )
@@ -346,12 +346,10 @@ def test_create_installation_raises_error_if_status_code_is_not_200(self):
             with open(temporary_file.name, "w") as f:
                 json.dump(
                     {
-                        "installation_data": {
+                        "gateway": {
                             "installation_reference": "My Installation_1",
                             "turbine_id": 0,
-                            "blade_id": 0,
-                            "hardware_version": "1.7.19",
-                            "sensor_coordinates": {},
+                            "receiver_firmware_version": "1.7.19",
                         }
                     },
                     f,
diff --git a/tests/test_data_gateway/test_configuration.py b/tests/test_data_gateway/test_configuration.py
index b9a6f397..9303b803 100644
--- a/tests/test_data_gateway/test_configuration.py
+++ b/tests/test_data_gateway/test_configuration.py
@@ -15,12 +15,12 @@ def test_constructing_from_valid_configuration_dictionary(self):
         """Ensure a valid dictionary can be used to build a configuration."""
         Configuration.from_dict(self.VALID_CONFIGURATION)

-    def test_dictionary_has_to_have_all_attributes_for_configuration_construction(self):
-        """Test that a dictionary has to include all the attributes of a configuration to be able to construct one (i.e.
-        that default arguments are unavailable when constructing from a dictionary).
+    def test_dictionary_has_to_have_all_sub_configurations_for_configuration_construction(self):
+        """Test that a dictionary has to include all the sub-configurations of a configuration to be able to construct
+        one (i.e. that default arguments for the sub-configurations are unavailable when constructing from a dictionary).
""" invalid_configuration = self.VALID_CONFIGURATION.copy() - del invalid_configuration["baudrate"] + del invalid_configuration["gateway"] with self.assertRaises(KeyError): Configuration.from_dict(invalid_configuration) diff --git a/tests/test_data_gateway/test_data_gateway.py b/tests/test_data_gateway/test_data_gateway.py index 06eb5f14..df9e14e9 100644 --- a/tests/test_data_gateway/test_data_gateway.py +++ b/tests/test_data_gateway/test_data_gateway.py @@ -12,7 +12,7 @@ from data_gateway.data_gateway import DataGateway from data_gateway.dummy_serial import DummySerial from data_gateway.persistence import TimeBatcher -from tests import LENGTH, PACKET_KEY, RANDOM_BYTES, TEST_BUCKET_NAME +from tests import LENGTH, RANDOM_BYTES, TEST_BUCKET_NAME, ZEROTH_NODE_LEADING_BYTE from tests.base import BaseTestCase @@ -39,8 +39,8 @@ def test_configuration_file_is_persisted(self): serial_port = DummySerial(port="test") packet_type = bytes([34]) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[0]))) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[1]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[1]))) try: data_gateway = DataGateway( @@ -77,8 +77,8 @@ def test_sensors_individually(self): (bytes([52]), "Constat"), ]: with self.subTest(sensor_name=sensor_name): - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[0]))) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[1]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[1]))) data_gateway = DataGateway( serial_port, @@ -92,11 +92,14 @@ def test_sensors_individually(self): data_gateway.start(stop_when_no_more_data_after=0.1) self._check_data_is_written_to_files( - data_gateway.packet_reader.local_output_directory, sensor_names=[sensor_name] + data_gateway.packet_reader.local_output_directory, + node_id="0", + sensor_names=[sensor_name], ) self._check_windows_are_uploaded_to_cloud( data_gateway.packet_reader.cloud_output_directory, + node_id="0", sensor_names=[sensor_name], number_of_windows_to_check=1, ) @@ -112,11 +115,11 @@ def test_data_gateway_with_connections_statistics_in_sleep_mode(self): serial_port = DummySerial(port="test") # Enter sleep state - serial_port.write(data=b"".join((PACKET_KEY, bytes([56]), bytes([1]), bytes([1])))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, bytes([56]), bytes([1]), bytes([1])))) packet_type = bytes([52]) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[0]))) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[1]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[1]))) with tempfile.TemporaryDirectory() as temporary_directory: data_gateway = DataGateway( @@ -133,8 +136,11 @@ def test_data_gateway_with_connections_statistics_in_sleep_mode(self): data_gateway.start(stop_when_no_more_data_after=0.1) self._check_data_is_written_to_files( - data_gateway.packet_reader.local_output_directory, sensor_names=["Constat"] + data_gateway.packet_reader.local_output_directory, + node_id="0", 
+ sensor_names=["Constat"], ) + self.assertEqual(0, mock_logger.warning.call_count) def test_all_sensors_together(self): @@ -144,8 +150,8 @@ def test_all_sensors_together(self): sensor_names = ("Baros_P", "Baros_T", "Diff_Baros", "Mics", "Acc", "Gyro", "Mag") for packet_type in packet_types: - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[0]))) - serial_port.write(data=b"".join((PACKET_KEY, packet_type, LENGTH, RANDOM_BYTES[1]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[1]))) try: data_gateway = DataGateway( @@ -160,11 +166,14 @@ def test_all_sensors_together(self): data_gateway.start(stop_when_no_more_data_after=0.1) self._check_data_is_written_to_files( - data_gateway.packet_reader.local_output_directory, sensor_names=sensor_names + data_gateway.packet_reader.local_output_directory, + node_id="0", + sensor_names=sensor_names, ) self._check_windows_are_uploaded_to_cloud( data_gateway.packet_reader.cloud_output_directory, + node_id="0", sensor_names=sensor_names, number_of_windows_to_check=1, ) @@ -172,6 +181,64 @@ def test_all_sensors_together(self): finally: self._delete_temporary_output_directories() + def test_with_two_nodes(self): + """Test receiving data from different sensors on two separate nodes.""" + serial_port = DummySerial(port="test") + + packet_types = { + "0": (bytes([34]), bytes([36])), + "1": (bytes([38]), bytes([42]), bytes([44]), bytes([46])), + } + + sensor_names = { + "0": ("Baros_P", "Baros_T", "Diff_Baros"), + "1": ("Mics", "Acc", "Gyro", "Mag"), + } + + # Give both nodes the default node configuration. + configuration = Configuration() + configuration.nodes["1"] = configuration.nodes["0"] + + for packet_type in packet_types["0"]: + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((ZEROTH_NODE_LEADING_BYTE, packet_type, LENGTH, RANDOM_BYTES[1]))) + + first_node_leading_byte = configuration.get_leading_byte(node_id="1") + + for packet_type in packet_types["1"]: + serial_port.write(data=b"".join((first_node_leading_byte, packet_type, LENGTH, RANDOM_BYTES[0]))) + serial_port.write(data=b"".join((first_node_leading_byte, packet_type, LENGTH, RANDOM_BYTES[1]))) + + try: + with patch("data_gateway.data_gateway.DataGateway._load_configuration", return_value=configuration): + data_gateway = DataGateway( + serial_port, + save_locally=True, + output_directory=self._generate_temporary_output_directory_name(), + window_size=self.WINDOW_SIZE, + bucket_name=TEST_BUCKET_NAME, + stop_sensors_on_exit=False, + ) + + data_gateway.start(stop_when_no_more_data_after=0.1) + + for node_id in packet_types.keys(): + self._check_data_is_written_to_files( + data_gateway.packet_reader.local_output_directory, + node_id=node_id, + sensor_names=sensor_names[node_id], + ) + + self._check_windows_are_uploaded_to_cloud( + data_gateway.packet_reader.cloud_output_directory, + node_id=node_id, + sensor_names=sensor_names[node_id], + number_of_windows_to_check=1, + ) + + finally: + self._delete_temporary_output_directories() + def _generate_temporary_output_directory_name(self): """Generate a temporary output directory name. A regular `tempfile` temporary directory cannot be used as, on Windows, the path will contain a colon, which is invalid in a cloud path. 
The output directories are needed @@ -194,8 +261,17 @@ def _delete_temporary_output_directories(self): except FileNotFoundError: pass - def _check_windows_are_uploaded_to_cloud(self, output_directory, sensor_names, number_of_windows_to_check=5): - """Check that non-trivial windows from a packet reader for a particular sensor are uploaded to cloud storage.""" + def _check_windows_are_uploaded_to_cloud( + self, + output_directory, + node_id, + sensor_names, + number_of_windows_to_check=5, + ): + """Check that non-trivial windows from a packet reader for a particular sensor are uploaded to cloud storage. + + :return None: + """ window_paths = [ blob.name for blob in self.storage_client.scandir( @@ -212,11 +288,14 @@ def _check_windows_are_uploaded_to_cloud(self, output_directory, sensor_names, n ) for name in sensor_names: - lines = data["sensor_data"][name] + lines = data[node_id][name] self.assertTrue(len(lines[0]) > 1) - def _check_data_is_written_to_files(self, output_directory, sensor_names): - """Check that non-trivial data is written to the given file.""" + def _check_data_is_written_to_files(self, output_directory, node_id, sensor_names): + """Check that non-trivial data is written to the given file. + + :return None: + """ windows = [file for file in os.listdir(output_directory) if file.startswith(TimeBatcher._file_prefix)] self.assertTrue(len(windows) > 0) @@ -225,5 +304,5 @@ def _check_data_is_written_to_files(self, output_directory, sensor_names): data = json.load(f) for name in sensor_names: - lines = data["sensor_data"][name] + lines = data[node_id][name] self.assertTrue(len(lines[0]) > 1) diff --git a/tests/test_data_gateway/test_packet_reader.py b/tests/test_data_gateway/test_packet_reader.py index e6844d42..3c45846d 100644 --- a/tests/test_data_gateway/test_packet_reader.py +++ b/tests/test_data_gateway/test_packet_reader.py @@ -3,7 +3,7 @@ from unittest.mock import patch from data_gateway.packet_reader import PacketReader -from tests import LENGTH, PACKET_KEY, RANDOM_BYTES +from tests import LENGTH, RANDOM_BYTES, ZEROTH_NODE_LEADING_BYTE from tests.base import BaseTestCase @@ -11,7 +11,14 @@ class TestPacketReader(BaseTestCase): def test_error_is_logged_if_unknown_sensor_type_packet_is_received(self): """Test that an error is logged if an unknown sensor type packet is received.""" queue = multiprocessing.Queue() - queue.put({"packet_type": bytes([0]), "packet": b"".join((PACKET_KEY, bytes([0]), LENGTH, RANDOM_BYTES[0]))}) + + queue.put( + { + "node_id": "0", + "packet_type": bytes([0]), + "packet": b"".join((ZEROTH_NODE_LEADING_BYTE, bytes([0]), LENGTH, RANDOM_BYTES[0])), + } + ) packet_reader = PacketReader( save_locally=False, @@ -26,7 +33,7 @@ def test_error_is_logged_if_unknown_sensor_type_packet_is_received(self): stop_when_no_more_data_after=0.1, ) - self.assertIn("Received packet with unknown type: ", mock_logger.method_calls[1].args[0]) + self.assertIn("unknown type: ", mock_logger.method_calls[1].args[0]) def test_update_handles_fails_if_start_and_end_handles_are_incorrect(self): """Test that an error is raised if the start and end handles are incorrect when trying to update handles.""" @@ -41,9 +48,9 @@ def test_update_handles_fails_if_start_and_end_handles_are_incorrect(self): ) with patch("data_gateway.packet_reader.logger") as mock_logger: - packet_reader.update_handles(packet) + packet_reader.update_handles(packet, 0) - self.assertIn("Handle error", mock_logger.method_calls[0].args[0]) + self.assertIn("Error while updating handles for node", 
mock_logger.method_calls[0].args[0]) def test_update_handles(self): """Test that the handles can be updated.""" @@ -57,7 +64,7 @@ def test_update_handles(self): ) with patch("data_gateway.packet_reader.logger") as mock_logger: - packet_reader.update_handles(packet) + packet_reader.update_handles(packet, 0) self.assertIn("Successfully updated handles", mock_logger.method_calls[0].args[0]) @@ -76,7 +83,7 @@ def test_packet_reader_with_info_packets(self): for index, packet_type in enumerate(packet_types): for packet in packets[index]: - queue.put({"packet_type": str(int.from_bytes(packet_type, "little")), "packet": packet}) + queue.put({"node_id": "0", "packet_type": str(int.from_bytes(packet_type, "little")), "packet": packet}) with tempfile.TemporaryDirectory() as temporary_directory: packet_reader = PacketReader( @@ -101,10 +108,8 @@ def test_packet_reader_with_info_packets(self): ("Command declined, %s", "Bad block detection ongoing"), ("Command declined, %s", "Task already registered, cannot register again"), ("Command declined, %s", "Task is not registered, cannot de-register"), - ("Command declined, %s", "Connection Parameter update unfinished"), - ("\n%s\n", "Exiting sleep"), - ("\n%s\n", "Entering sleep"), - ("Battery info",), - ("Voltage : %fV\n Cycle count: %f\nState of charge: %f%%", 0.0, 0.0, 0.0), + ("Command declined, %s", "Connection parameter update unfinished"), + ("Sleep state updated on node %s: %s", "0", "Exiting sleep"), + ("Sleep state updated on node %s: %s", "0", "Entering sleep"), ]: self.assertIn(message, log_messages) diff --git a/tests/test_data_gateway/test_persistence.py b/tests/test_data_gateway/test_persistence.py index 6ac93074..76400bb7 100644 --- a/tests/test_data_gateway/test_persistence.py +++ b/tests/test_data_gateway/test_persistence.py @@ -24,56 +24,62 @@ class TestBatchingWriter(BaseTestCase): def test_data_is_batched(self): """Test that data is batched as expected.""" + node_id = "0" + with tempfile.TemporaryDirectory() as temporary_directory: writer = BatchingFileWriter( - sensor_names=["test"], + node_ids=[node_id], output_directory=os.path.join(temporary_directory, "this-session"), window_size=600, ) - writer.add_to_current_window(sensor_name="test", data="blah,") - self.assertEqual(writer.current_window["sensor_data"]["test"], ["blah,"]) + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="blah") + self.assertEqual(writer.current_window[node_id]["test"], ["blah"]) def test_data_is_written_to_disk_in_windows(self): """Test that data is written to disk as time windows.""" + node_id = "0" + with tempfile.TemporaryDirectory() as temporary_directory: writer = BatchingFileWriter( - sensor_names=["test"], + node_ids=[node_id], output_directory=os.path.join(temporary_directory, "this-session"), window_size=0.01, ) with writer: - writer.add_to_current_window(sensor_name="test", data="ping") - writer.add_to_current_window(sensor_name="test", data="pong") - self.assertEqual(len(writer.current_window["sensor_data"]["test"]), 2) + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="ping") + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="pong") + self.assertEqual(len(writer.current_window[node_id]["test"]), 2) time.sleep(writer.window_size * 2) - writer.add_to_current_window(sensor_name="test", data="ding") - writer.add_to_current_window(sensor_name="test", data="dong") - self.assertEqual(len(writer.current_window["sensor_data"]["test"]), 2) + writer.add_to_current_window(node_id=node_id, 
sensor_name="test", data="ding") + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="dong") + self.assertEqual(len(writer.current_window[node_id]["test"]), 2) - self.assertEqual(len(writer.current_window["sensor_data"]["test"]), 0) + self.assertEqual(len(writer.current_window[node_id]["test"]), 0) with open(os.path.join(writer.output_directory, "window-0.json")) as f: - self.assertEqual(json.load(f)["sensor_data"], {"test": ["ping", "pong"]}) + self.assertEqual(json.load(f)[node_id], {"test": ["ping", "pong"]}) with open(os.path.join(writer.output_directory, "window-1.json")) as f: - self.assertEqual(json.load(f)["sensor_data"], {"test": ["ding", "dong"]}) + self.assertEqual(json.load(f)[node_id], {"test": ["ding", "dong"]}) def test_oldest_window_is_deleted_when_storage_limit_reached(self): """Check that (only) the oldest window is deleted when the storage limit is reached.""" + node_id = "0" + with tempfile.TemporaryDirectory() as temporary_directory: writer = BatchingFileWriter( - sensor_names=["test"], + node_ids=[node_id], output_directory=os.path.join(temporary_directory, "this-session"), window_size=0.01, storage_limit=1, ) with writer: - writer.add_to_current_window(sensor_name="test", data="ping,") + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="ping") first_window_path = os.path.join(writer.output_directory, "window-0.json") @@ -81,7 +87,7 @@ def test_oldest_window_is_deleted_when_storage_limit_reached(self): self.assertTrue(os.path.exists(first_window_path)) with writer: - writer.add_to_current_window(sensor_name="test", data="pong,\n") + writer.add_to_current_window(node_id=node_id, sensor_name="test", data="pong\n") # Check first (oldest) file has now been deleted. self.assertFalse(os.path.exists(first_window_path)) @@ -91,20 +97,21 @@ def test_oldest_window_is_deleted_when_storage_limit_reached(self): def test_that_csv_files_are_written(self): """Test that data is written to disk as CSV-files if the `save_csv_files` option is `True`.""" - with tempfile.TemporaryDirectory() as temporary_directory: + node_id = "0" + with tempfile.TemporaryDirectory() as temporary_directory: writer = BatchingFileWriter( - sensor_names=["sensor1", "sensor2"], + node_ids=[node_id], output_directory=os.path.join(temporary_directory, "this-session"), save_csv_files=True, window_size=0.01, ) with writer: - writer.add_to_current_window(sensor_name="sensor1", data=[1, 2, 3]) - writer.add_to_current_window(sensor_name="sensor2", data=[1, 2, 3]) - writer.add_to_current_window(sensor_name="sensor1", data=[4, 5, 6]) - writer.add_to_current_window(sensor_name="sensor2", data=[4, 5, 6]) + writer.add_to_current_window(node_id=node_id, sensor_name="sensor1", data=[1, 2, 3]) + writer.add_to_current_window(node_id=node_id, sensor_name="sensor2", data=[1, 2, 3]) + writer.add_to_current_window(node_id=node_id, sensor_name="sensor1", data=[4, 5, 6]) + writer.add_to_current_window(node_id=node_id, sensor_name="sensor2", data=[4, 5, 6]) with open(os.path.join(writer.output_directory, "sensor1.csv")) as f: reader = csv.reader(f) @@ -126,40 +133,44 @@ def setUpClass(cls): def test_data_is_batched(self): """Test that data is batched as expected.""" + node_id = "0" + uploader = BatchingUploader( - sensor_names=["test"], + node_ids=[node_id], bucket_name=TEST_BUCKET_NAME, window_size=600, output_directory=storage.path.join(tempfile.TemporaryDirectory().name, "this-session"), ) - uploader.add_to_current_window(sensor_name="test", data="blah,") - 
self.assertEqual(uploader.current_window["sensor_data"]["test"], ["blah,"]) + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="blah") + self.assertEqual(uploader.current_window[node_id]["test"], ["blah"]) def test_data_is_uploaded_in_windows_and_can_be_retrieved_from_cloud_storage(self): """Test that data is uploaded in time windows that can be retrieved from cloud storage.""" + node_id = "0" + try: uploader = BatchingUploader( - sensor_names=["test"], + node_ids=[node_id], bucket_name=TEST_BUCKET_NAME, window_size=0.01, output_directory=f"this-session-{uuid.uuid4()}", ) with uploader: - uploader.add_to_current_window(sensor_name="test", data="ping") - uploader.add_to_current_window(sensor_name="test", data="pong") - self.assertEqual(len(uploader.current_window["sensor_data"]["test"]), 2) + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="ping") + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="pong") + self.assertEqual(len(uploader.current_window[node_id]["test"]), 2) time.sleep(uploader.window_size) - uploader.add_to_current_window(sensor_name="test", data="ding") - uploader.add_to_current_window(sensor_name="test", data="dong") - self.assertEqual(len(uploader.current_window["sensor_data"]["test"]), 2) + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="ding") + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="dong") + self.assertEqual(len(uploader.current_window[node_id]["test"]), 2) time.sleep(uploader.window_size) - self.assertEqual(len(uploader.current_window["sensor_data"]["test"]), 0) + self.assertEqual(len(uploader.current_window[node_id]["test"]), 0) self.assertEqual( json.loads( @@ -168,7 +179,7 @@ def test_data_is_uploaded_in_windows_and_can_be_retrieved_from_cloud_storage(sel TEST_BUCKET_NAME, uploader.output_directory, "window-0.json" ), ) - )["sensor_data"], + )[node_id], {"test": ["ping", "pong"]}, ) @@ -179,7 +190,7 @@ def test_data_is_uploaded_in_windows_and_can_be_retrieved_from_cloud_storage(sel TEST_BUCKET_NAME, uploader.output_directory, "window-1.json" ), ) - )["sensor_data"], + )[node_id], {"test": ["ding", "dong"]}, ) @@ -188,6 +199,8 @@ def test_data_is_uploaded_in_windows_and_can_be_retrieved_from_cloud_storage(sel def test_window_is_written_to_disk_if_upload_fails(self): """Test that a window is written to disk if it fails to upload to the cloud.""" + node_id = "0" + try: with mock.patch.object( Blob, @@ -195,7 +208,7 @@ def test_window_is_written_to_disk_if_upload_fails(self): side_effect=Exception("This is deliberately raised in this test to simulate cloud upload failing."), ): uploader = BatchingUploader( - sensor_names=["test"], + node_ids=[node_id], bucket_name=TEST_BUCKET_NAME, window_size=0.01, output_directory=f"this-session-{uuid.uuid4()}", @@ -203,8 +216,8 @@ def test_window_is_written_to_disk_if_upload_fails(self): ) with uploader: - uploader.add_to_current_window(sensor_name="test", data="ping") - uploader.add_to_current_window(sensor_name="test", data="pong") + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="ping") + uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="pong") # Check that the upload has failed. with self.assertRaises(google.api_core.exceptions.NotFound): @@ -216,13 +229,15 @@ def test_window_is_written_to_disk_if_upload_fails(self): # Check that a backup file has been written. 
             with open(os.path.join(uploader.output_directory, ".backup", "window-0.json")) as f:
-                self.assertEqual(json.load(f)["sensor_data"], {"test": ["ping", "pong"]})
+                self.assertEqual(json.load(f)[node_id], {"test": ["ping", "pong"]})

         finally:
             shutil.rmtree(uploader.output_directory)

     def test_backup_files_are_uploaded_on_next_upload_attempt(self):
         """Test that backup files from a failed upload are uploaded on the next upload attempt."""
+        node_id = "0"
+
         try:
             with mock.patch.object(
                 Blob,
@@ -230,7 +245,7 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):
                 side_effect=Exception("This is deliberately raised in this test to simulate cloud upload failing."),
             ):
                 uploader = BatchingUploader(
-                    sensor_names=["test"],
+                    node_ids=[node_id],
                     bucket_name=TEST_BUCKET_NAME,
                     window_size=10,
                     output_directory=f"this-session-{uuid.uuid4()}",
@@ -238,8 +253,8 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):
                 )

                 with uploader:
-                    uploader.add_to_current_window(sensor_name="test", data="ping")
-                    uploader.add_to_current_window(sensor_name="test", data="pong")
+                    uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="ping")
+                    uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="pong")

             # Check that the upload has failed.
             with self.assertRaises(google.api_core.exceptions.NotFound):
@@ -253,10 +268,10 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):

             # Check that a backup file has been written.
             with open(backup_path) as f:
-                self.assertEqual(json.load(f)["sensor_data"], {"test": ["ping", "pong"]})
+                self.assertEqual(json.load(f)[node_id], {"test": ["ping", "pong"]})

             with uploader:
-                uploader.add_to_current_window(sensor_name="test", data=["ding", "dong"])
+                uploader.add_to_current_window(node_id=node_id, sensor_name="test", data=["ding", "dong"])

             # Check that both windows are now in cloud storage.
             self.assertEqual(
@@ -266,7 +281,7 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):
                         TEST_BUCKET_NAME, uploader.output_directory, "window-0.json"
                     ),
                 )
-            )["sensor_data"],
+            )[node_id],
                 {"test": ["ping", "pong"]},
             )

@@ -277,7 +292,7 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):
                         TEST_BUCKET_NAME, uploader.output_directory, "window-1.json"
                     ),
                 )
-            )["sensor_data"],
+            )[node_id],
                 {"test": [["ding", "dong"]]},
             )

@@ -289,19 +304,25 @@ def test_backup_files_are_uploaded_on_next_upload_attempt(self):

     def test_metadata_is_added_to_uploaded_files(self):
         """Test that metadata is added to uploaded files and can be retrieved."""
-        uploader = BatchingUploader(
-            sensor_names=["test"],
-            bucket_name=TEST_BUCKET_NAME,
-            window_size=0.01,
-            output_directory=f"this-session-{uuid.uuid4()}",
-            metadata={"big": "rock"},
-        )
+        node_id = "0"

-        with uploader:
-            uploader.add_to_current_window(sensor_name="test", data="ping,")
+        try:
+            uploader = BatchingUploader(
+                node_ids=[node_id],
+                bucket_name=TEST_BUCKET_NAME,
+                window_size=0.01,
+                output_directory=f"this-session-{uuid.uuid4()}",
+                metadata={"big": "rock"},
+            )

-        metadata = self.storage_client.get_metadata(
-            cloud_path=storage.path.generate_gs_path(TEST_BUCKET_NAME, uploader.output_directory, "window-0.json"),
-        )
+            with uploader:
+                uploader.add_to_current_window(node_id=node_id, sensor_name="test", data="ping")
+
+            metadata = self.storage_client.get_metadata(
+                cloud_path=storage.path.generate_gs_path(TEST_BUCKET_NAME, uploader.output_directory, "window-0.json"),
+            )

-        self.assertEqual(metadata["custom_metadata"], {"big": "rock"})
+            self.assertEqual(metadata["custom_metadata"], {"big": "rock"})
+
+        finally:
+            shutil.rmtree(uploader.output_directory)
diff --git a/tests/valid_configuration.json b/tests/valid_configuration.json
index 1dd4d452..cc81a028 100644
--- a/tests/valid_configuration.json
+++ b/tests/valid_configuration.json
@@ -1,138 +1,159 @@
 {
-  "mics_freq": 15625,
-  "mics_bm": 1023,
-  "baros_freq": 100,
-  "diff_baros_freq": 1000,
-  "baros_bm": 1023,
-  "acc_freq": 100,
-  "acc_range": 16,
-  "gyro_freq": 100,
-  "gyro_range": 2000,
-  "mag_freq": 12.5,
-  "analog_freq": 16384,
-  "constat_period": 45,
-  "serial_buffer_rx_size": 100000,
-  "serial_buffer_tx_size": 1280,
-  "baudrate": 2300000,
-  "endian": "little",
-  "max_timestamp_slack": 0.005,
-  "max_period_drift": 0.02,
-  "packet_key": 254,
-  "type_handle_def": 255,
-  "mics_samples_per_packet": 8,
-  "imu_samples_per_packet": 40,
-  "analog_samples_per_packet": 60,
-  "baros_samples_per_packet": 1,
-  "diff_baros_samples_per_packet": 24,
-  "constat_samples_per_packet": 24,
-  "sensor_names": [
-    "Mics",
-    "Baros_P",
-    "Baros_T",
-    "Diff_Baros",
-    "Acc",
-    "Gyro",
-    "Mag",
-    "Analog Vbat",
-    "Constat"
-  ],
-  "default_handles": {
-    "34": "Abs. baros",
-    "36": "Diff. baros",
-    "38": "Mic 0",
-    "40": "Mic 1",
-    "42": "IMU Accel",
-    "44": "IMU Gyro",
-    "46": "IMU Magnetometer",
-    "48": "Analog1",
-    "50": "Analog2",
-    "52": "Constat",
-    "54": "Cmd Decline",
-    "56": "Sleep State",
-    "58": "Info Message"
-  },
-  "decline_reason": {
-    "0": "Bad block detection ongoing",
-    "1": "Task already registered, cannot register again",
-    "2": "Task is not registered, cannot de-register",
-    "3": "Connection Parameter update unfinished"
-  },
-  "sleep_state": {
-    "0": "Exiting sleep",
-    "1": "Entering sleep"
-  },
-  "info_type": {
-    "0": "Battery info"
-  },
-  "samples_per_packet": {
-    "Mics": 8,
-    "Diff_Baros": 24,
-    "Baros_P": 1,
-    "Baros_T": 1,
-    "Acc": 40,
-    "Gyro": 40,
-    "Mag": 40,
-    "Analog Vbat": 60,
-    "Constat": 24
-  },
-  "number_of_sensors": {
-    "Mics": 10,
-    "Baros_P": 40,
-    "Baros_T": 40,
-    "Diff_Baros": 5,
-    "Acc": 3,
-    "Gyro": 3,
-    "Mag": 3,
-    "Analog Vbat": 1,
-    "Constat": 4
-  },
-  "sensor_conversion_constants":{
-    "Mics": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
-    "Diff_Baros": [1, 1, 1, 1, 1],
-    "Baros_P": [40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96],
-    "Baros_T": [100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
-    "Acc": [1, 1, 1],
-    "Gyro": [1, 1, 1],
-    "Mag": [1, 1, 1],
-    "Analog Vbat": [1],
-    "Constat": [1, 1, 1, 1]
-  },
-  "period": {
-    "Mics": 6.4e-05,
-    "Baros_P": 0.01,
-    "Baros_T": 0.01,
-    "Diff_Baros": 0.001,
-    "Acc": 0.01,
-    "Gyro": 0.01,
-    "Mag": 0.08,
-    "Analog Vbat": 6.103515625e-05,
-    "Constat": 0.045
-  },
-  "sensor_commands": {
-    "start": ["startBaros", "startDiffBaros", "startIMU", "startMics"],
-    "stop": ["stopBaros", "stopDiffBaros", "stopIMU", "stopMics"],
-    "configuration": ["configBaros", "configAccel", "configGyro", "configMics"],
-    "utilities": [
-      "getBattery",
-      "setConnInterval",
-      "tpcBoostIncrease",
-      "tpcBoostDecrease",
-      "tpcBoostHeapMemThr1",
-      "tpcBoostHeapMemThr2",
-      "tpcBoostHeapMemThr4"
-    ]
-  },
-  "installation_data": {
+  "gateway": {
+    "baudrate": 2300000,
+    "endian": "little",
     "installation_reference": "aventa_turbine",
-    "turbine_id": "0",
-    "blade_id": "0",
-    "hardware_version": "1.2.3",
-    "sensor_coordinates": {
-      "Mics": [[0, 0, 0], [0, 0, 1]],
-      "Baros_p": [[1, 0, 0], [1, 0, 1], [1, 2, 0]]
+    "latitude": 0,
+    "longitude": 0,
+    "packet_key": 254,
+    "packet_key_offset": 245,
+    "receiver_firmware_version": "1.2.3",
+    "serial_buffer_rx_size": 100000,
+    "serial_buffer_tx_size": 1280,
+    "turbine_id": "unknown"
+  },
+  "nodes": {
+    "0": {
+      "acc_freq": 100,
+      "acc_range": 16,
+      "analog_freq": 16384,
+      "baros_bm": 1023,
+      "baros_freq": 100,
+      "blade_id": "0",
+      "constat_period": 45,
+      "decline_reason": {
+        "0": "Bad block detection ongoing",
+        "1": "Task already registered, cannot register again",
+        "2": "Task is not registered, cannot de-register",
+        "3": "Connection Parameter update unfinished"
+      },
+      "diff_baros_freq": 1000,
+      "default_handles": {
+        "34": "Abs. baros",
+        "36": "Diff. baros",
+        "38": "Mic 0",
+        "40": "Mic 1",
+        "42": "IMU Accel",
+        "44": "IMU Gyro",
+        "46": "IMU Magnetometer",
+        "48": "Analog1",
+        "50": "Analog2",
+        "52": "Constat",
+        "54": "Cmd Decline",
+        "56": "Sleep State",
+        "58": "Info Message"
+      },
+      "gyro_freq": 100,
+      "gyro_range": 2000,
+      "local_info_type": {
+        "0": "Synchronization not ready as not every sensor node is connected",
+        "1": "Time synchronization info",
+        "2": "Time sync exception",
+        "4": "Time sync coarse data record error",
+        "8": "Time sync alignment error",
+        "16": "Time sync coarse data time diff error",
+        "32": "Device not connected",
+        "64": "Select message destination successful",
+        "128": "Time sync success",
+        "129": "Coarse sync finish",
+        "130": "Time sync msg sent"
+      },
+      "remote_info_type": {
+        "0": "Battery info"
+      },
+      "mag_freq": 12.5,
+      "mics_freq": 15625,
+      "mics_bm": 1023,
+      "max_timestamp_slack": 0.005,
+      "max_period_drift": 0.02,
+      "node_firmware_version": "unknown",
+      "number_of_sensors": {
+        "Mics": 10,
+        "Baros_P": 40,
+        "Baros_T": 40,
+        "Diff_Baros": 5,
+        "Acc": 3,
+        "Gyro": 3,
+        "Mag": 3,
+        "Analog Vbat": 1,
+        "Constat": 4
+      },
+      "periods": {
+        "Mics": 6.4e-05,
+        "Baros_P": 0.01,
+        "Baros_T": 0.01,
+        "Diff_Baros": 0.001,
+        "Acc": 0.01,
+        "Gyro": 0.01,
+        "Mag": 0.08,
+        "Analog Vbat": 6.103515625e-05,
+        "Constat": 0.045
+      },
+      "samples_per_packet": {
+        "Mics": 8,
+        "Diff_Baros": 24,
+        "Baros_P": 1,
+        "Baros_T": 1,
+        "Acc": 40,
+        "Gyro": 40,
+        "Mag": 40,
+        "Analog Vbat": 60,
+        "Constat": 24
+      },
+      "sensor_commands": {
+        "start": ["startBaros", "startDiffBaros", "startIMU", "startMics"],
+        "stop": ["stopBaros", "stopDiffBaros", "stopIMU", "stopMics"],
+        "configuration": ["configBaros", "configAccel", "configGyro", "configMics"],
+        "utilities": [
+          "getBattery",
+          "setConnInterval",
+          "tpcBoostIncrease",
+          "tpcBoostDecrease",
+          "tpcBoostHeapMemThr1",
+          "tpcBoostHeapMemThr2",
+          "tpcBoostHeapMemThr4"
+        ]
+      },
+      "sensor_conversion_constants": {
+        "Mics": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
+        "Diff_Baros": [1, 1, 1, 1, 1],
+        "Baros_P": [40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96, 40.96],
+        "Baros_T": [100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100],
+        "Acc": [1, 1, 1],
+        "Gyro": [1, 1, 1],
+        "Mag": [1, 1, 1],
+        "Analog Vbat": [1],
+        "Constat": [1, 1, 1, 1]
+      },
+      "sensor_coordinates": {
+        "Mics": [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Baros_P": [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Baros_T": [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Diff_Baros": [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Acc": [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Gyro": [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Mag": [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
+        "Analog Vbat": [[0, 0, 0]],
+        "Constat": [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0]]
+      },
+      "sensor_names": [
+        "Mics",
+        "Baros_P",
+        "Baros_T",
+        "Diff_Baros",
+        "Acc",
+        "Gyro",
+        "Mag",
+        "Analog Vbat",
+        "Constat"
+      ],
+      "sleep_state": {
+        "0": "Exiting sleep",
+        "1": "Entering sleep"
+      },
+      "type_handle_def": 255
     }
   },
-  "session_data": {
+  "session": {
    "label": "my-test-1"
  }
}
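Note on the new window layout: as the updated assertions throughout these tests show, window files written by `BatchingFileWriter` and uploaded by `BatchingUploader` are now keyed by node ID at the top level (e.g. "0", "1") instead of a single "sensor_data" key, with each node mapping sensor names to lists of entries. A minimal sketch of reading one back under that assumption (the file path is illustrative, not part of this patch):

    import json

    # "window-0.json" is the first window of a session, named as in the tests above.
    # The path is illustrative; real windows live under the session's output directory.
    with open("window-0.json") as f:
        window = json.load(f)

    # Top-level keys are node IDs; each node maps sensor names to lists of entries.
    for node_id, sensor_data in window.items():
        for sensor_name, entries in sensor_data.items():
            print(node_id, sensor_name, len(entries))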
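Note on multi-node packet framing: `test_with_two_nodes` shows the serial framing the packet reader now expects — a per-node leading byte, then a packet type byte, then a length byte, then the payload. Node 0's leading byte is exposed to the tests as `ZEROTH_NODE_LEADING_BYTE`, and other nodes' leading bytes come from `Configuration.get_leading_byte`. A hedged sketch of framing a packet for node "1" (the packet type and payload are illustrative, and the assumption that the length byte equals the payload size is inferred from the tests' fixed `LENGTH` constant):

    from data_gateway.configuration import Configuration
    from data_gateway.dummy_serial import DummySerial

    # Give node "1" the default node configuration, as test_with_two_nodes does.
    configuration = Configuration()
    configuration.nodes["1"] = configuration.nodes["0"]

    leading_byte = configuration.get_leading_byte(node_id="1")
    packet_type = bytes([38])  # Illustrative type ("Mic 0" in the default handles).
    payload = bytes(244)  # Illustrative payload, assumed to match the length byte below.

    # Frame and write the packet: leading byte, type, length, then payload.
    serial_port = DummySerial(port="test")
    serial_port.write(data=b"".join((leading_byte, packet_type, bytes([len(payload)]), payload)))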