diff --git a/.bandit b/.bandit
new file mode 100644
index 0000000..663333a
--- /dev/null
+++ b/.bandit
@@ -0,0 +1,2 @@
+[bandit]
+exclude: trollsched/version.py,versioneer.py,trollsched/tests
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 4c38f40..9e1392f 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -3,5 +3,5 @@
- [ ] Closes #xxxx
- [ ] Tests added
- [ ] Tests passed
- - [ ] Passes ``git diff origin/master **/*py | flake8 --diff``
+ - [ ] Passes ``git diff origin/main **/*py | flake8 --diff``
- [ ] Fully documented
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..90e05c4
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: "github-actions" # See documentation for possible values
+ directory: "/" # Location of package manifests
+ schedule:
+ interval: "weekly"
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..3387915
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,35 @@
+name: Run tests
+
+on:
+ - push
+ - pull_request
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: true
+ matrix:
+ python-version: ["3.10", "3.11", "3.12"]
+ experimental: [false]
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ pip install -U pytest pytest-cov numpy pyresample pyorbital six pyyaml defusedxml
+ - name: Install pytroll-schedule
+ run: |
+ pip install --no-deps -e .
+ - name: Run tests
+ run: |
+ pytest --cov=trollsched trollsched/tests --cov-report=xml
+ - name: Upload coverage to Codecov
+ uses: codecov/codecov-action@v4
+ with:
+ files: ./coverage.xml
+ env_vars: PYTHON_VERSION
diff --git a/.gitignore b/.gitignore
index ded6067..9d420ef 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,3 +34,9 @@ nosetests.xml
.mr.developer.cfg
.project
.pydevproject
+
+tmp
+.idea
+.vscode
+.ropeproject
+*~
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..e5fc6fa
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,20 @@
+exclude: '^$'
+fail_fast: false
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.4.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ args: [--unsafe]
+ - repo: https://github.com/charliermarsh/ruff-pre-commit
+ # Ruff version.
+ rev: 'v0.0.247'
+ hooks:
+ - id: ruff
+ args: [--fix, --exit-non-zero-on-fix]
+ci:
+ # To trigger manually, comment on a pull request with "pre-commit.ci autofix"
+ autofix_prs: false
+ skip: [bandit]
diff --git a/.stickler.yml b/.stickler.yml
index c266a86..e328025 100644
--- a/.stickler.yml
+++ b/.stickler.yml
@@ -1,5 +1,13 @@
linters:
flake8:
+ python: 3
+ config: setup.cfg
fixer: true
fixers:
enable: true
+
+files:
+ ignore:
+ - 'docs/Makefile'
+ - 'docs/make.bat'
+
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 44ae524..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-language: python
-env:
- global:
- # Set defaults to avoid repeating in most cases
- - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION
- - NUMPY_VERSION=stable
- - MAIN_CMD='python setup.py'
- - CONDA_DEPENDENCIES='scipy coveralls coverage codecov mock six appdirs pykdtree pyresample docutils pyyaml matplotlib xarray'
- - PIP_DEPENDENCIES=''
- - SETUP_XVFB=False
- - EVENT_TYPE='push pull_request cron'
- - SETUP_CMD='test'
- - CONDA_CHANNELS='conda-forge'
- - CONDA_CHANNEL_PRIORITY='True'
-matrix:
- include:
- - env: PYTHON_VERSION=2.7
- os: linux
- - env: PYTHON_VERSION=2.7
- os: osx
- language: generic
- - env: PYTHON_VERSION=3.6
- os: linux
- - env: PYTHON_VERSION=3.6
- os: osx
- language: generic
-install:
- - git clone --depth 1 git://github.com/astropy/ci-helpers.git
- - source ci-helpers/travis/setup_conda.sh
- # reactivate environment to set proj environment variables
- - conda deactivate
- - conda activate test
-script: coverage run --source=trollsched setup.py test
-after_success:
-- if [[ $PYTHON_VERSION == 3.6 ]]; then coveralls; fi
-#
-deploy:
- - provider: pypi
- user: adybbroe
- password:
- secure: SY0qo7sZXDjDx0DHvuXrHvL9VTAulgU/T33d6UWXf469jT9DOexuZ2VYLgJbYQen5FSe5JmQE0ZMdId1cb8IPP/77qCgQK6f0lRDa43fSYXhcD+fHzlQskievJrwamkRYx6WBrJbwGAKBNinUgNSaTdbh9XUugziGFiOHUfVppM=
- distributions: sdist bdist_wheel
- skip_existing: true
- on:
- tags: true
- repo: pytroll/pytroll-schedule
-notifications:
- slack:
- rooms:
- - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#pytroll-schedule
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ac7a307..f6e5672 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,79 @@
+###############################################################################
+## Version 0.7.1 (2024/02/16)
+
+
+### Pull Requests Merged
+
+#### Features added
+
+* [PR 85](https://github.com/pytroll/pytroll-schedule/pull/85) - Update CI Python versions
+* [PR 84](https://github.com/pytroll/pytroll-schedule/pull/84) - Update versioneer
+
+In this release 2 pull requests were closed.
+
+## Version 0.7.0 (2023/06/21)
+
+### Issues Closed
+
+* [Issue 74](https://github.com/pytroll/pytroll-schedule/issues/74) - numpy 1.24.0 np.bool removed needs cleanup ([PR 72](https://github.com/pytroll/pytroll-schedule/pull/72) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 68](https://github.com/pytroll/pytroll-schedule/issues/68) - Test failure with PyResample 1.23 ([PR 72](https://github.com/pytroll/pytroll-schedule/pull/72) by [@mraspaud](https://github.com/mraspaud))
+
+In this release 2 issues were closed.
+
+### Pull Requests Merged
+
+#### Features added
+
+* [PR 72](https://github.com/pytroll/pytroll-schedule/pull/72) - Start refactoring pytroll schedule ([74](https://github.com/pytroll/pytroll-schedule/issues/74), [68](https://github.com/pytroll/pytroll-schedule/issues/68))
+* [PR 71](https://github.com/pytroll/pytroll-schedule/pull/71) - Create dependabot.yml
+
+In this release 2 pull requests were closed.
+
+
+## Version 0.6.0 (2021/12/09)
+
+### Issues Closed
+
+* [Issue 62](https://github.com/pytroll/pytroll-schedule/issues/62) - Remove remnants of Python 2 support ([PR 67](https://github.com/pytroll/pytroll-schedule/pull/67) by [@pnuu](https://github.com/pnuu))
+* [Issue 60](https://github.com/pytroll/pytroll-schedule/issues/60) - Deprecated import of Mapping
+* [Issue 59](https://github.com/pytroll/pytroll-schedule/issues/59) - Failures in Schedule tests ([PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) by [@pnuu](https://github.com/pnuu))
+* [Issue 54](https://github.com/pytroll/pytroll-schedule/issues/54) - Deprecated use of abstract base classes ([PR 57](https://github.com/pytroll/pytroll-schedule/pull/57) by [@pnuu](https://github.com/pnuu))
+* [Issue 53](https://github.com/pytroll/pytroll-schedule/issues/53) - The unittests are not run automatically ([PR 55](https://github.com/pytroll/pytroll-schedule/pull/55) by [@pnuu](https://github.com/pnuu))
+* [Issue 52](https://github.com/pytroll/pytroll-schedule/issues/52) - Boundary calculations are broken ([PR 56](https://github.com/pytroll/pytroll-schedule/pull/56) by [@pnuu](https://github.com/pnuu))
+* [Issue 49](https://github.com/pytroll/pytroll-schedule/issues/49) - Three unit tests failed.
+
+In this release 7 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) - Allow `mersi-2` as instrument name ([59](https://github.com/pytroll/pytroll-schedule/issues/59))
+* [PR 56](https://github.com/pytroll/pytroll-schedule/pull/56) - Remove a bug introduced in PR38 ([52](https://github.com/pytroll/pytroll-schedule/issues/52))
+* [PR 51](https://github.com/pytroll/pytroll-schedule/pull/51) - Remove some redundant code and fix a failed unit test.
+* [PR 45](https://github.com/pytroll/pytroll-schedule/pull/45) - Use recent ssl protocol for older python versions
+* [PR 38](https://github.com/pytroll/pytroll-schedule/pull/38) - Fix S3 olci scan duration
+
+#### Features added
+
+* [PR 67](https://github.com/pytroll/pytroll-schedule/pull/67) - Refactor remove legacy code support ([62](https://github.com/pytroll/pytroll-schedule/issues/62))
+* [PR 66](https://github.com/pytroll/pytroll-schedule/pull/66) - Change tested Python versions to 3.8, 3.9 and 3.10
+* [PR 64](https://github.com/pytroll/pytroll-schedule/pull/64) - Use safe loading for YAML config file
+* [PR 61](https://github.com/pytroll/pytroll-schedule/pull/61) - Allow `mersi-2` as instrument name ([59](https://github.com/pytroll/pytroll-schedule/issues/59))
+* [PR 58](https://github.com/pytroll/pytroll-schedule/pull/58) - Fix a test failure on Python 3.7
+* [PR 57](https://github.com/pytroll/pytroll-schedule/pull/57) - Fix an import raising deprecation warning ([54](https://github.com/pytroll/pytroll-schedule/issues/54))
+* [PR 55](https://github.com/pytroll/pytroll-schedule/pull/55) - Add GitHub actions to run unittests ([53](https://github.com/pytroll/pytroll-schedule/issues/53))
+* [PR 50](https://github.com/pytroll/pytroll-schedule/pull/50) - Add a southern hemisphere pass test.
+* [PR 46](https://github.com/pytroll/pytroll-schedule/pull/46) - Give the option to plot multiple polygons
+* [PR 45](https://github.com/pytroll/pytroll-schedule/pull/45) - Use recent ssl protocol for older python versions
+* [PR 44](https://github.com/pytroll/pytroll-schedule/pull/44) - Make plot filename more complete, including the instrument name
+* [PR 42](https://github.com/pytroll/pytroll-schedule/pull/42) - Make it possible to tell cartopy to use offline shapefiles
+* [PR 41](https://github.com/pytroll/pytroll-schedule/pull/41) - Fix nasa ftp retrieval
+* [PR 38](https://github.com/pytroll/pytroll-schedule/pull/38) - Fix S3 olci scan duration
+
+In this release 19 pull requests were closed.
+
+
## Version 0.5.2 (2019/03/19)
diff --git a/README.md b/README.md
index 432a3b9..af2de2c 100644
--- a/README.md
+++ b/README.md
@@ -2,9 +2,9 @@ pytroll-schedule
================
[![Codacy Badge](https://api.codacy.com/project/badge/Grade/9f039d7d640846ca89be8a78fa11e1f6)](https://www.codacy.com/app/adybbroe/pytroll-schedule?utm_source=github.com&utm_medium=referral&utm_content=pytroll/pytroll-schedule&utm_campaign=badger)
-[![Build Status](https://travis-ci.org/pytroll/pytroll-schedule.png?branch=master)](https://travis-ci.org/pytroll/pytroll-schedule)
-[![Coverage Status](https://coveralls.io/repos/github/pytroll/pytroll-schedule/badge.svg?branch=master)](https://coveralls.io/github/pytroll/pytroll-schedule?branch=master)
-[![Code Health](https://landscape.io/github/pytroll/pytroll-schedule/master/landscape.png)](https://landscape.io/github/pytroll/pytroll-schedule/master)
+[![Build Status](https://github.com/pytroll/pytroll-schedule/workflows/Run%20tests/badge.svg?branch=main)](https://github.com/pytroll/pytroll-schedule/actions)
+[![Coverage Status](https://coveralls.io/repos/github/pytroll/pytroll-schedule/badge.svg?branch=main)](https://coveralls.io/github/pytroll/pytroll-schedule?branch=main)
+[![Code Health](https://landscape.io/github/pytroll/pytroll-schedule/main/landscape.png)](https://landscape.io/github/pytroll/pytroll-schedule/main)
[![PyPI version](https://badge.fury.io/py/pytroll-schedule.svg)](https://badge.fury.io/py/pytroll-schedule)
diff --git a/RELEASING.md b/RELEASING.md
index 3512521..592498a 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -1,6 +1,6 @@
# Releasing pytroll-schedule
-1. checkout master
+1. checkout main branch
2. pull from repo
3. run the unittests
4. run `loghub` and update the `CHANGELOG.md` file:
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 0000000..e69de29
diff --git a/docs/about.rst b/docs/about.rst
index c023cc2..2b98657 100644
--- a/docs/about.rst
+++ b/docs/about.rst
@@ -5,49 +5,49 @@ About PyTroll-Schedule
Case of One Receiving Station
-----------------------------
-In the case of a single station, the procedure of scheduling is quite
+In the case of a single station, the procedure of scheduling is quite
straightforward. However, let us describe it in detail here, such that the
-background will be set for the more complex case of multiple reception station
+background will be set for the more complex case of multiple reception station
reception scheduling.
-The first step to compute the schedule, is to know which satellites of interest
-are going to be rising above the horizon during the duration of the schedule.
-In order to find such cases, we retrieve the orbital information for each
-satellite of interest and apply orbit prediction using the aiaa sgp4 algorithm
-(ref). In practice, we use norad tle files (ref) as orbital elements, and the
-python implementation of the sgp4 algorithm provided in pyorbital (ref). From
-this, we then obtain a list of the coming overpasses for the station. We define
-an overpass as a risetime and fall time for a given satellite, during which it
+The first step to compute the schedule, is to know which satellites of interest
+are going to be rising above the horizon during the duration of the schedule.
+In order to find such cases, we retrieve the orbital information for each
+satellite of interest and apply orbit prediction using the aiaa sgp4 algorithm
+(ref). In practice, we use norad tle files (ref) as orbital elements, and the
+python implementation of the sgp4 algorithm provided in pyorbital (ref). From
+this, we then obtain a list of the coming overpasses for the station. We define
+an overpass as a risetime and fall time for a given satellite, during which it
will be within reception reach of the station.
-Now, we have to find the possible schedules for the station. The set of all
-overpasses gives us all the reception possibilities for the station. However,
-many of them will be in conflict with at least one other overpass and will be
-a concurrent to the reception race. We say that two overpasses conflict when
-the risetime dog one of them is comprised within the view time of the second.
-In case of conflicts, the scheduling algorithm has to choose one or the other
-overpass. However, in the case of several overpasses conflicting sequentially,
-we have to find the possible paths through the conflicting zone. In order to do
+Now, we have to find the possible schedules for the station. The set of all
+overpasses gives us all the reception possibilities for the station. However,
+many of them will be in conflict with at least one other overpass and will be
+a concurrent to the reception race. We say that two overpasses conflict when
+the risetime of one of them is comprised within the view time of the second.
+In case of conflicts, the scheduling algorithm has to choose one or the other
+overpass. However, in the case of several overpasses conflicting sequentially,
+we have to find the possible paths through the conflicting zone. In order to do
that, we will use graph theory algorithms.
-We define the graph of the conflicting zone with overpasses as vertices and
-create an edge between two conflicting overpasses. To find the possible
-non-conflicting combinations in this graph is actually searching for maximal
-cliques in the complementary graph, for which we use the Bron-Kerbosch
+We define the graph of the conflicting zone with overpasses as vertices and
+create an edge between two conflicting overpasses. To find the possible
+non-conflicting combinations in this graph is actually searching for maximal
+cliques in the complementary graph, for which we use the Bron-Kerbosch
algorithm.
#illustration click
we obtain thus groups of passes that are not conflicting in the time frame.
-The next step is to find the optimal list of non conflicting passes under the
+The next step is to find the optimal list of non conflicting passes under the
duration on the schedule.
Cases of Connected Stations
---------------------------
-There are several ways to computate schedules for connected stations, two are
+There are several ways to compute schedules for connected stations, two are
implemented in this program.
Several points should be considered:
-* Technical equipement, reception of L-band, Ku-band, X-band?
+* Technical equipment, reception of L-band, Ku-band, X-band?
* Geographic location, nearby or large distance between?
"Master-Slave" Operation
@@ -58,26 +58,26 @@ other, with similar technical systems.
In this case a schedule for one, namely the "master" station, would be computed,
as if it were only this one station.
-In a second step this schedule plan is used as a substraction list when
+In a second step this schedule plan is used as a subtraction list when
computing the schedule for the second, the "slave" station.
Co-operating Stations
*********************
A mode of co-operating stations can consider the distance between different
-geographical locations and differences in technical equipement, most notable
+geographical locations and differences in technical equipment, most notable
different reception capabilities (X- & L-band vs. L-band).
-In this case, each station defines a time span requirement for each pass. Then,
-if a connected station can fulfil this requirement and is scheduling the same
-pass, we can say that the stations are redundant. To avoid such redundancy, we
-can define ways to synchronise the schedule to optimise the intake of data and
+In this case, each station defines a time span requirement for each pass. Then,
+if a connected station can fulfil this requirement and is scheduling the same
+pass, we can say that the stations are redundant. To avoid such redundancy, we
+can define ways to synchronise the schedule to optimise the intake of data and
fulfil the pareto condition.
-A simple protocol can be used to perform this: both A and B provide alternatives
+A simple protocol can be used to perform this: both A and B provide alternatives
and compute the enhanced score for the schedule including the others pass.
-B can delegate the pass only if it can assure that the time span requirement of
+B can delegate the pass only if it can assure that the time span requirement of
A is respected.
-This operation can be extended to more than two stations, all receiving a
+This operation can be extended to more than two stations, all receiving a
single-operation schedule and an individual cooperating-schedule.
diff --git a/docs/config.rst b/docs/config.rst
index 6695dbd..0c03a70 100644
--- a/docs/config.rst
+++ b/docs/config.rst
@@ -153,7 +153,7 @@ Stations
described below. If no sub-keys are given, the scores from the section
``satellites`` are used.
- Alternativly the satellites can be listed as a list, as shown in the
+ Alternatively the satellites can be listed as a list, as shown in the
following example for station "nrk". In this case all names refer to the
section ``satellites``.
diff --git a/docs/usage.rst b/docs/usage.rst
index 5854522..2d4b6a4 100644
--- a/docs/usage.rst
+++ b/docs/usage.rst
@@ -1,6 +1,10 @@
Usage
=====
+To run the schedule script, it is now compulsory to provide a configuration file
+(see the config section on how these are formed). Command line arguments
+override what is provided in the configuration file.
+
Usage of the schedule script::
usage: schedule [-h] [-c CONFIG] [-t TLE] [-l LOG] [-m [MAIL [MAIL ...]]] [-v]
@@ -8,7 +12,7 @@ Usage of the schedule script::
[-s START_TIME] [-d DELAY] [-a AVOID] [--no-aqua-terra-dump]
[--multiproc] [-o OUTPUT_DIR] [-u OUTPUT_URL] [-x] [-r]
[--scisys] [-p] [-g]
-
+
optional arguments:
-h, --help show this help message and exit
-c CONFIG, --config CONFIG
@@ -18,10 +22,10 @@ Usage of the schedule script::
-m [MAIL [MAIL ...]], --mail [MAIL [MAIL ...]]
mail address(es) to send error messages to.
-v, --verbose print debug messages too
-
+
start-parameter:
(or set values in the configuration file)
-
+
--lat LAT Latitude, degrees north
--lon LON Longitude, degrees east
--alt ALT Altitude, km
@@ -32,18 +36,18 @@ Usage of the schedule script::
-d DELAY, --delay DELAY
delay (in seconds) needed between two consecutive
passes (60 seconds by default)
-
+
special:
(additional parameter changing behaviour)
-
+
-a AVOID, --avoid AVOID
xml request file with passes to avoid
--no-aqua-terra-dump do not consider Aqua/Terra-dumps
--multiproc use multiple parallel processes
-
+
output:
(file pattern are taken from configuration file)
-
+
-o OUTPUT_DIR, --output-dir OUTPUT_DIR
where to put generated files
-u OUTPUT_URL, --output-url OUTPUT_URL
diff --git a/generate_schedule_xmlpage.py b/generate_schedule_xmlpage.py
index cd8f1db..59e88af 100644
--- a/generate_schedule_xmlpage.py
+++ b/generate_schedule_xmlpage.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2016, 2018 Adam.Dybbroe
+# Copyright (c) 2016, 2018, 2019 Adam.Dybbroe
# Author(s):
@@ -20,38 +20,34 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""From a schedule request xml file generate png's with swath coverage outline
-and an xml page for visualisation. It uses posttroll to listen for incoming
-schedule request xml files and then triggers the png and xml output generation.
+"""From a schedule request xml file generate png's with swath coverage outline and an xml page for visualisation.
+It uses posttroll to listen for incoming schedule request xml files and then triggers the png and xml output generation.
"""
-import os
-from six.moves.configparser import RawConfigParser
import logging
+import os.path
import sys
-try:
- from urlparse import urlparse
-except ImportError:
- from urllib.parse import urlparse
+from datetime import datetime
+
+import defusedxml.ElementTree as ET
import posttroll.subscriber
from posttroll.publisher import Publish
-import xml.etree.ElementTree as ET
-from datetime import datetime
-import os.path
+from six.moves.configparser import RawConfigParser
+from six.moves.urllib.parse import urlparse
-from trollsched.satpass import Pass
+from trollsched import INSTRUMENT, SATELLITE_NAMES
from trollsched.drawing import save_fig
-from trollsched import (SATELLITE_NAMES, INSTRUMENT)
+from trollsched.satpass import Pass
LOG = logging.getLogger(__name__)
-CFG_DIR = os.environ.get('PYTROLL_SCHEDULE_CONFIG_DIR', './')
+CFG_DIR = os.environ.get("PYTROLL_SCHEDULE_CONFIG_DIR", "./")
CONF = RawConfigParser()
CFG_FILE = os.path.join(CFG_DIR, "pytroll_schedule_config.cfg")
LOG.debug("Config file = " + str(CFG_FILE))
if not os.path.exists(CFG_FILE):
- raise IOError('Config file %s does not exist!' % CFG_FILE)
+ raise IOError("Config file %s does not exist!" % CFG_FILE)
CONF.read(CFG_FILE)
OPTIONS = {}
@@ -60,85 +56,85 @@
#: Default time format
-_DEFAULT_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
+_DEFAULT_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
#: Default log format
-_DEFAULT_LOG_FORMAT = '[%(levelname)s: %(asctime)s : %(name)s] %(message)s'
+_DEFAULT_LOG_FORMAT = "[%(levelname)s: %(asctime)s : %(name)s] %(message)s"
def process_xmlrequest(filename, plotdir, output_file, excluded_satellites):
-
+ """Process the xml request."""
tree = ET.parse(filename)
root = tree.getroot()
for child in root:
- if child.tag == 'pass':
+ if child.tag == "pass":
LOG.debug("Pass: %s", str(child.attrib))
- platform_name = SATELLITE_NAMES.get(child.attrib['satellite'], child.attrib['satellite'])
+ platform_name = SATELLITE_NAMES.get(child.attrib["satellite"], child.attrib["satellite"])
instrument = INSTRUMENT.get(platform_name)
if not instrument:
- LOG.error('Instrument unknown! Platform = %s', platform_name)
+ LOG.error("Instrument unknown! Platform = %s", platform_name)
continue
if platform_name in excluded_satellites:
- LOG.debug('Platform name excluded: %s', platform_name)
+ LOG.debug("Platform name excluded: %s", platform_name)
continue
try:
overpass = Pass(platform_name,
- datetime.strptime(child.attrib['start-time'],
- '%Y-%m-%d-%H:%M:%S'),
- datetime.strptime(child.attrib['end-time'],
- '%Y-%m-%d-%H:%M:%S'),
+ datetime.strptime(child.attrib["start-time"],
+ "%Y-%m-%d-%H:%M:%S"),
+ datetime.strptime(child.attrib["end-time"],
+ "%Y-%m-%d-%H:%M:%S"),
instrument=instrument)
except KeyError as err:
- LOG.warning('Failed on satellite %s: %s', platform_name, str(err))
+ LOG.warning("Failed on satellite %s: %s", platform_name, str(err))
continue
save_fig(overpass, directory=plotdir)
- child.set('img', overpass.fig)
- child.set('rec', 'True')
+ child.set("img", overpass.fig)
+ child.set("rec", "True")
LOG.debug("Plot saved - plotdir = %s, platform_name = %s", plotdir, platform_name)
- tree.write(output_file, encoding='utf-8', xml_declaration=True)
+ tree.write(output_file, encoding="utf-8", xml_declaration=True)
with open(output_file) as fpt:
lines = fpt.readlines()
lines.insert(
1, "")
- with open(output_file, 'w') as fpt:
+ with open(output_file, "w") as fpt:
fpt.writelines(lines)
def start_plotting(jobreg, message, **kwargs):
- """Read the xmlschedule request file and make the png images of swath outlines
- and generate the output xml file for web publication
+ """Make a web-publishable version of the xml schedule.
+ Read the xmlschedule request file and make the png images of swath outlines
+ and generate the output xml file for web publication.
"""
- excluded_satellites = kwargs.get('excluded_satellites', [])
+ excluded_satellites = kwargs.get("excluded_satellites", [])
LOG.info("")
LOG.info("job-registry dict: " + str(jobreg))
LOG.info("\tMessage:")
LOG.info(message)
- urlobj = urlparse(message.data['uri'])
+ urlobj = urlparse(message.data["uri"])
# path, fname = os.path.split(urlobj.path)
process_xmlrequest(urlobj.path,
- OPTIONS['path_plots'], OPTIONS['xmlfilepath'],
+ OPTIONS["path_plots"], OPTIONS["xmlfilepath"],
excluded_satellites)
return jobreg
def schedule_page_generator(excluded_satellite_list=None):
- """Listens and triggers processing"""
-
+ """Listens and triggers processing."""
LOG.info(
"*** Start the generation of the schedule xml page with swath outline plots")
- with posttroll.subscriber.Subscribe('', [OPTIONS['posttroll_topic'], ],
+ with posttroll.subscriber.Subscribe("", [OPTIONS["posttroll_topic"], ],
True) as subscr:
- with Publish('schedule_page_generator', 0) as publisher:
+ with Publish("schedule_page_generator", 0) as publisher:
job_registry = {}
for msg in subscr.recv():
job_registry = start_plotting(
@@ -155,7 +151,7 @@ def schedule_page_generator(excluded_satellite_list=None):
import argparse
parser = argparse.ArgumentParser()
- parser.add_argument("-x", "--excluded_satellites", nargs='*',
+ parser.add_argument("-x", "--excluded_satellites", nargs="*",
help="List of platform names to exclude",
default=[])
opts = parser.parse_args()
@@ -168,11 +164,11 @@ def schedule_page_generator(excluded_satellite_list=None):
formatter = logging.Formatter(fmt=_DEFAULT_LOG_FORMAT,
datefmt=_DEFAULT_TIME_FORMAT)
handler.setFormatter(formatter)
- logging.getLogger('').addHandler(handler)
- logging.getLogger('').setLevel(logging.DEBUG)
- logging.getLogger('posttroll').setLevel(logging.INFO)
+ logging.getLogger("").addHandler(handler)
+ logging.getLogger("").setLevel(logging.DEBUG)
+ logging.getLogger("posttroll").setLevel(logging.INFO)
- LOG = logging.getLogger('schedule_page_generator')
+ LOG = logging.getLogger("schedule_page_generator")
LOG.info("Exclude the following satellite platforms: %s", str(no_sats))
schedule_page_generator(no_sats)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..16cf2f1
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,10 @@
+[tool.ruff]
+select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID"]
+ignore = ["B905"] # only available from python 3.10
+line-length = 120
+
+[tool.ruff.per-file-ignores]
+"trollsched/tests/*" = ["S101"]
+
+[tool.ruff.pydocstyle]
+convention = "google"
diff --git a/setup.cfg b/setup.cfg
index fd74800..0bc6805 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
[bdist_rpm]
-requires=numpy pyresample pyorbital
+requires=numpy pyresample pyorbital pyyaml
release=1
# See the docstring in versioneer.py for instructions. Note that you must
@@ -10,7 +10,7 @@ release=1
VCS = git
style = pep440
versionfile_source = trollsched/version.py
-versionfile_build =
+versionfile_build =
tag_prefix = v
parentdir_prefix =
@@ -19,8 +19,11 @@ universal=1
[flake8]
max-line-length = 120
+exclude =
+ trollsched/version.py
+ versioneer.py
[coverage:run]
-omit =
+omit =
trollsched/version.py
versioneer.py
diff --git a/setup.py b/setup.py
index a08755b..ee4e128 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2014 - 2018 PyTroll Community
+# Copyright (c) 2014 - 2019 PyTroll Community
# Author(s):
@@ -21,27 +21,21 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""
-"""
-# workaround python bug: http://bugs.python.org/issue15881#msg170215
-import multiprocessing
+"""The setup file."""
+
from setuptools import setup
-import sys
-import versioneer
-requires = ['numpy', 'pyresample', 'pyorbital']
-test_requires = ['satpy']
+import versioneer
-if sys.version_info < (2, 7):
- # multiprocessing is not in the standard library
- requires.append('argparse')
+requires = ["numpy", "pyresample", "pyorbital", "pyyaml", "defusedxml"]
+test_requires = []
-setup(name='pytroll-schedule',
+setup(name="pytroll-schedule",
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
- description='Scheduling satellite passes in Python',
- author='Martin Raspaud',
- author_email='martin.raspaud@smhi.se',
+ description="Scheduling satellite passes in Python",
+ author="Martin Raspaud",
+ author_email="martin.raspaud@smhi.se",
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU General Public License v3 " +
@@ -50,12 +44,12 @@
"Programming Language :: Python",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Astronomy"],
- test_suite='trollsched.tests.suite',
+ test_suite="trollsched.tests.suite",
entry_points={
- 'console_scripts': ['schedule = trollsched.schedule:run',
- 'compare_scheds = trollsched.compare:run']},
- scripts=['generate_schedule_xmlpage.py'],
- packages=['trollsched'],
+ "console_scripts": ["schedule = trollsched.schedule:run",
+ "compare_scheds = trollsched.compare:run"]},
+ scripts=["generate_schedule_xmlpage.py"],
+ packages=["trollsched"],
tests_require=test_requires,
install_requires=requires,
zip_safe=False,
diff --git a/trollsched/__init__.py b/trollsched/__init__.py
index 8d5374c..fc9fd37 100644
--- a/trollsched/__init__.py
+++ b/trollsched/__init__.py
@@ -1,11 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2014, 2018 PyTroll Community
+# Copyright (c) 2014 - 2019 PyTroll Community
# Author(s):
# Martin Raspaud
+# Adam Dybbroe
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -20,12 +21,11 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Package file.
-"""
+"""Package file."""
-from .version import get_versions
-__version__ = get_versions()['version']
-del get_versions
+from . import version
+
+__version__ = version.get_versions()['version']
# shortest allowed pass in minutes
@@ -38,8 +38,12 @@
'avhrr': 2048,
'mhs': 90,
'amsua': 30,
+ 'mwhs2': 98,
+ 'atms': 96,
'ascat': 42,
- 'viirs': 6400
+ 'viirs': 6400,
+ # duplicate 'atms' entry removed ('atms': 96 is already defined above)
+ 'mwhs-2': 98
}
SATELLITE_NAMES = {'npp': 'Suomi NPP',
@@ -52,7 +56,8 @@
'metopb': 'Metop-B',
'metopa': 'Metop-A',
'noaa20': 'NOAA-20',
- 'fengyun3d': 'FY-3D'
+ 'fengyun3d': 'FY-3D',
+ 'fengyun3c': 'FY-3C'
}
INSTRUMENT = {'Suomi NPP': 'viirs',
@@ -65,4 +70,5 @@
'Metop-A': 'avhrr',
'Metop-B': 'avhrr',
'Metop-C': 'avhrr',
- 'FY-3D': 'avhrr'}
+ 'FY-3D': 'avhrr',
+ 'FY-3C': 'avhrr'}
diff --git a/trollsched/boundary.py b/trollsched/boundary.py
index 0aae5ec..d729a63 100644
--- a/trollsched/boundary.py
+++ b/trollsched/boundary.py
@@ -21,38 +21,48 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""The Boundary classes.
-"""
+"""The SwathBoundary class."""
import logging
import logging.handlers
import numpy as np
-
-from pyresample.boundary import Boundary
from pyorbital import geoloc, geoloc_instrument_definitions
+from pyresample.boundary import Boundary
logger = logging.getLogger(__name__)
-INSTRUMENT = {'avhrr/3': 'avhrr',
- 'avhrr/2': 'avhrr',
- 'avhrr-3': 'avhrr'}
+INSTRUMENT = {"avhrr/3": "avhrr",
+ "avhrr/2": "avhrr",
+ "avhrr-3": "avhrr",
+ "mwhs-2": "mwhs2"}
class SwathBoundary(Boundary):
-
- """Boundaries for satellite overpasses.
- """
+ """Boundaries for satellite overpasses."""
def get_instrument_points(self, overpass, utctime,
scans_nb, scanpoints, scan_step=1):
- """Get the boundary points for a given overpass.
- """
+ """Get the boundary points for a given overpass."""
+ instrument, scan_angle = self.get_instrument_and_angle(overpass)
+
+ sgeom = self.create_instrument_geometry(instrument, scans_nb, scanpoints, scan_step, scan_angle)
+
+ times = sgeom.times(utctime)
+
+ pixel_pos = geoloc.compute_pixels((self.orb.tle._line1,
+ self.orb.tle._line2),
+ sgeom, times)
+ lons, lats, alts = geoloc.get_lonlatalt(pixel_pos, times)
+
+ del alts
+ return (lons.reshape(-1, len(scanpoints)),
+ lats.reshape(-1, len(scanpoints)))
+
+ def get_instrument_and_angle(self, overpass):
+ """Get the instrument and angle for an overpass."""
instrument = overpass.instrument
- # logger.debug("Instrument: %s", str(instrument))
- # cheating at the moment.
- # scan_angle = 55.37
if instrument == "modis":
scan_angle = 55.0
instrument = "avhrr"
@@ -65,12 +75,18 @@ def get_instrument_points(self, overpass, utctime,
elif overpass.satellite == "noaa 16":
scan_angle = 55.25
instrument = "avhrr"
- elif instrument == "mersi2":
+ elif instrument.startswith("mersi"):
scan_angle = 55.4
instrument = "avhrr"
+ elif overpass.satellite.name.startswith("aws"):
+ scan_angle = 55.25
+ instrument = "avhrr"
else:
scan_angle = 55.25
+ return instrument, scan_angle
+ def create_instrument_geometry(self, instrument, scans_nb, scanpoints, scan_step, scan_angle):
+ """Create an instrument geometry object."""
instrument_fun = getattr(geoloc_instrument_definitions,
INSTRUMENT.get(instrument, instrument))
@@ -78,29 +94,31 @@ def get_instrument_points(self, overpass, utctime,
sgeom = instrument_fun(scans_nb, scanpoints, scan_angle=scan_angle, frequency=100)
elif instrument in ["ascat", ]:
sgeom = instrument_fun(scans_nb, scanpoints)
+ elif instrument in ["amsua", "mhs"]:
+ sgeom = instrument_fun(scans_nb, scanpoints)
+ elif instrument in ["mwhs2", ]:
+ sgeom = instrument_fun(scans_nb, scanpoints)
elif instrument in ["olci", ]:
sgeom = instrument_fun(scans_nb, scanpoints)
- elif instrument == 'viirs':
+ elif instrument == "viirs":
sgeom = instrument_fun(scans_nb, scanpoints, scan_step=scan_step)
+ elif instrument in ["atms", "mwhs-2"]:
+ sgeom = instrument_fun(scans_nb, scanpoints)
else:
logger.warning("Instrument not tested: %s", instrument)
sgeom = instrument_fun(scans_nb)
-
- times = sgeom.times(utctime)
-
- pixel_pos = geoloc.compute_pixels((self.orb.tle._line1,
- self.orb.tle._line2),
- sgeom, times)
- lons, lats, alts = geoloc.get_lonlatalt(pixel_pos, times)
-
- del alts
- return (lons.reshape(-1, len(scanpoints)),
- lats.reshape(-1, len(scanpoints)))
+ return sgeom
def __init__(self, overpass, scan_step=50, frequency=200):
- # compute area covered by pass
+ """Initialize the boundary.
- Boundary.__init__(self)
+ Arguments:
+ overpass: the overpass to use
+ scan_step: how many scans we should skip for a smaller boundary
+ frequency: how much to decimate the top and bottom rows of the boundary.
+ """
+ # compute area covered by pass
+ super().__init__()
self.overpass = overpass
self.orb = overpass.orb
@@ -111,43 +129,28 @@ def __init__(self, overpass, scan_step=50, frequency=200):
(overpass.falltime - overpass.risetime).microseconds / 1000000.0)
logger.debug("Instrument = %s", self.overpass.instrument)
- if self.overpass.instrument == 'viirs':
- sec_scan_duration = 1.779166667
- along_scan_reduce_factor = 1
- elif self.overpass.instrument.startswith("avhrr"):
- sec_scan_duration = 1./6.
- along_scan_reduce_factor = 0.1
- elif self.overpass.instrument == 'ascat':
- sec_scan_duration = 3.74747474747
- along_scan_reduce_factor = 1
- # Overwrite the scan step
- scan_step = 1
- else:
- # Assume AVHRR!
- logmsg = ("Instrument scan duration not known. Setting it to AVHRR. Instrument: ")
- logger.info(logmsg + "%s", str(self.overpass.instrument))
- sec_scan_duration = 1./6.
- along_scan_reduce_factor = 0.1
+ scan_step, sec_scan_duration, along_scan_reduce_factor = self.get_steps_and_duration(scan_step)
# From pass length in seconds and the seconds for one scan derive the number of scans in the swath:
- scans_nb = scanlength_seconds/sec_scan_duration * along_scan_reduce_factor
+ scans_nb = scanlength_seconds / sec_scan_duration * along_scan_reduce_factor
# Devide by the scan step to a reduced number of scans:
- scans_nb = np.floor(scans_nb/scan_step)
+ scans_nb = np.floor(scans_nb / scan_step)
scans_nb = int(max(scans_nb, 1))
sides_lons, sides_lats = self.get_instrument_points(self.overpass,
overpass.risetime,
scans_nb,
- np.array([0, self.overpass.number_of_fovs-1]),
+ np.array([0, self.overpass.number_of_fovs - 1]),
scan_step=scan_step)
side_shape = sides_lons[::-1, 0].shape[0]
nmod = 1
+
if side_shape != scans_nb:
nmod = side_shape // scans_nb
- logger.debug('Number of scan lines (%d) does not match number of scans (%d)',
+ logger.debug("Number of scan lines (%d) does not match number of scans (%d)",
side_shape, scans_nb)
- logger.info('Take every %d th element on the sides...', nmod)
+ logger.info("Take every %d th element on the sides...", nmod)
self.left_lons = sides_lons[::-1, 0][::nmod]
self.left_lats = sides_lats[::-1, 0][::nmod]
@@ -163,7 +166,7 @@ def __init__(self, overpass, scan_step=50, frequency=200):
else:
start_idx = 0
- reduced = np.hstack([0, mid_range[start_idx::], maxval - 1]).astype('int')
+ reduced = np.hstack([0, mid_range[start_idx::], maxval - 1]).astype("int")
lons, lats = self.get_instrument_points(self.overpass,
overpass.falltime,
@@ -182,26 +185,80 @@ def __init__(self, overpass, scan_step=50, frequency=200):
self.top_lons = lons[0]
self.top_lats = lats[0]
+ return
+
+ def get_steps_and_duration(self, scan_step):
+ """Get the steps and duration for the instrument."""
+ if self.overpass.instrument == "viirs":
+ sec_scan_duration = 1.779166667
+ along_scan_reduce_factor = 1
+ elif self.overpass.instrument.startswith("avhrr"):
+ sec_scan_duration = 1. / 6.
+ along_scan_reduce_factor = 0.1
+ elif self.overpass.instrument == "ascat":
+ sec_scan_duration = 3.74747474747
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 1
+ elif self.overpass.instrument == "amsua":
+ sec_scan_duration = 8.
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 1
+ elif self.overpass.instrument == "mhs":
+ sec_scan_duration = 8./3.
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 1
+ elif self.overpass.instrument == "mwhs2":
+ sec_scan_duration = 8./3.
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 1
+ elif self.overpass.instrument == "olci":
+ # 3 minutes of data is 4091 300meter lines:
+ sec_scan_duration = 0.04399902224395014
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 100
+ elif self.overpass.instrument == "atms":
+ sec_scan_duration = 8/3.
+ along_scan_reduce_factor = 1
+ # Overwrite the scan step
+ scan_step = 1
+
+ else:
+ # Assume AVHRR!
+ logmsg = ("Instrument scan duration not known. Setting it to AVHRR. Instrument: ")
+ logger.info(logmsg + "%s", str(self.overpass.instrument))
+ sec_scan_duration = 1. / 6.
+ along_scan_reduce_factor = 0.1
+ return scan_step, sec_scan_duration, along_scan_reduce_factor
+
def decimate(self, ratio):
- l = len(self.top_lons)
- start = (l % ratio) / 2
- points = np.concatenate(([0], np.arange(start, l, ratio), [l - 1]))
+ """Remove points from the boundary."""
+ length = len(self.top_lons)
+ start = (length % ratio) / 2
+ points = np.concatenate(([0], np.arange(start, length, ratio), [length - 1]))
self.top_lons = self.top_lons[points]
self.top_lats = self.top_lats[points]
self.bottom_lons = self.bottom_lons[points]
self.bottom_lats = self.bottom_lats[points]
- l = len(self.right_lons)
- start = (l % ratio) / 2
- points = np.concatenate(([0], np.arange(start, l, ratio), [l - 1]))
+ length = len(self.right_lons)
+ start = (length % ratio) / 2
+ points = np.concatenate(([0], np.arange(start, length, ratio), [length - 1]))
self.right_lons = self.right_lons[points]
self.right_lats = self.right_lats[points]
self.left_lons = self.left_lons[points]
self.left_lats = self.left_lats[points]
+ return
+
def contour(self):
+ """Get the contour lon/lats."""
lons = np.concatenate((self.top_lons,
self.right_lons[1:-1],
self.bottom_lons,
diff --git a/trollsched/combine.py b/trollsched/combine.py
index d9c913e..666f76d 100644
--- a/trollsched/combine.py
+++ b/trollsched/combine.py
@@ -114,9 +114,10 @@ def count_neq_passes(pl):
wl.append(0)
else:
wl.append(n[1] or grl[s].weight(pl[s].index(p[0]) + 1, pl[s].index(n[0]) + 1))
- except:
+ except Exception:
logger.error(
- "Collecting weights: stat %d - parnode %s %s - newnode %s %s", s, parnode, p, newnode, n, exc_info=1)
+ "Collecting weights: stat %d - parnode %s %s - newnode %s %s",
+ s, parnode, p, newnode, n, exc_info=1)
raise
# Apply vertix-count to the sum of collected weights.
# vertix-count: number of vertices with reference to same
@@ -198,7 +199,7 @@ def overlap_any(this, test_list):
# current passes node.
try:
gn = g.neighbours(passes_list[statnr].index(p[0]) + 1)
- except:
+ except Exception:
print("len(passes_list)", len(passes_list), " len(graph_set)",
len(graph_set), " statnr", statnr, " p", p)
print("passes_list", passes_list)
@@ -281,7 +282,7 @@ def overlap_any(this, test_list):
else:
print("uh-oh, something curious happened ...")
- except:
+ except Exception:
print("\nCATCH\ngn:", gn, "-> n", n, " col:", col,
"-> cx", cx, "statnr", statnr, "statnr+i", statnr + 1)
print("len(passes_list -n -cx)", len(passes_list[statnr]), len(passes_list[statnr + 1]))
@@ -410,7 +411,8 @@ def main():
# print_matrix(graph[station].adj_matrix, ly=5)
# print_matrix(graph[station].weight_matrix, ly=5, lx=-1)
-# allpasses[station] = get_passes_from_xml_file(os.path.join(opts.report, "acquisition-schedule-report." + station + ".xml"))
+# allpasses[station] = get_passes_from_xml_file(os.path.join(opts.report,
+# "acquisition-schedule-report." + station + ".xml"))
# print len(allpasses[station]),allpasses[station]
# for v in graph[station].neighbours(1):
@@ -435,7 +437,7 @@ def main():
combined_stations(opts, pattern, station_list, graph, allpasses, start_time, start, forward)
- except:
+ except Exception:
logger.exception("Something wrong happened!")
raise
diff --git a/trollsched/compare.py b/trollsched/compare.py
index 53d6716..2fa7914 100644
--- a/trollsched/compare.py
+++ b/trollsched/compare.py
@@ -20,49 +20,47 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Compare the request file and the confirmation file.
-"""
+"""Compare the request file and the confirmation file."""
+import glob
import logging
import logging.handlers
-import sys
import os
-import glob
logger = logging.getLogger(__name__)
+
def xml_compare(x1_, x2_, reporter=None, skiptags=None):
- """Compare xml objects.
- """
+ """Compare xml objects."""
if x1_.tag != x2_.tag:
if reporter:
- reporter('Tags do not match: %s and %s' % (x1_.tag, x2_.tag))
+ reporter("Tags do not match: %s and %s" % (x1_.tag, x2_.tag))
return False
for name, value in x1_.attrib.items():
if x2_.attrib.get(name) != value:
if reporter:
- reporter('Attributes do not match: %s=%r, %s=%r'
+ reporter("Attributes do not match: %s=%r, %s=%r"
% (name, value, name, x2_.attrib.get(name)))
return False
for name in x2_.attrib.keys():
if name not in x1_.attrib:
if reporter:
- reporter('x2_ has an attribute x1_ is missing: %s'
+ reporter("x2_ has an attribute x1_ is missing: %s"
% name)
return False
if not text_compare(x1_.text, x2_.text):
if reporter:
- reporter('text: %r != %r' % (x1_.text, x2_.text))
+ reporter("text: %r != %r" % (x1_.text, x2_.text))
return False
if not text_compare(x1_.tail, x2_.tail):
if reporter:
- reporter('tail: %r != %r' % (x1_.tail, x2_.tail))
+ reporter("tail: %r != %r" % (x1_.tail, x2_.tail))
return False
cl1 = x1_.getchildren()
cl2 = x2_.getchildren()
if len(cl1) != len(cl2):
if reporter:
- reporter('not the same number of passes, %i != %i'
+ reporter("not the same number of passes, %i != %i"
% (len(cl1), len(cl2)))
return False
i = 0
@@ -72,25 +70,24 @@ def xml_compare(x1_, x2_, reporter=None, skiptags=None):
continue
if not xml_compare(c1, c2, reporter=reporter):
if reporter:
- reporter('element %i do not match: %s'
+ reporter("element %i do not match: %s"
% (i, c1.tag))
return False
return True
def text_compare(t1_, t2_):
- """Compare text fields.
- """
+ """Compare text fields."""
if not t1_ and not t2_:
return True
- if t1_ == '*' or t2_ == '*':
+ if t1_ == "*" or t2_ == "*":
return True
- return (t1_ or '').strip() == (t2_ or '').strip()
+ return (t1_ or "").strip() == (t2_ or "").strip()
+
def compare(file1, file2):
- """Compare two xml files, request and confirmation.
- """
- import xml.etree.ElementTree as ET
+ """Compare two xml files, request and confirmation."""
+ import defusedxml.ElementTree as ET
xml1 = ET.parse(file1).getroot()
xml2 = ET.parse(file2).getroot()
if xml_compare(xml1, xml2, logger.error,
@@ -120,6 +117,7 @@ def compare(file1, file2):
# self.process_IN_CLOSE_WRITE(event)
def run():
+ """Run the comparison."""
import argparse
parser = argparse.ArgumentParser()
@@ -139,7 +137,7 @@ def run():
" corresponding confirmation, from the given directory")
parser.add_argument("-c", "--confirmation",
help="directory for the confirmation files")
-
+
opts = parser.parse_args()
if opts.log:
@@ -150,16 +148,15 @@ def run():
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
" %(name)s] %(message)s",
- '%Y-%m-%d %H:%M:%S'))
+ "%Y-%m-%d %H:%M:%S"))
if opts.verbose:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
handler.setLevel(loglevel)
- logging.getLogger('').setLevel(loglevel)
- logging.getLogger('').addHandler(handler)
-
+ logging.getLogger("").setLevel(loglevel)
+ logging.getLogger("").addHandler(handler)
if opts.mail:
mhandler = logging.handlers.SMTPHandler("localhost",
@@ -167,7 +164,7 @@ def run():
opts.mail,
"Scheduler confirmation")
mhandler.setLevel(logging.WARNING)
- logging.getLogger('').addHandler(mhandler)
+ logging.getLogger("").addHandler(mhandler)
logger = logging.getLogger("compare")
logger.debug("DEBUG on")
@@ -184,8 +181,6 @@ def run():
# notifier.loop()
-
-
if opts.most_recent:
logger.debug("looking for most recent file in " +
os.path.join(opts.most_recent, "*request*.xml"))
@@ -195,13 +190,13 @@ def run():
reqdir, newfile = os.path.split(newest)
confdir = opts.confirmation or reqdir
confname = os.path.join(confdir,
- newfile[:-15] + "confirmation" + newfile[-8:])
+ newfile[:-15] + "confirmation" + newfile[-8:])
logger.debug("against " + confname)
try:
compare(newest, confname)
except IOError:
- logger.exception("Something went wrong!")
+ logger.exception("Something went wrong!")
-if __name__ == '__main__':
+if __name__ == "__main__":
run()
diff --git a/trollsched/drawing.py b/trollsched/drawing.py
index 0ab663b..8876746 100644
--- a/trollsched/drawing.py
+++ b/trollsched/drawing.py
@@ -1,11 +1,11 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2018 Adam.Dybbroe
+# Copyright (c) 2018 - 2020 Pytroll Community
# Author(s):
-# Adam.Dybbroe
+# Adam.Dybbroe
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -40,6 +40,11 @@
logger.warning("Failed loading Cartopy, will try Basemap instead")
BASEMAP_NOT_CARTOPY = True
+if not BASEMAP_NOT_CARTOPY:
+ import cartopy
+ cartopy.config['pre_existing_data_dir'] = os.environ.get(
+ "CARTOPY_PRE_EXISTING_DATA_DIR", cartopy.config['pre_existing_data_dir'])
+
class MapperBasemap(object):
"""A class to generate nice plots with basemap.
@@ -143,9 +148,19 @@ def save_fig(pass_obj,
overwrite=False,
labels=None,
extension=".png",
- outline='-r'):
+ outline='-r',
+ plot_parameters=None,
+ plot_title=None,
+ poly_color=None):
"""Save the pass as a figure. Filename is automatically generated.
"""
+ poly = poly or []
+ poly_color = poly_color or []
+ if not isinstance(poly, (list, tuple)):
+ poly = [poly]
+ if not isinstance(poly_color, (list, tuple)):
+ poly_color = [poly_color]
+
mpl.use('Agg')
import matplotlib.pyplot as plt
plt.clf()
@@ -156,30 +171,42 @@ def save_fig(pass_obj,
if not os.path.exists(directory):
logger.debug("Create plot dir " + directory)
os.makedirs(directory)
- filename = os.path.join(
- directory,
- (rise + pass_obj.satellite.name.replace(" ", "_") + fall + extension))
-
- pass_obj.fig = filename
- if not overwrite and os.path.exists(filename):
- return filename
- logger.debug("Filename = <%s>", filename)
- with Mapper() as mapper:
+ filename = '{rise}_{satname}_{instrument}_{fall}{extension}'.format(rise=rise,
+ satname=pass_obj.satellite.name.replace(
+ " ", "_"),
+ instrument=pass_obj.instrument.replace(
+ "/", "-"),
+ fall=fall, extension=extension)
+ filepath = os.path.join(directory, filename)
+ pass_obj.fig = filepath
+ if not overwrite and os.path.exists(filepath):
+ return filepath
+
+ logger.debug("Filename = <%s>", filepath)
+ plot_parameters = plot_parameters or {}
+ with Mapper(**plot_parameters) as mapper:
mapper.nightshade(pass_obj.uptime, alpha=0.2)
+ for i, polygon in enumerate(poly):
+ try:
+ col = poly_color[i]
+ except IndexError:
+ col = '-b'
+ draw(polygon, mapper, col)
logger.debug("Draw: outline = <%s>", outline)
draw(pass_obj.boundary.contour_poly, mapper, outline)
- if poly is not None:
- draw(poly, mapper, "-b")
logger.debug("Title = %s", str(pass_obj))
- plt.title(str(pass_obj))
+ if not plot_title:
+ plt.title(str(pass_obj))
+ else:
+ plt.title(plot_title)
for label in labels or []:
plt.figtext(*label[0], **label[1])
logger.debug("Save plot...")
- plt.savefig(filename)
+ plt.savefig(filepath)
logger.debug("Return...")
- return filename
+ return filepath
def show(pass_obj,
diff --git a/trollsched/graph.py b/trollsched/graph.py
index 2e40cbe..c567cce 100644
--- a/trollsched/graph.py
+++ b/trollsched/graph.py
@@ -20,18 +20,20 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Graph manipulation.
-"""
+"""Graph manipulation."""
import numpy as np
-class Graph(object):
+
+class Graph():
+ """A graph class."""
def __init__(self, n_vertices=None, adj_matrix=None):
+ """Set up the graph."""
if n_vertices is not None:
self.order = n_vertices
self.vertices = np.arange(self.order)
- self.adj_matrix = np.zeros((self.order, self.order), np.bool)
- self.weight_matrix = np.zeros((self.order, self.order), np.float)
+ self.adj_matrix = np.zeros((self.order, self.order), bool)
+ self.weight_matrix = np.zeros((self.order, self.order), float)
elif adj_matrix is not None:
self.order = adj_matrix.shape[0]
self.vertices = np.arange(self.order)
@@ -39,26 +41,27 @@ def __init__(self, n_vertices=None, adj_matrix=None):
self.weight_matrix = np.zeros_like(adj_matrix)
def weight(self, u, v):
- """weight of the *u*-*v* edge.
- """
+ """Weight of the *u*-*v* edge."""
return self.weight_matrix[u, v]
def neighbours(self, v):
+ """Find neighbours."""
return self.vertices[self.adj_matrix[v, :] != 0]
def add_edge(self, v1, v2, weight=1):
+ """Add an edge."""
self.weight_matrix[v1, v2] = weight
self.weight_matrix[v2, v1] = weight
self.adj_matrix[v1, v2] = True
self.adj_matrix[v2, v1] = True
def add_arc(self, v1, v2, weight=1):
+ """Add an arc."""
self.adj_matrix[v1, v2] = True
self.weight_matrix[v1, v2] = weight
def bron_kerbosch(self, r, p, x):
- """Get the maximal cliques.
- """
+ """Get the maximal cliques."""
if len(p) == 0 and len(x) == 0:
yield r
for v in p:
@@ -70,7 +73,9 @@ def bron_kerbosch(self, r, p, x):
x = x | set((v, ))
def dag_longest_path(self, v1, v2=None):
- """Give the longest path from *v1* to all other vertices or *v2* if
+ """Find the longest path between v1 and v2.
+
+ Give the longest path from *v1* to all other vertices or *v2* if
specified. Assumes the vertices are sorted topologically and that the
graph is directed and acyclic (DAG).
"""
@@ -80,7 +85,9 @@ def dag_longest_path(self, v1, v2=None):
return dist, path
def dag_shortest_path(self, v1, v2=None):
- """Give the sortest path from *v1* to all other vertices or *v2* if
+ """Find the shortest path between v1 and v2.
+
+ Give the shortest path from *v1* to all other vertices or *v2* if
specified. Assumes the vertices are sorted topologically and that the
graph is directed and acyclic (DAG). *v1* and *v2* are the indices of
the vertices in the vertice list.
@@ -110,11 +117,13 @@ def dag_shortest_path(self, v1, v2=None):
return dists[v2], path
def save(self, filename):
+ """Save a file."""
np.savez_compressed(filename,
adj=self.adj_matrix,
weights=self.weight_matrix)
def load(self, filename):
+ """Load a file."""
stuff = np.load(filename)
self.adj_matrix = stuff["adj"]
self.weight_matrix = stuff["weights"]
@@ -122,10 +131,9 @@ def load(self, filename):
self.vertices = np.arange(self.order)
def export(self, filename="./sched.gv", labels=None):
- """dot sched.gv -Tpdf -otruc.pdf
- """
+ """dot sched.gv -Tpdf -otruc.pdf."""
with open(filename, "w") as fd_:
- fd_.write("digraph schedule { \n size=\"80, 10\";\n center=\"1\";\n")
+ fd_.write('digraph schedule { \n size="80, 10";\n center="1";\n')
for v1 in range(1, self.order - 1):
for v2 in range(1, self.order - 1):
if self.adj_matrix[v1, v2]:
diff --git a/trollsched/helper_functions.py b/trollsched/helper_functions.py
index dd2886d..f63712f 100644
--- a/trollsched/helper_functions.py
+++ b/trollsched/helper_functions.py
@@ -60,12 +60,12 @@ def sun_pos(dt=None):
axial_tilt = 23.4
ref_solstice = datetime(2016, 6, 21, 22, 22)
days_per_year = 365.2425
- seconds_per_day = 24*60*60.0
+ seconds_per_day = 24 * 60 * 60.0
- days_since_ref = (dt - ref_solstice).total_seconds()/seconds_per_day
- lat = axial_tilt*np.cos(2*np.pi*days_since_ref/days_per_year)
+ days_since_ref = (dt - ref_solstice).total_seconds() / seconds_per_day
+ lat = axial_tilt * np.cos(2 * np.pi * days_since_ref / days_per_year)
sec_since_midnight = (dt - datetime(dt.year, dt.month, dt.day)).seconds
- lng = -(sec_since_midnight/seconds_per_day - 0.5)*360
+ lng = -(sec_since_midnight / seconds_per_day - 0.5) * 360
return lat, lng
diff --git a/trollsched/satpass.py b/trollsched/satpass.py
index c735586..858162f 100644
--- a/trollsched/satpass.py
+++ b/trollsched/satpass.py
@@ -20,8 +20,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Satellite passes.
-"""
+"""Satellite passes."""
import ftplib
import glob
@@ -29,39 +28,36 @@
import logging.handlers
import operator
import os
-import six
import socket
+from datetime import datetime, timedelta
from functools import reduce as fctools_reduce
-try:
- from urllib.parse import urlparse
-except ImportError:
- from urlparse import urlparse
+from tempfile import gettempdir, mkstemp
+from urllib.parse import urlparse
-from datetime import datetime, timedelta
-from tempfile import mkstemp
import numpy as np
-
from pyorbital import orbital, tlefile
from pyresample.boundary import AreaDefBoundary
+
+from trollsched import MIN_PASS, NOAA20_NAME, NUMBER_OF_FOVS
from trollsched.boundary import SwathBoundary
-from trollsched import (MIN_PASS, NOAA20_NAME, NUMBER_OF_FOVS)
logger = logging.getLogger(__name__)
-VIIRS_PLATFORM_NAMES = ['SUOMI NPP', 'SNPP',
- 'NOAA-20', 'NOAA 20']
-MERSI2_PLATFORM_NAMES = ['FENGYUN 3D', 'FENGYUN-3D', 'FY-3D',
- 'FENGYUN 3E', 'FENGYUN-3E', 'FY-3E']
+VIIRS_PLATFORM_NAMES = ["SUOMI NPP", "SNPP",
+ "NOAA-20", "NOAA 20"]
+MERSI_PLATFORM_NAMES = ["FENGYUN 3C", "FENGYUN-3C", "FY-3C"]
+MERSI2_PLATFORM_NAMES = ["FENGYUN 3D", "FENGYUN-3D", "FY-3D",
+ "FENGYUN 3E", "FENGYUN-3E", "FY-3E"]
-class SimplePass(object):
- """A pass: satellite, risetime, falltime, (orbital)
- """
+class SimplePass:
+ """A pass: satellite, risetime, falltime, (orbital)."""
buffer = timedelta(minutes=2)
def __init__(self, satellite, risetime, falltime):
- if not hasattr(satellite, 'name'):
+ """Initialize the simple pass."""
+ if not hasattr(satellite, "name"):
from trollsched.schedule import Satellite
self.satellite = Satellite(satellite, 0, 0)
else:
@@ -74,20 +70,25 @@ def __init__(self, satellite, risetime, falltime):
self.fig = None
def __hash__(self):
+ """Hash the pass."""
return super.__hash__(self)
- def overlaps(self, other, delay=timedelta(seconds=0)):
- """Check if two passes overlap in time.
- """
+ def overlaps(self, other, delay=None):
+ """Check if two passes overlap in time."""
+ if delay is None:
+ delay = timedelta(seconds=0)
return ((self.risetime < other.falltime + delay) and (self.falltime + delay > other.risetime))
def __lt__(self, other):
+ """Check if this pass starts earlier than the other one."""
return self.uptime < other.uptime
def __gt__(self, other):
+ """Check if this pass startss later than the other one."""
return self.uptime > other.uptime
def __cmp__(self, other):
+ """Compare two passes."""
if self.uptime < other.uptime:
return -1
if self.uptime > other.uptime:
@@ -105,64 +106,69 @@ def __eq__(self, other):
other, Pass):
return (self.satellite.name == other.satellite.name and
self.orb.get_orbit_number(self.risetime) == other.orb.get_orbit_number(other.risetime))
- tol = timedelta(seconds=1)
- return (other is not None and abs(self.risetime - other.risetime) < tol and
- abs(self.falltime - other.falltime) < tol and
- self.satellite == other.satellite)
+ return (other is not None and
+ self.satellite.name == other.satellite.name and
+ self.overlaps(other))
def __str__(self):
+ """Give a string version of the pass."""
return (self.satellite.name + " " + self.risetime.isoformat() + " " +
self.falltime.isoformat())
def __repr__(self):
+ """Represent the pass."""
return str(self)
def duration(self):
- """Get the duration of an overpass.
- """
+ """Get the duration of an overpass."""
return self.falltime - self.risetime
def seconds(self):
- """Get the duration of an overpass.
- """
+ """Get the duration of an overpass."""
duration = self.duration()
return (duration.days * 24 * 60 * 60 + duration.seconds +
duration.microseconds * 1e-6)
class Pass(SimplePass):
- """A pass: satellite, risetime, falltime, (orbital)
- """
+ """A pass: satellite, risetime, falltime, (orbital)."""
def __init__(self, satellite, risetime, falltime, **kwargs):
+ """Initialize the pass."""
SimplePass.__init__(self, satellite, risetime, falltime)
logger.debug("kwargs: %s", str(kwargs))
- orb = kwargs.get('orb', None)
- uptime = kwargs.get('uptime', None)
- instrument = kwargs.get('instrument', None)
- tle1 = kwargs.get('tle1', None)
- tle2 = kwargs.get('tle2', None)
+ orb = kwargs.get("orb", None)
+ uptime = kwargs.get("uptime", None)
+ instrument = kwargs.get("instrument", None)
+ tle1 = kwargs.get("tle1", None)
+ tle2 = kwargs.get("tle2", None)
logger.debug("instrument: %s", str(instrument))
if isinstance(instrument, (list, set)):
- if 'avhrr' in instrument:
- logger.warning("Instrument is a sequence Assume avhrr...")
- instrument = 'avhrr'
- elif 'viirs' in instrument:
+ if "avhrr" in instrument:
+ logger.warning("Instrument is a sequence! Assume avhrr...")
+ instrument = "avhrr"
+ elif "viirs" in instrument:
logger.warning("Instrument is a sequence! Assume viirs...")
- instrument = 'viirs'
- elif 'modis' in instrument:
+ instrument = "viirs"
+ elif "modis" in instrument:
logger.warning("Instrument is a sequence! Assume modis...")
- instrument = 'modis'
+ instrument = "modis"
+ elif "mersi" in instrument:
+ logger.warning("Instrument is a sequence! Assume mersi...")
+ instrument = "mersi"
+ elif "mersi-2" in instrument:
+ logger.warning("Instrument is a sequence! Assume mersi-2...")
+ instrument = "mersi-2"
else:
raise TypeError("Instrument is a sequence! Don't know which one to choose!")
default = NUMBER_OF_FOVS.get(instrument, 2048)
- self.number_of_fovs = kwargs.get('number_of_fovs', default)
+ self.number_of_fovs = kwargs.get("number_of_fovs", default)
# The frequency shouldn't actualy depend on the number of FOVS along a scanline should it!?
# frequency = kwargs.get('frequency', int(self.number_of_fovs / 4))
- frequency = kwargs.get('frequency', 300)
+ frequency = kwargs.get("frequency", 300)
self.station = None
self.max_elev = None
@@ -175,18 +182,19 @@ def __init__(self, satellite, risetime, falltime, **kwargs):
try:
self.orb = orbital.Orbital(satellite, line1=tle1, line2=tle2)
except KeyError as err:
- logger.debug('Failed in PyOrbital: %s', str(err))
+ logger.debug("Failed in PyOrbital: %s", str(err))
self.orb = orbital.Orbital(
NOAA20_NAME.get(satellite, satellite),
line1=tle1,
line2=tle2)
- logger.info('Using satellite name %s instead',
+ logger.info("Using satellite name %s instead",
str(NOAA20_NAME.get(satellite, satellite)))
self._boundary = None
@property
def boundary(self):
+ """Get the boundary of the swath."""
if not self._boundary:
self._boundary = SwathBoundary(self, frequency=self.frequency)
return self._boundary
@@ -196,8 +204,7 @@ def boundary(self, value):
self._boundary = SwathBoundary(self, frequency=self.frequency)
def pass_direction(self):
- """Get the direction of the pass in (ascending, descending).
- """
+ """Get the direction of the pass in (ascending, descending)."""
start_lat = self.orb.get_lonlatalt(self.risetime)[1]
end_lat = self.orb.get_lonlatalt(self.falltime)[1]
@@ -207,8 +214,7 @@ def pass_direction(self):
return "ascending"
def slsearch(self, sublat):
- """Find sublatitude.
- """
+ """Find sublatitude."""
def nadirlat(minutes):
return self.orb.get_lonlatalt(self.risetime + timedelta(
@@ -230,9 +236,7 @@ def get_root(fun, start, end):
return self.risetime + timedelta(minutes=sublat_mins)
def area_coverage(self, area_of_interest):
- """Get the ratio of coverage (between 0 and 1) of the pass with the area
- of interest.
- """
+ """Get the ratio of coverage (between 0 and 1) of the pass with the area of interest."""
try:
area_boundary = area_of_interest.poly
except AttributeError:
@@ -245,8 +249,109 @@ def area_coverage(self, area_of_interest):
return 0
return inter.area() / area_boundary.area()
+ def generate_metno_xml(self, coords, root):
+ """Generate a metno xml schedule."""
+ import xml.etree.ElementTree as ET # noqa because defusedxml has no SubElement
+
+ asimuth_at_max_elevation, max_elevation = self.orb.get_observer_look(self.uptime, *coords)
+ pass_direction = self.pass_direction().capitalize()[:1]
+ # anl = self.orb.get_lonlatalt(self.orb.get_last_an_time(self.risetime))[0] % 360
+ asimuth_at_aos, aos_elevation = self.orb.get_observer_look(self.risetime, *coords)
+ orbit = self.orb.get_orbit_number(self.risetime)
+ # aos_epoch=int((self.risetime-datetime(1970,1,1)).total_seconds())
+ sat_lon, sat_lat, alt = self.orb.get_lonlatalt(self.risetime)
+
+ ovpass = ET.SubElement(root, "pass")
+ ovpass.set("satellite", self.satellite.name)
+ ovpass.set("aos", self.risetime.strftime("%Y%m%d%H%M%S"))
+ ovpass.set("los", self.falltime.strftime("%Y%m%d%H%M%S"))
+ ovpass.set("orbit", "{:d}".format(orbit))
+ ovpass.set("max-elevation", "{:.3f}".format(max_elevation))
+ ovpass.set("asimuth-at-max-elevation", "{:.3f}".format(asimuth_at_max_elevation))
+ ovpass.set("asimuth-at-aos", "{:.3f}".format(asimuth_at_aos))
+ ovpass.set("pass-direction", pass_direction)
+ ovpass.set("satellite-lon-at-aos", "{:.3f}".format(sat_lon))
+ ovpass.set("satellite-lat-at-aos", "{:.3f}".format(sat_lat))
+ ovpass.set("tle-epoch", self.orb.orbit_elements.epoch.astype(datetime).strftime("%Y%m%d%H%M%S.%f"))
+ if self.fig:
+ ovpass.set("figure", self.fig)
+
+ return True
+
+ def print_meos(self, coords, line_no):
+ """No. Date Satellite Orbit Max EL AOS Ovlp LOS Durtn Az(AOS/MAX)."""
+ asimuth_at_max_elevation, max_elevation = self.orb.get_observer_look(self.uptime, *coords)
+ pass_direction = self.pass_direction().capitalize()[:1]
+ # anl = self.orb.get_lonlatalt(self.orb.get_last_an_time(self.risetime))[0] % 360
+ asimuth_at_aos, aos_elevation = self.orb.get_observer_look(self.risetime, *coords)
+ orbit = self.orb.get_orbit_number(self.risetime)
+ aos_epoch = int((self.risetime - datetime(1970, 1, 1)).total_seconds())
+ sat_lon, sat_lat, alt = self.orb.get_lonlatalt(self.risetime)
+
+ dur_secs = (self.falltime - self.risetime).seconds
+ dur_hours, dur_reminder = divmod(dur_secs, 3600)
+ dur_minutes, dur_seconds = divmod(dur_reminder, 60)
+ duration = "{:0>2}:{:0>2}".format(dur_minutes, dur_seconds)
+
+ satellite_meos_translation = {"NOAA 19": "NOAA_19",
+ "NOAA 18": "NOAA_18",
+ "NOAA 15": "NOAA_15",
+ "METOP-A": "M02",
+ "METOP-B": "M01",
+ "FENGYUN 3A": "FENGYUN-3A",
+ "FENGYUN 3B": "FENGYUN-3B",
+ "FENGYUN 3C": "FENGYUN-3C",
+ "SUOMI NPP": "NPP"}
+
+ import hashlib
+
+ pass_key = hashlib.md5(("{:s}|{:d}|{:d}|{:.3f}|{:.3f}". # noqa : md5 is insecure, but not sensitive here.
+ format(satellite_meos_translation.get(self.satellite.name.upper(),
+ self.satellite.name.upper()),
+ int(orbit),
+ aos_epoch,
+ sat_lon,
+ sat_lat)).encode("utf-8")).hexdigest()
+
+ line_list = [" {line_no:>2}",
+ "{date}",
+ "{satellite:<10}",
+ "{orbit:>5}",
+ "{elevation:>6.3f} ",
+ "{risetime}",
+ "{overlap:<5s}",
+ "{falltime}",
+ "{duration}",
+ "{asimuth_at_aos:>5.1f}",
+ "{asimuth_at_max:>5.1f}",
+ "-- Undefined(Scheduling not done {aos_epoch} )",
+ "{passkey}",
+ "{pass_direction}"
+ ]
+
+ line = " ".join(line_list).format(
+ # line_no=line_no,
+ line_no=1,
+ date=self.risetime.strftime("%Y%m%d"),
+ satellite=satellite_meos_translation.get(self.satellite.name.upper(),
+ self.satellite.name.upper()),
+ orbit=orbit,
+ elevation=max_elevation,
+ risetime=self.risetime.strftime("%H:%M:%S"),
+ overlap="n/a",
+ falltime=self.falltime.strftime("%H:%M:%S"),
+ duration=duration,
+ asimuth_at_aos=asimuth_at_aos,
+ asimuth_at_max=asimuth_at_max_elevation,
+ aos_epoch=aos_epoch,
+ passkey=pass_key,
+ pass_direction=pass_direction)
+ return line
+
def print_vcs(self, coords):
- """Should look like this::
+ """Print a vcs/scisys/cgi schedule.
+
+ Should look like this::
# SCName RevNum Risetime Falltime Elev Dura ANL Rec Dir Man Ovl OvlSCName
# OvlRev OvlRisetime OrigRisetime OrigFalltime OrigDuration
@@ -255,7 +360,6 @@ def print_vcs(self, coords):
"""
-
max_elevation = self.orb.get_observer_look(self.uptime, *coords)[1]
anl = self.orb.get_lonlatalt(self.orb.get_last_an_time(
self.risetime))[0] % 360
@@ -300,56 +404,54 @@ def get_aqua_terra_dumps(start_time,
satorb,
sat,
dump_url=None):
- """
- Get the Terra and Aqua overpasses taking into account the fact that when
- there are global dumps there is no direct broadcast
- """
+ """Get the Terra and Aqua overpasses.
+ We take into account the fact that when
+ there are global dumps there is no direct broadcast.
+ """
# Get the list of aqua/terra dump info:
dump_info_list = get_aqua_terra_dumpdata_from_ftp(sat, dump_url)
dumps = []
for elem in dump_info_list:
- if elem['los'] >= start_time and elem['aos'] <= end_time:
- uptime = elem['aos'] + (elem['los'] - elem['aos']) / 2
- overpass = Pass(sat, elem['aos'], elem['los'],
+ if elem["los"] >= start_time and elem["aos"] <= end_time:
+ uptime = elem["aos"] + (elem["los"] - elem["aos"]) / 2
+ overpass = Pass(sat, elem["aos"], elem["los"],
orb=satorb, uptime=uptime, instrument="modis")
- overpass.station = elem['station']
- overpass.max_elev = elem['elev']
+ overpass.station = elem["station"]
+ overpass.max_elev = elem["elev"]
dumps.append(overpass)
return dumps
def get_aqua_terra_dumpdata_from_ftp(sat, dump_url):
- """
- Get the information on the internet on the actual global dumps of Terra and Aqua
- """
-
+ """Get the information on the internet on the actual global dumps of Terra and Aqua."""
logger.info("Fetch %s dump info from internet", str(sat.name))
- if isinstance(dump_url, six.text_type):
+ if isinstance(dump_url, str):
url = urlparse(dump_url % sat.name)
else:
url = urlparse(HOST % sat.name)
logger.debug("Connect to ftp server")
try:
- f = ftplib.FTP(url.netloc)
+ f = ftplib.FTP_TLS(url.netloc)
except (socket.error, socket.gaierror) as e:
- logger.error('cannot reach to %s ' % HOST + str(e))
+ logger.error("cannot reach to %s " % HOST + str(e))
f = None
if f is not None:
try:
- f.login('anonymous', 'guest')
+ f.login("anonymous", "guest")
logger.debug("Logged in")
except ftplib.error_perm:
- logger.error('cannot login anonymously')
+ logger.error("cannot login anonymously")
f.quit()
f = None
if f is not None:
data = []
try:
+ f.prot_p() # explicitly call for protected transfer
f.dir(url.path, data.append)
except socket.error as e:
logger.error("Can't get any data: " + str(e))
@@ -360,7 +462,7 @@ def get_aqua_terra_dumpdata_from_ftp(sat, dump_url):
if f is None:
logger.info("Can't access ftp server, using cached data")
- filenames = glob.glob("/tmp/*.rpt")
+ filenames = glob.glob(os.path.join(gettempdir(), "*.rpt"))
filenames = [
x for x in filenames if x.startswith("wotis.") and x.endswith(".rpt")
@@ -375,19 +477,20 @@ def get_aqua_terra_dumpdata_from_ftp(sat, dump_url):
for date in sorted(dates):
lines = []
- if not os.path.exists(os.path.join("/tmp", filedates[date])):
+ if not os.path.exists(os.path.join(gettempdir(), filedates[date])):
try:
- f.retrlines('RETR ' + os.path.join(url.path, filedates[date]),
+ f.prot_p() # explicitly call for protected transfer
+ f.retrlines("RETR " + os.path.join(url.path, filedates[date]),
lines.append)
except ftplib.error_perm:
logger.info("Permission error (???) on ftp server, skipping.")
continue
- with open(os.path.join("/tmp", filedates[date]), "w") as fd_:
+ with open(os.path.join(gettempdir(), filedates[date]), "w") as fd_:
for line in lines:
fd_.write(line + "\n")
else:
- with open(os.path.join("/tmp", filedates[date]), "r") as fd_:
+ with open(os.path.join(gettempdir(), filedates[date]), "r") as fd_:
for line in fd_:
lines.append(line)
@@ -405,12 +508,12 @@ def get_aqua_terra_dumpdata_from_ftp(sat, dump_url):
# dumps.append(overpass)
for line in lines[7::2]:
- if line.strip() == '':
+ if line.strip() == "":
break
station, aos, elev, los = line.split()[:4]
aos = datetime.strptime(aos, "%Y:%j:%H:%M:%S")
los = datetime.strptime(los, "%Y:%j:%H:%M:%S")
- dumps.append({'station': station, 'aos': aos, 'los': los, 'elev': elev})
+ dumps.append({"station": station, "aos": aos, "los": los, "elev": elev})
if f is not None:
f.quit()
@@ -422,8 +525,12 @@ def get_next_passes(satellites,
forward,
coords,
tle_file=None,
- aqua_terra_dumps=None):
- """Get the next passes for *satellites*, starting at *utctime*, for a
+ aqua_terra_dumps=None,
+ min_pass=MIN_PASS,
+ local_horizon=0):
+ """Get the next passes for *satellites*.
+
+ Get the next passes for *satellites*, starting at *utctime*, for a
duration of *forward* hours, with observer at *coords* ie lon (°E), lat
(°N), altitude (km). Uses *tle_file* if provided, downloads from celestrak
otherwise.
@@ -432,60 +539,62 @@ def get_next_passes(satellites,
"""
passes = {}
- if tle_file is None and 'TLES' not in os.environ:
- fp_, tle_file = mkstemp(prefix="tle", dir="/tmp")
+ if tle_file is None and "TLES" not in os.environ:
+ fp_, tle_file = mkstemp(prefix="tle", dir=gettempdir())
os.close(fp_)
logger.info("Fetch tle info from internet")
tlefile.fetch(tle_file)
- if not os.path.exists(tle_file) and 'TLES' not in os.environ:
+ if not os.path.exists(tle_file) and "TLES" not in os.environ:
logger.info("Fetch tle info from internet")
tlefile.fetch(tle_file)
for sat in satellites:
- if not hasattr(sat, 'name'):
+ if not hasattr(sat, "name"):
from trollsched.schedule import Satellite
sat = Satellite(sat, 0, 0)
satorb = orbital.Orbital(sat.name, tle_file=tle_file)
- passlist = satorb.get_next_passes(utctime, forward, *coords)
-
- if sat.name == "metop-a":
- # Take care of metop-a
+ passlist = satorb.get_next_passes(utctime,
+ forward,
+ *coords,
+ horizon=local_horizon,
+ )
+
+ if sat.name.lower() == "metop-a":
+ # Take care of metop-a special case
passes["metop-a"] = get_metopa_passes(sat, passlist, satorb)
-
- elif sat.name in ["aqua", "terra"] and aqua_terra_dumps:
+ elif sat.name.lower() in ["aqua", "terra"] and aqua_terra_dumps:
# Take care of aqua (dumps in svalbard and poker flat)
# Get the Terra/Aqua passes and fill the passes dict:
get_terra_aqua_passes(passes, utctime, forward, sat, passlist, satorb, aqua_terra_dumps)
-
else:
if sat.name.upper() in VIIRS_PLATFORM_NAMES:
instrument = "viirs"
elif sat.name.lower().startswith("metop") or sat.name.lower().startswith("noaa"):
instrument = "avhrr"
+ elif sat.name.lower() in ["aqua", "terra"]: # when aqua_terra_dumps=False
+ instrument = "modis"
+ elif sat.name.upper() in MERSI_PLATFORM_NAMES:
+ instrument = "mersi"
elif sat.name.upper() in MERSI2_PLATFORM_NAMES:
- instrument = "mersi2"
+ instrument = "mersi-2"
else:
instrument = "unknown"
passes[sat.name] = [
Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument=instrument)
for rtime, ftime, uptime in passlist
- if ftime - rtime > timedelta(minutes=MIN_PASS)
+ if ftime - rtime > timedelta(minutes=min_pass)
]
return set(fctools_reduce(operator.concat, list(passes.values())))
def get_metopa_passes(sat, passlist, satorb):
- """Get the Metop-A passes, taking care that Metop-A doesn't transmit to ground
- everywhere
-
- """
-
+ """Get the Metop-A passes, taking care that Metop-A doesn't transmit to ground everywhere."""
metop_passes = [
- Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument='avhrr')
+ Pass(sat, rtime, ftime, orb=satorb, uptime=uptime, instrument="avhrr")
for rtime, ftime, uptime in passlist if rtime < ftime
]
@@ -504,28 +613,29 @@ def get_metopa_passes(sat, passlist, satorb):
def get_terra_aqua_passes(passes, utctime, forward, sat, passlist, satorb, aqua_terra_dumps):
- """Get the Terra/Aqua passes, taking care that Terra and Aqua do not have
- direct broadcast when there are global dumps
+ """Get the Terra/Aqua passes.
- passes: The dictionary of satellite passes which is being built
+ We take care that Terra and Aqua do not have direct broadcast when there are global dumps.
- utctime: The start time (datetime object)
+ Args:
+ passes: The dictionary of satellite passes which is being built
- forward: The number of hours ahead for which we will get the coming passes
+ utctime: The start time (datetime object)
- sat: The Satellite platform considered
+ forward: The number of hours ahead for which we will get the coming passes
- passlist: List of Pass objects
+ sat: The Satellite platform considered
- satorb: Orbital instance for the actual satellite and tles considered
+ passlist: List of Pass objects
- aqua_terra_dumps: True or False or the actual URL to get info on Terra/Aqua
- dumps. If True, the default URL will be used. If False or None, no dump
- info will be considered.
+ satorb: Orbital instance for the actual satellite and tles considered
- """
+ aqua_terra_dumps: True or False or the actual URL to get info on Terra/Aqua
+ dumps. If True, the default URL will be used. If False or None, no dump
+ info will be considered.
- instrument = 'modis'
+ """
+ instrument = "modis"
wpcoords = (-75.457222, 37.938611, 0)
passlist_wp = satorb.get_next_passes(
diff --git a/trollsched/schedule.py b/trollsched/schedule.py
index 42bcbe0..3924bec 100644
--- a/trollsched/schedule.py
+++ b/trollsched/schedule.py
@@ -1,6 +1,3 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
# Copyright (c) 2013 - 2019 PyTroll
# Author(s):
@@ -21,42 +18,42 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Scheduling
-"""
+"""Module and script for pass scheduling."""
+import argparse
import logging
import logging.handlers
import os
-try:
- from urllib.parse import urlparse
-except ImportError:
- from urlparse import urlparse
-
from datetime import datetime, timedelta
from pprint import pformat
+from urllib.parse import urlparse
import numpy as np
from pyorbital import astronomy
-from pyresample import utils as resample_utils
-from trollsched import utils
-from trollsched.spherical import get_twilight_poly
-from trollsched.graph import Graph
-from trollsched.satpass import get_next_passes, SimplePass
-from pyresample.boundary import AreaDefBoundary
-from trollsched.combine import get_combined_sched
+from trollsched.writers import generate_meos_file, generate_metno_xml_file, generate_sch_file, generate_xml_file
+
+try:
+ from pyresample import parse_area_file
+except ImportError:
+ # Older versions of pyresample:
+ from pyresample.utils import parse_area_file
-logger = logging.getLogger(__name__)
-# name/id for centre/org creating schedules
-CENTER_ID = "SMHI"
+from trollsched import MIN_PASS, utils
+from trollsched.combine import get_combined_sched
+from trollsched.graph import Graph
+from trollsched.satpass import SimplePass, get_next_passes
+from trollsched.spherical import get_twilight_poly
+logger = logging.getLogger(__name__)
-class Station(object):
+class Station:
"""docstring for Station."""
- def __init__(self, station_id, name, longitude, latitude, altitude, area, satellites, area_file=None):
- super(Station, self).__init__()
+ def __init__(self, station_id, name, longitude, latitude, altitude, area, satellites, area_file=None,
+ min_pass=MIN_PASS, local_horizon=0):
+ """Initialize the station."""
self.id = station_id
self.name = name
self.longitude = longitude
@@ -67,17 +64,19 @@ def __init__(self, station_id, name, longitude, latitude, altitude, area, satell
if area_file is not None:
try:
- self.area = resample_utils.parse_area_file(area_file, area)[0]
+ self.area = parse_area_file(area_file, area)[0]
except TypeError:
pass
+ self.min_pass = min_pass
+ self.local_horizon = local_horizon
@property
def coords(self):
+ """Get the coordinates lon, lat, alt."""
return self.longitude, self.latitude, self.altitude
def single_station(self, sched, start_time, tle_file):
"""Calculate passes, graph, and schedule for one station."""
-
logger.debug("station: %s coords: %s area: %s scores: %s",
self.id, self.coords, self.area.area_id, self.satellites)
@@ -90,23 +89,13 @@ def single_station(self, sched, start_time, tle_file):
"time": start_time.strftime("%H%M%S")
}
if opts.xml:
- pattern_args['mode'] = "request"
+ pattern_args["mode"] = "request"
elif opts.report:
- pattern_args['mode'] = "report"
-
- logger.info("Computing next satellite passes")
- allpasses = get_next_passes(self.satellites, start_time,
- sched.forward,
- self.coords, tle_file,
- aqua_terra_dumps=(sched.dump_url or True
- if opts.no_aqua_terra_dump
- else None)
- )
- logger.info("Computation of next overpasses done")
+ pattern_args["mode"] = "report"
- logger.debug(str(sorted(allpasses, key=lambda x: x.risetime)))
+ allpasses = self.get_next_passes(opts, sched, start_time, tle_file)
- area_boundary = AreaDefBoundary(self.area, frequency=500)
+ area_boundary = self.area.boundary(8)
self.area.poly = area_boundary.contour_poly
if opts.plot:
@@ -118,7 +107,9 @@ def single_station(self, sched, start_time, tle_file):
args=(allpasses,
self.area.poly,
build_filename(
- "dir_plots", pattern, pattern_args)
+ "dir_plots", pattern, pattern_args),
+ sched.plot_parameters,
+ sched.plot_title
)
)
image_saver.start()
@@ -143,19 +134,26 @@ def single_station(self, sched, start_time, tle_file):
generate_sch_file(build_filename("file_sci", pattern,
pattern_args), allpasses, self.coords)
+ if opts.meos:
+ generate_meos_file(build_filename("file_meos", pattern, pattern_args), allpasses,
+ self.coords, start_time + timedelta(hours=sched.start), True) # Ie report mode
+
+ if opts.plot:
+ logger.info("Waiting for images to be saved...")
+ image_saver.join()
+ logger.info("Done!")
+
+ if opts.metno_xml:
+ generate_metno_xml_file(build_filename("file_metno_xml", pattern, pattern_args), allpasses,
+ self.coords, start_time + timedelta(hours=sched.start),
+ start_time + timedelta(hours=sched.forward), self.id, sched.center_id,
+ report_mode=True)
+
if opts.xml or opts.report:
url = urlparse(opts.output_url or opts.output_dir)
- if url.scheme not in ["file", ""]:
- directory = "/tmp"
- else:
- directory = url.path
- if opts.plot:
- logger.info("Waiting for images to be saved...")
- image_saver.join()
- logger.info("Done!")
if opts.xml or opts.report:
- """Allways create xml-file in request-mode"""
- pattern_args['mode'] = "request"
+ # Always create xml-file in request-mode
+ pattern_args["mode"] = "request"
xmlfile = generate_xml_file(allpasses,
start_time + timedelta(hours=sched.start),
start_time + timedelta(hours=sched.forward),
@@ -163,13 +161,13 @@ def single_station(self, sched, start_time, tle_file):
"file_xml", pattern, pattern_args),
self.id,
sched.center_id,
- False
+ report_mode=False
)
logger.info("Generated " + str(xmlfile))
send_file(url, xmlfile)
if opts.report:
"""'If report-mode was set"""
- pattern_args['mode'] = "report"
+ pattern_args["mode"] = "report"
xmlfile = generate_xml_file(allpasses,
start_time + timedelta(hours=sched.start),
start_time + timedelta(hours=sched.forward),
@@ -196,36 +194,49 @@ def single_station(self, sched, start_time, tle_file):
return graph, allpasses
+ def get_next_passes(self, opts, sched, start_time, tle_file):
+ """Get the next passes."""
+ logger.info("Computing next satellite passes")
+ allpasses = get_next_passes(self.satellites, start_time,
+ sched.forward,
+ self.coords, tle_file,
+ aqua_terra_dumps=(sched.dump_url or True
+ if opts.no_aqua_terra_dump
+ else None),
+ min_pass=self.min_pass,
+ local_horizon=self.local_horizon
+ )
+ logger.info("Computation of next overpasses done")
+ logger.debug(str(sorted(allpasses, key=lambda x: x.risetime)))
+ return allpasses
-class SatScore(object):
+class SatScore:
"""docstring for SatScore."""
def __init__(self, day, night):
- super(SatScore, self).__init__()
+ """Initialize the score."""
self.day = day
self.night = night
-class Satellite(object):
-
+class Satellite:
"""docstring for Satellite."""
def __init__(self, name, day, night,
schedule_name=None, international_designator=None):
- super(Satellite, self).__init__()
+ """Initialize the satellite."""
self.name = name
self.international_designator = international_designator
self.score = SatScore(day, night)
self.schedule_name = schedule_name or name
-class Scheduler(object):
-
+class Scheduler:
"""docstring for Scheduler."""
- def __init__(self, stations, min_pass, forward, start, dump_url, patterns, center_id):
- super(Scheduler, self).__init__()
+ def __init__(self, stations, min_pass, forward, start, dump_url, patterns, center_id, plot_parameters, plot_title):
+ """Initialize the scheduler."""
self.stations = stations
self.min_pass = min_pass
self.forward = forward
@@ -233,12 +244,15 @@ def __init__(self, stations, min_pass, forward, start, dump_url, patterns, cente
self.dump_url = dump_url
self.patterns = patterns
self.center_id = center_id
+ self.plot_parameters = plot_parameters
+ self.plot_title = plot_title
self.opts = None
-def conflicting_passes(allpasses, delay=timedelta(seconds=0)):
- """Get the passes in groups of conflicting passes.
- """
+def conflicting_passes(allpasses, delay=None):
+ """Get the passes in groups of conflicting passes."""
+ if delay is None:
+ delay = timedelta(seconds=0)
passes = sorted(allpasses, key=lambda x: x.risetime)
@@ -261,11 +275,11 @@ def conflicting_passes(allpasses, delay=timedelta(seconds=0)):
return groups
-def get_non_conflicting_groups(passes, delay=timedelta(seconds=0)):
- """Get the different non-conflicting solutions in a group of conflicting
- passes.
- """
+def get_non_conflicting_groups(passes, delay=None):
+ """Get the different non-conflicting solutions in a group of conflicting passes."""
# Uses graphs and maximal clique finding with the Bron-Kerbosch algorithm.
+ if delay is None:
+ delay = timedelta(seconds=0)
order = len(passes)
@@ -290,6 +304,7 @@ def get_non_conflicting_groups(passes, delay=timedelta(seconds=0)):
def fermia(t):
+ """Return the Fermi value a."""
a = 0.25
b = a / 4
k = b * np.log(1 / 3.0) + a
@@ -298,6 +313,7 @@ def fermia(t):
def fermib(t):
+ """Return the Fermi value b."""
a = 0.25
b = a / 4
return 1 / (np.exp((t - a) / b) + 1)
@@ -307,9 +323,7 @@ def fermib(t):
def combine(p1, p2, area_of_interest):
- """Combine passes together.
- """
-
+ """Combine passes together."""
try:
return combination[p1, p2]
except KeyError:
@@ -416,8 +430,7 @@ def pscore(poly, coeff=1):
def get_best_sched(overpasses, area_of_interest, delay, avoid_list=None):
- """Get the best schedule based on *area_of_interest*.
- """
+ """Get the best schedule based on *area_of_interest*."""
avoid_list = avoid_list or []
passes = sorted(overpasses, key=lambda x: x.risetime)
grs = conflicting_passes(passes, delay)
@@ -472,127 +485,39 @@ def add_arc(graph, p1, p2, hook=None):
def argmax(iterable):
+ """Find the index of the maximum of an iterable."""
return max((x, i) for i, x in enumerate(iterable))[1]
def get_max(groups, fun):
- """Get the best group of *groups* using the score function *fun*
- """
+ """Get the best group of *groups* using the score function *fun*."""
scores = []
for grp in groups:
scores.append(sum([fun(p) for p in grp]))
return groups[argmax(scores)]
-def generate_sch_file(output_file, overpasses, coords):
-
- with open(output_file, "w") as out:
- # create epochs
- out.write("#Orbital elements\n#\n#SCName Epochtime\n#\n")
- satellites = set()
-
- for overpass in overpasses:
- epoch = "!{0:<16} {1}".format(overpass.satellite.name.upper(),
- overpass.orb.tle.epoch.strftime("%Y%m%d %H%M%S"))
- satellites |= set([epoch])
- sats = "\n".join(satellites) + "\n"
- out.write(sats)
- out.write("#\n#\n#Pass List\n#\n")
-
- out.write(
- "#SCName RevNum Risetime Falltime Elev Dura ANL Rec Dir Man Ovl OvlSCName OvlRev OvlRisetime OrigRisetime OrigFalltime OrigDuration\n#\n")
-
- for overpass in sorted(overpasses):
- out.write(overpass.print_vcs(coords) + "\n")
-
-
-def generate_xml_requests(sched, start, end, station_name, center_id, report_mode=False):
- """Create xml requests.
- """
- import xml.etree.ElementTree as ET
-
- reqtime = datetime.utcnow()
- eum_format = "%Y-%m-%d-%H:%M:%S"
-
- root = ET.Element("acquisition-schedule")
- props = ET.SubElement(root, "properties")
- proj = ET.SubElement(props, "project")
- proj.text = "Pytroll"
- typep = ET.SubElement(props, "type")
- if report_mode:
- typep.text = "report"
- else:
- typep.text = "request"
- station = ET.SubElement(props, "station")
- station.text = station_name
- file_start = ET.SubElement(props, "file-start")
- file_start.text = start.strftime(eum_format)
- file_end = ET.SubElement(props, "file-end")
- file_end.text = end.strftime(eum_format)
- reqby = ET.SubElement(props, "requested-by")
- reqby.text = center_id
- reqon = ET.SubElement(props, "requested-on")
- reqon.text = reqtime.strftime(eum_format)
- for overpass in sorted(sched):
- if (overpass.rec or report_mode) and overpass.risetime > start:
- ovpass = ET.SubElement(root, "pass")
- sat_name = overpass.satellite.schedule_name or overpass.satellite.name
- ovpass.set("satellite", sat_name)
- ovpass.set("start-time", overpass.risetime.strftime(eum_format))
- ovpass.set("end-time", overpass.falltime.strftime(eum_format))
- if report_mode:
- if overpass.fig is not None:
- ovpass.set("img", overpass.fig)
- ovpass.set("rec", str(overpass.rec))
-
- return root, reqtime
-
-
-def generate_xml_file(sched, start, end, xml_file, station, center_id, report_mode=False):
- """Create an xml request file.
- """
- import xml.etree.ElementTree as ET
- tree, reqtime = generate_xml_requests(sched,
- start, end,
- station, center_id, report_mode)
- filename = xml_file
- tmp_filename = xml_file + reqtime.strftime("%Y-%m-%d-%H-%M-%S") + ".tmp"
- with open(tmp_filename, "w") as fp_:
- if report_mode:
- fp_.write(""
- "")
- fp_.write(str(ET.tostring(tree)))
- os.rename(tmp_filename, filename)
- return filename
-
-
-def parse_datetime(strtime):
- """Parse the time string *strtime*
- """
- return datetime.strptime(strtime, "%Y%m%d%H%M%S")
-
-
-def save_passes(allpasses, poly, output_dir):
- """Save overpass plots to png and store in directory *output_dir*
- """
+def save_passes(allpasses, poly, output_dir, plot_parameters=None, plot_title=None):
+ """Save overpass plots to png and store in directory *output_dir*."""
from trollsched.drawing import save_fig
for overpass in allpasses:
- save_fig(overpass, poly=poly, directory=output_dir)
+ save_fig(overpass, poly=poly, directory=output_dir, plot_parameters=plot_parameters, plot_title=plot_title)
+ logger.info("All plots saved!")
def get_passes_from_xml_file(filename):
"""Read passes from aquisition xml file."""
- import xml.etree.ElementTree as ET
+ import defusedxml.ElementTree as ET
tree = ET.parse(filename)
root = tree.getroot()
pass_list = []
- for overpass in root.iter('pass'):
+ for overpass in root.iter("pass"):
start_time = datetime.strptime(
- overpass.attrib['start-time'], '%Y-%m-%d-%H:%M:%S')
+ overpass.attrib["start-time"], "%Y-%m-%d-%H:%M:%S")
end_time = datetime.strptime(
- overpass.attrib['end-time'], '%Y-%m-%d-%H:%M:%S')
+ overpass.attrib["end-time"], "%Y-%m-%d-%H:%M:%S")
pass_list.append(SimplePass(
- overpass.attrib['satellite'], start_time, end_time))
+ overpass.attrib["satellite"], start_time, end_time))
return pass_list
@@ -607,36 +532,31 @@ def build_filename(pattern_name, pattern_dict, kwargs):
def send_file(url, file):
+ """Send a file through ftp."""
pathname, filename = os.path.split(file)
del pathname
if url.scheme in ["file", ""]:
pass
- elif url.scheme == "ftp":
+ elif url.scheme in ["ftp", b"ftp"]:
import ftplib
session = ftplib.FTP(url.hostname, url.username, url.password)
with open(file, "rb") as xfile:
- session.storbinary('STOR ' + str(filename), xfile)
+ session.storbinary("STOR " + str(filename), xfile)
session.quit()
else:
logger.error("Cannot save to %s, but file is there:",
- str(url.scheme), str(file))
+ (str(url.scheme), str(file)))
def combined_stations(scheduler, start_time, graph, allpasses):
- # opts, pattern, station_list, graph, allpasses, start_time, start, forward, center_id):
"""The works around the combination of schedules for two or more stations."""
-
logger.info("Generating coordinated schedules ...")
def collect_labels(newpasses, stats):
"""Collect labels, each with one pass per station."""
# TODO: is there a simpler way?
clabels = []
- from sys import version_info
- if version_info < (2, 7):
- npasses = dict((s, set()) for s in stats)
- else:
- npasses = {s: set() for s in stats}
+ npasses = {s: set() for s in stats}
for npass in newpasses:
cl = []
for i, s in zip(range(len(stats)), stats):
@@ -657,9 +577,9 @@ def collect_labels(newpasses, stats):
"time": start_time.strftime("%H%M%S")
}
if scheduler.opts.xml:
- pattern_args['mode'] = "request"
+ pattern_args["mode"] = "request"
elif scheduler.opts.report:
- pattern_args['mode'] = "report"
+ pattern_args["mode"] = "report"
passes = {}
# reset flag "rec" for all passes.
@@ -668,16 +588,15 @@ def collect_labels(newpasses, stats):
passes[s] = list(ap)
for p in passes[s]:
p.rec = False
- except:
+ except Exception:
logger.exception("Failed to reset 'rec' for s:%s ap:%s passes[s]:%s p:%s",
- a, ap, passes[s], p)
+ s, ap, passes[s], p)
raise
stats, schedule, (newgraph, newpasses) = get_combined_sched(graph, passes)
-# logger.debug(pformat(schedule))
for opass in schedule:
- for i, ipass in zip(range(len(opass)), opass):
+ for _i, ipass in zip(range(len(opass)), opass):
if ipass[0] is None:
continue
ipass[0].rec = True
@@ -702,7 +621,7 @@ def collect_labels(newpasses, stats):
passes[station_id],
[s.coords for s in scheduler.stations if s.id == station_id][0])
if scheduler.opts.xml or scheduler.opts.report:
- pattern_args['mode'] = "request"
+ pattern_args["mode"] = "request"
xmlfile = generate_xml_file(passes[station_id],
start_time + timedelta(hours=scheduler.start),
start_time + timedelta(hours=scheduler.forward),
@@ -715,7 +634,7 @@ def collect_labels(newpasses, stats):
url = urlparse(scheduler.opts.output_url or scheduler.opts.output_dir)
send_file(url, xmlfile)
if scheduler.opts.report:
- pattern_args['mode'] = "report"
+ pattern_args["mode"] = "report"
xmlfile = generate_xml_file(passes[station_id],
start_time + timedelta(hours=scheduler.start),
start_time + timedelta(hours=scheduler.forward),
@@ -727,71 +646,32 @@ def collect_labels(newpasses, stats):
True)
logger.info("Generated " + str(xmlfile))
+ if scheduler.opts.meos:
+ meosfile = generate_meos_file(build_filename("file_meos", scheduler.patterns, pattern_args),
+ passes[station_id],
+ # station_meta[station]['coords'],
+ [s.coords for s in scheduler.stations if s.id == station_id][0],
+ start_time + timedelta(hours=scheduler.start),
+ False) # I.e. only print schedule passes
+ logger.info("Generated " + str(meosfile))
+ if scheduler.opts.metno_xml:
+ metno_xmlfile = generate_metno_xml_file(build_filename("file_metno_xml", scheduler.patterns, pattern_args),
+ passes[station_id],
+ # station_meta[station]['coords'],
+ [s.coords for s in scheduler.stations if s.id == station_id][0],
+ start_time + timedelta(hours=scheduler.start),
+ start_time + timedelta(hours=scheduler.forward),
+ station_id, scheduler.center_id, False)
+ logger.info("Generated " + str(metno_xmlfile))
+
logger.info("Finished coordinated schedules.")
-def run():
+def run(args=None):
"""The schedule command."""
- import argparse
global logger
- parser = argparse.ArgumentParser()
- # general arguments
- parser.add_argument("-c", "--config", default=None,
- help="configuration file to use")
- parser.add_argument("-t", "--tle", default=None,
- help="tle file to use")
- parser.add_argument("-l", "--log", default=None,
- help="File to log to (defaults to stdout)")
- parser.add_argument("-m", "--mail", nargs="*", default=None,
- help="mail address(es) to send error messages to.")
- parser.add_argument("-v", "--verbose", action="store_true",
- help="print debug messages too")
- # argument group: coordinates and times
- group_postim = parser.add_argument_group(title="start-parameter",
- description="(or set values in the configuration file)")
- group_postim.add_argument("--lat", type=float,
- help="Latitude, degrees north")
- group_postim.add_argument("--lon", type=float,
- help="Longitude, degrees east")
- group_postim.add_argument("--alt", type=float,
- help="Altitude, km")
- group_postim.add_argument("-f", "--forward", type=float,
- help="time ahead to compute the schedule")
- group_postim.add_argument("-s", "--start-time", type=parse_datetime,
- help="start time of the schedule to compute")
- group_postim.add_argument("-d", "--delay", default=60, type=float,
- help="delay (in seconds) needed between two "
- + "consecutive passes (60 seconds by default)")
- # argument group: special behaviour
- group_spec = parser.add_argument_group(title="special",
- description="(additional parameter changing behaviour)")
- group_spec.add_argument("-a", "--avoid",
- help="xml request file with passes to avoid")
- group_spec.add_argument("--no-aqua-terra-dump", action="store_false",
- help="do not consider Aqua/Terra-dumps")
- group_spec.add_argument("--multiproc", action="store_true",
- help="use multiple parallel processes")
- # argument group: output-related
- group_outp = parser.add_argument_group(title="output",
- description="(file pattern are taken from configuration file)")
- group_outp.add_argument("-o", "--output-dir", default=None,
- help="where to put generated files")
- group_outp.add_argument("-u", "--output-url", default=None,
- help="URL where to put generated schedule file(s)"
- + ", otherwise use output-dir")
- group_outp.add_argument("-x", "--xml", action="store_true",
- help="generate an xml request file (schedule)"
- )
- group_outp.add_argument("-r", "--report", action="store_true",
- help="generate an xml report file (schedule)")
- group_outp.add_argument("--scisys", action="store_true",
- help="generate a SCISYS schedule file")
- group_outp.add_argument("-p", "--plot", action="store_true",
- help="generate plot images")
- group_outp.add_argument("-g", "--graph", action="store_true",
- help="save graph info")
- opts = parser.parse_args()
+ opts = parse_args(args)
if opts.config:
# read_config() returns:
@@ -799,52 +679,14 @@ def run():
# station_list, forward, start, pattern = utils.read_config(opts.config)
scheduler = utils.read_config(opts.config)
- # TODO make config file compulsory
-
- if (not opts.config) and (not (opts.lon or opts.lat or opts.alt)):
- parser.error("Coordinates must be provided in the absence of "
- "configuration file.")
-
- if not (opts.xml or opts.scisys or opts.report):
- parser.error("No output specified, use '--scisys' or '-x/--xml'")
-
- if opts.output_dir is None:
- opts.output_dir = os.path.curdir
- if "dir_output" not in scheduler.patterns:
- pattern["dir_output"] = opts.output_dir
-
- if opts.log:
- previous = os.path.exists(opts.log)
- handler = logging.handlers.RotatingFileHandler(opts.log, backupCount=7)
- if previous:
- handler.doRollover()
+ if opts.output_dir:
+ scheduler.patterns["dir_output"] = opts.output_dir
else:
- handler = logging.StreamHandler()
- handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
- " %(name)s] %(message)s",
- '%Y-%m-%d %H:%M:%S'))
- if opts.verbose:
- loglevel = logging.DEBUG
- else:
- loglevel = logging.INFO
+ scheduler.patterns.setdefault("dir_output", os.path.curdir)
- handler.setLevel(loglevel)
- logging.getLogger('').setLevel(loglevel)
- logging.getLogger('').addHandler(handler)
-
- if opts.mail:
- mhandler = logging.handlers.SMTPHandler("localhost",
- "pytroll-schedule@pytroll.org",
- opts.mail,
- "Scheduler")
- mhandler.setLevel(logging.WARNING)
- logging.getLogger('').addHandler(mhandler)
-
- logger = logging.getLogger("trollsched")
+ setup_logging(opts)
tle_file = opts.tle
- if opts.forward:
- forward = opts.forward
if opts.start_time:
start_time = opts.start_time
else:
@@ -914,9 +756,113 @@ def run():
combined_stations(scheduler, start_time, graph, allpasses)
-if __name__ == '__main__':
+def setup_logging(opts):
+ """Set up the logging."""
+ global logger
+ if opts.log:
+ previous = os.path.exists(opts.log)
+ handler = logging.handlers.RotatingFileHandler(opts.log, backupCount=7)
+ if previous:
+ handler.doRollover()
+ else:
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
+ " %(name)s] %(message)s",
+ "%Y-%m-%d %H:%M:%S"))
+ if opts.verbose:
+ loglevel = logging.DEBUG
+ else:
+ loglevel = logging.INFO
+ handler.setLevel(loglevel)
+ logging.getLogger("").setLevel(loglevel)
+ logging.getLogger("").addHandler(handler)
+ if opts.mail:
+ mhandler = logging.handlers.SMTPHandler("localhost",
+ "pytroll-schedule@pytroll.org",
+ opts.mail,
+ "Scheduler")
+ mhandler.setLevel(logging.WARNING)
+ logging.getLogger("").addHandler(mhandler)
+ logger = logging.getLogger("trollsched")
+
+
+def parse_args(args=None):
+ """Parse arguments from the command line."""
+ parser = argparse.ArgumentParser()
+ # general arguments
+ parser.add_argument("-c", "--config", required=True, default=None,
+ help="configuration file to use")
+ parser.add_argument("-t", "--tle", default=None,
+ help="tle file to use")
+ parser.add_argument("-l", "--log", default=None,
+ help="File to log to (defaults to stdout)")
+ parser.add_argument("-m", "--mail", nargs="*", default=None,
+ help="mail address(es) to send error messages to.")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="print debug messages too")
+ # argument group: coordinates and times
+ group_postim = parser.add_argument_group(title="start-parameter",
+ description="(or set values in the configuration file)")
+ group_postim.add_argument("--lat", type=float,
+ help="Latitude, degrees north")
+ group_postim.add_argument("--lon", type=float,
+ help="Longitude, degrees east")
+ group_postim.add_argument("--alt", type=float,
+ help="Altitude, km")
+ group_postim.add_argument("-f", "--forward", type=float,
+ help="time ahead to compute the schedule")
+ group_postim.add_argument("-s", "--start-time", type=datetime.fromisoformat,
+ help="start time of the schedule to compute")
+ group_postim.add_argument("-d", "--delay", default=60, type=float,
+ help="delay (in seconds) needed between two " +
+ "consecutive passes (60 seconds by default)")
+ # argument group: special behaviour
+ group_spec = parser.add_argument_group(title="special",
+ description="(additional parameter changing behaviour)")
+ group_spec.add_argument("-a", "--avoid",
+ help="xml request file with passes to avoid")
+ group_spec.add_argument("--no-aqua-terra-dump", action="store_false",
+ help="do not consider Aqua/Terra-dumps")
+ group_spec.add_argument("--multiproc", action="store_true",
+ help="use multiple parallel processes")
+ # argument group: output-related
+ group_outp = parser.add_argument_group(title="output",
+ description="(file pattern are taken from configuration file)")
+ group_outp.add_argument("-o", "--output-dir", default=None,
+ help="where to put generated files")
+ group_outp.add_argument("-u", "--output-url", default=None,
+ help="URL where to put generated schedule file(s)" +
+ ", otherwise use output-dir")
+ group_outp.add_argument("-x", "--xml", action="store_true",
+ help="generate an xml request file (schedule)"
+ )
+ group_outp.add_argument("-r", "--report", action="store_true",
+ help="generate an xml report file (schedule)")
+ group_outp.add_argument("--scisys", action="store_true",
+ help="generate a SCISYS schedule file")
+ group_outp.add_argument("-p", "--plot", action="store_true",
+ help="generate plot images")
+ group_outp.add_argument("-g", "--graph", action="store_true",
+ help="save graph info")
+ group_outp.add_argument("--meos", action="store_true",
+ help="generate a MEOS schedule file")
+ group_outp.add_argument("--metno-xml", action="store_true",
+ help="generate a METNO xml pass data file")
+ opts = parser.parse_args(args)
+
+ if (not opts.config) and (not (opts.lon or opts.lat or opts.alt)):
+ parser.error("Coordinates must be provided in the absence of "
+ "configuration file.")
+
+ if not (opts.xml or opts.scisys or opts.report or opts.metno_xml or opts.meos):
+ parser.error("No output specified, use '--scisys', '-x/--xml', '-r/--report', '--meos', or '--metno-xml'")
+
+ return opts
+
+
+if __name__ == "__main__":
try:
run()
- except:
+ except Exception:
logger.exception("Something wrong happened!")
raise
diff --git a/trollsched/spherical.py b/trollsched/spherical.py
index d544d0f..ff7c13b 100644
--- a/trollsched/spherical.py
+++ b/trollsched/spherical.py
@@ -26,321 +26,26 @@
"""
+import logging
+
import numpy as np
import pyresample.spherical
-import logging
logger = logging.getLogger(__name__)
-
-class SCoordinate(object):
-
- """Spherical coordinates
- """
-
- def __init__(self, lon, lat):
- self.lon = lon
- self.lat = lat
-
- def cross2cart(self, point):
- """Compute the cross product, and convert to cartesian coordinates
- """
-
- lat1 = self.lat
- lon1 = self.lon
- lat2 = point.lat
- lon2 = point.lon
-
- ad = np.sin(lat1 - lat2) * np.cos((lon1 - lon2) / 2.0)
- be = np.sin(lat1 + lat2) * np.sin((lon1 - lon2) / 2.0)
- c = np.sin((lon1 + lon2) / 2.0)
- f = np.cos((lon1 + lon2) / 2.0)
- g = np.cos(lat1)
- h = np.cos(lat2)
- i = np.sin(lon2 - lon1)
- res = CCoordinate(np.array([-ad * c + be * f,
- ad * f + be * c,
- g * h * i]))
-
- return res
-
- def to_cart(self):
- """Convert to cartesian.
- """
- return CCoordinate(np.array([np.cos(self.lat) * np.cos(self.lon),
- np.cos(self.lat) * np.sin(self.lon),
- np.sin(self.lat)]))
-
- def distance(self, point):
- """Vincenty formula.
- """
-
- dlambda = self.lon - point.lon
- num = ((np.cos(point.lat) * np.sin(dlambda)) ** 2 +
- (np.cos(self.lat) * np.sin(point.lat) -
- np.sin(self.lat) * np.cos(point.lat) *
- np.cos(dlambda)) ** 2)
- den = (np.sin(self.lat) * np.sin(point.lat) +
- np.cos(self.lat) * np.cos(point.lat) * np.cos(dlambda))
-
- return np.arctan2(num ** .5, den)
-
- def hdistance(self, point):
- """Haversine formula
- """
-
- return 2 * np.arcsin((np.sin((point.lat - self.lat) / 2.0) ** 2.0 +
- np.cos(point.lat) * np.cos(self.lat) *
- np.sin((point.lon - self.lon) / 2.0) ** 2.0) ** .5)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- return np.allclose((self.lon, self.lat), (other.lon, other.lat))
-
- def __str__(self):
- return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
-
- def __repr__(self):
- return str((np.rad2deg(self.lon), np.rad2deg(self.lat)))
-
- def __iter__(self):
- return [self.lon, self.lat].__iter__()
-
-
-class CCoordinate(object):
-
- """Cartesian coordinates
- """
-
- def __init__(self, cart):
- self.cart = np.array(cart)
-
- def norm(self):
- """Euclidean norm of the vector.
- """
- return np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
-
- def normalize(self):
- """normalize the vector.
- """
-
- self.cart /= np.sqrt(np.einsum('...i, ...i', self.cart, self.cart))
-
- return self
-
- def cross(self, point):
- """cross product with another vector.
- """
- return CCoordinate(np.cross(self.cart, point.cart))
-
- def dot(self, point):
- """dot product with another vector.
- """
- return np.inner(self.cart, point.cart)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __eq__(self, other):
- return np.allclose(self.cart, other.cart)
-
- def __str__(self):
- return str(self.cart)
-
- def __repr__(self):
- return str(self.cart)
-
- def __add__(self, other):
- try:
- return CCoordinate(self.cart + other.cart)
- except AttributeError:
- return CCoordinate(self.cart + np.array(other))
-
- def __radd__(self, other):
- return self.__add__(other)
-
- def __mul__(self, other):
- try:
- return CCoordinate(self.cart * other.cart)
- except AttributeError:
- return CCoordinate(self.cart * np.array(other))
-
- def __rmul__(self, other):
- return self.__mul__(other)
-
- def to_spherical(self):
- return SCoordinate(np.arctan2(self.cart[1], self.cart[0]),
- np.arcsin(self.cart[2]))
-
-
EPSILON = 0.0000001
def modpi(val, mod=np.pi):
- """Puts *val* between -*mod* and *mod*.
- """
+ """Put *val* between -*mod* and *mod*."""
return (val + mod) % (2 * mod) - mod
-class Arc(object):
-
- """An arc of the great circle between two points.
- """
- start = None
- end = None
-
- def __init__(self, start, end):
- self.start, self.end = start, end
-
- def __eq__(self, other):
- if(self.start == other.start and self.end == other.end):
- return 1
- return 0
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __str__(self):
- return (str(self.start) + " -> " + str(self.end))
-
- def __repr__(self):
- return (str(self.start) + " -> " + str(self.end))
-
- def angle(self, other_arc):
- """Oriented angle between two arcs.
- """
- if self.start == other_arc.start:
- a__ = self.start
- b__ = self.end
- c__ = other_arc.end
- elif self.start == other_arc.end:
- a__ = self.start
- b__ = self.end
- c__ = other_arc.start
- elif self.end == other_arc.end:
- a__ = self.end
- b__ = self.start
- c__ = other_arc.start
- elif self.end == other_arc.start:
- a__ = self.end
- b__ = self.start
- c__ = other_arc.end
- else:
- raise ValueError("No common point in angle computation.")
-
- ua_ = a__.cross2cart(b__)
- ub_ = a__.cross2cart(c__)
-
- val = ua_.dot(ub_) / (ua_.norm() * ub_.norm())
- if abs(val - 1) < EPSILON:
- angle = 0
- elif abs(val + 1) < EPSILON:
- angle = np.pi
- else:
- angle = np.arccos(val)
-
- n__ = ua_.normalize()
- if n__.dot(c__.to_cart()) > 0:
- return -angle
- else:
- return angle
-
- def intersections(self, other_arc):
- """Gives the two intersections of the greats circles defined by the
- current arc and *other_arc*.
- From http://williams.best.vwh.net/intersect.htm
- """
-
- if self.end.lon - self.start.lon > np.pi:
- self.end.lon -= 2 * np.pi
- if other_arc.end.lon - other_arc.start.lon > np.pi:
- other_arc.end.lon -= 2 * np.pi
- if self.end.lon - self.start.lon < -np.pi:
- self.end.lon += 2 * np.pi
- if other_arc.end.lon - other_arc.start.lon < -np.pi:
- other_arc.end.lon += 2 * np.pi
-
- ea_ = self.start.cross2cart(self.end).normalize()
- eb_ = other_arc.start.cross2cart(other_arc.end).normalize()
-
- cross = ea_.cross(eb_)
- lat = np.arctan2(cross.cart[2],
- np.sqrt(cross.cart[0] ** 2 + cross.cart[1] ** 2))
- lon = np.arctan2(cross.cart[1], cross.cart[0])
-
- return (SCoordinate(lon, lat),
- SCoordinate(modpi(lon + np.pi), -lat))
-
- def intersects(self, other_arc):
- """Says if two arcs defined by the current arc and the *other_arc*
- intersect. An arc is defined as the shortest tracks between two points.
- """
-
- return bool(self.intersection(other_arc))
-
- def intersection(self, other_arc):
- """Says where, if two arcs defined by the current arc and the
- *other_arc* intersect. An arc is defined as the shortest tracks between
- two points.
- """
- if self == other_arc:
- return None
- # if (self.end == other_arc.start or
- # self.end == other_arc.end or
- # self.start == other_arc.start or
- # self.start == other_arc.end):
- # return None
-
- for i in self.intersections(other_arc):
- a__ = self.start
- b__ = self.end
- c__ = other_arc.start
- d__ = other_arc.end
-
- ab_ = a__.hdistance(b__)
- cd_ = c__.hdistance(d__)
-
- if(((i in (a__, b__)) or
- (abs(a__.hdistance(i) + b__.hdistance(i) - ab_) < EPSILON)) and
- ((i in (c__, d__)) or
- (abs(c__.hdistance(i) + d__.hdistance(i) - cd_) < EPSILON))):
- return i
- return None
-
- def get_next_intersection(self, arcs, known_inter=None):
- """Get the next intersection between the current arc and *arcs*
- """
- res = []
- for arc in arcs:
- inter = self.intersection(arc)
- if (inter is not None and
- inter != arc.end and
- inter != self.end):
- res.append((inter, arc))
-
- def dist(args):
- """distance key.
- """
- return self.start.distance(args[0])
-
- take_next = False
- for inter, arc in sorted(res, key=dist):
- if known_inter is not None:
- if known_inter == inter:
- take_next = True
- elif take_next:
- return inter, arc
- else:
- return inter, arc
-
- return None, None
-
-
class SphPolygon(pyresample.spherical.SphPolygon):
+ """A spherical polygon with drawing capabilities."""
def draw(self, mapper, options, **more_options):
+ """Draw the polygon."""
lons = np.rad2deg(self.lon.take(np.arange(len(self.lon) + 1),
mode="wrap"))
lats = np.rad2deg(self.lat.take(np.arange(len(self.lat) + 1),
@@ -350,8 +55,7 @@ def draw(self, mapper, options, **more_options):
def get_twilight_poly(utctime):
- """Return a polygon enclosing the sunlit part of the globe at *utctime*.
- """
+ """Return a polygon enclosing the sunlit part of the globe at *utctime*."""
from pyorbital import astronomy
ra, dec = astronomy.sun_ra_dec(utctime)
lon = modpi(ra - astronomy.gmst(utctime))
diff --git a/trollsched/tests/__init__.py b/trollsched/tests/__init__.py
index 49e2a10..272b3ee 100644
--- a/trollsched/tests/__init__.py
+++ b/trollsched/tests/__init__.py
@@ -1,7 +1,4 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2014 - 2018 PyTroll Community
+# Copyright (c) 2014 - 2022 PyTroll Community
# Author(s):
@@ -21,20 +18,4 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Tests for scheduler.
-"""
-
-from trollsched.tests import (test_schedule, test_spherical, test_satpass)
-
-import unittest
-
-
-def suite():
- """The global test suite.
- """
- mysuite = unittest.TestSuite()
- mysuite.addTests(test_schedule.suite())
- mysuite.addTests(test_spherical.suite())
- mysuite.addTests(test_satpass.suite())
-
- return mysuite
+"""Tests for scheduler."""
diff --git a/trollsched/tests/test_satpass.py b/trollsched/tests/test_satpass.py
index 6711be7..344f4d6 100644
--- a/trollsched/tests/test_satpass.py
+++ b/trollsched/tests/test_satpass.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2018 - 2019 PyTroll
+# Copyright (c) 2018 - 2021 Pytroll-schedule developers
# Author(s):
@@ -20,16 +20,18 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Test the satellite pass and swath boundary classes
-"""
+"""Test the satellite pass and swath boundary classes."""
-import unittest
-import numpy as np
from datetime import datetime, timedelta
-from trollsched.satpass import Pass
-from trollsched.boundary import SwathBoundary
+
+import numpy as np
+import numpy.testing
+import pytest
from pyorbital.orbital import Orbital
+from pyresample.geometry import AreaDefinition, create_area_def
+from trollsched.boundary import SwathBoundary
+from trollsched.satpass import Pass
LONS1 = np.array([-122.29913729160562, -131.54385362589042, -155.788034272281,
143.1730880418349, 105.69172088208997, 93.03135571771092,
@@ -57,7 +59,7 @@
-27.993172418988525, -29.79361072725673, -32.11515837055801,
-35.36860848223405, -35.38196057933595, -35.96564490844792,
-37.14469461070555, -39.34032289002443, -43.49756191648018,
- -52.140150361811244, -73.32968630186114], dtype='float64')
+ -52.140150361811244, -73.32968630186114], dtype="float64")
LATS1 = np.array([84.60636067724808, 86.98555849233523, 88.49911967556697,
88.90233393880413, 88.23555365613707, 87.41630911481282,
@@ -85,25 +87,25 @@
41.57136466452366, 41.66608254408796, 41.745942562974314,
41.77850750277849, 54.62516158367828, 59.69624962433962,
64.7365168572082, 69.72588498397877, 74.61859631181376,
- 79.2863412851444, 83.25136141880888], dtype='float64')
-
-LONS2 = np.array([-174.41109502, 167.84584132, 148.24213696, 130.10334782,
- 115.7074828, 105.07369809, 97.28481583, 91.4618503,
- 86.98024241, 83.4283141, 80.53652225, 78.1253594,
- 76.07228855, 74.29143113, 72.72103408, 71.31559576,
- 70.04080412, 68.87020177, 67.78293355, 66.76218577,
- 65.79407472, 64.86682945, 63.97016605, 63.09478077,
- 62.23190558, 61.37287373, 60.50863405, 59.62912286,
- 58.72232744, 57.77268809, 56.75796498, 55.6419694,
- 54.36007027, 41.41762911, 41.15660793, 40.9331126,
- 40.73252665, 40.54677784, 40.37092304, 40.20150965,
- 40.0358693, 39.87175642, 39.70713409, 39.54002703,
- 39.36840323, 39.1900621, 39.00251256, 38.80282499,
- 38.58743647, 38.35188019, 38.09039231, 37.79531831,
- 37.45618154, 37.05815986, 36.57947382, 35.98665163,
- 35.22533847, 34.20085643, 32.73220377, 30.42514135,
- 26.23397747, 16.29417395, -23.91719576, -102.71481425,
- -122.5294795, -129.09284487], dtype='float64')
+ 79.2863412851444, 83.25136141880888], dtype="float64")
+
+LONS2 = np.array([-174.41109502, 167.84584132, 148.24213696, 130.10334782,
+ 115.7074828, 105.07369809, 97.28481583, 91.4618503,
+ 86.98024241, 83.4283141, 80.53652225, 78.1253594,
+ 76.07228855, 74.29143113, 72.72103408, 71.31559576,
+ 70.04080412, 68.87020177, 67.78293355, 66.76218577,
+ 65.79407472, 64.86682945, 63.97016605, 63.09478077,
+ 62.23190558, 61.37287373, 60.50863405, 59.62912286,
+ 58.72232744, 57.77268809, 56.75796498, 55.6419694,
+ 54.36007027, 41.41762911, 41.15660793, 40.9331126,
+ 40.73252665, 40.54677784, 40.37092304, 40.20150965,
+ 40.0358693, 39.87175642, 39.70713409, 39.54002703,
+ 39.36840323, 39.1900621, 39.00251256, 38.80282499,
+ 38.58743647, 38.35188019, 38.09039231, 37.79531831,
+ 37.45618154, 37.05815986, 36.57947382, 35.98665163,
+ 35.22533847, 34.20085643, 32.73220377, 30.42514135,
+ 26.23397747, 16.29417395, -23.91719576, -102.71481425,
+ -122.5294795, -129.09284487], dtype="float64")
LATS2 = np.array([83.23214786, 84.90973645, 85.62529048, 85.74243351, 85.52147568,
85.13874302, 84.69067959, 84.22338069, 83.75720094, 83.30023412,
@@ -118,215 +120,297 @@
81.74459732, 82.20957417, 82.68298027, 83.16949849, 83.67435372,
84.20356848, 84.76429067, 85.36521771, 86.01711637, 86.73327122,
87.5286869, 88.40887156, 89.21959299, 88.71884272, 87.09172665,
- 84.6670132], dtype='float64')
+ 84.6670132], dtype="float64")
LONS3 = np.array([-8.66259458, -6.20984986, 15.99813586, 25.41134052, 33.80598414,
48.28641356, 49.55596283, 45.21769275, 43.95449327, 30.04053601,
- 22.33028017, 13.90584249, -5.59290326, -7.75625031], dtype='float64')
+ 22.33028017, 13.90584249, -5.59290326, -7.75625031], dtype="float64")
LATS3 = np.array([66.94713585, 67.07854554, 66.53108388, 65.27837805, 63.50223596,
58.33858588, 57.71210872, 55.14964148, 55.72506407, 60.40889798,
- 61.99561474, 63.11425455, 63.67173255, 63.56939058], dtype='float64')
-
+ 61.99561474, 63.11425455, 63.67173255, 63.56939058], dtype="float64")
-def assertNumpyArraysEqual(self, other):
- if self.shape != other.shape:
- raise AssertionError("Shapes don't match")
- if not np.allclose(self, other):
- raise AssertionError("Elements don't match!")
+AREA_DEF_EURON1 = AreaDefinition("euron1", "Northern Europe - 1km",
+ "", {"proj": "stere", "ellps": "WGS84",
+ "lat_0": 90.0, "lon_0": 0.0, "lat_ts": 60.0},
+ 3072, 3072, (-1000000.0, -4500000.0, 2072000.0, -1428000.0))
def get_n20_orbital():
"""Return the orbital instance for a given set of TLEs for NOAA-20.
+
From 16 October 2018.
"""
tle1 = "1 43013U 17073A 18288.00000000 .00000042 00000-0 20142-4 0 2763"
tle2 = "2 43013 098.7338 224.5862 0000752 108.7915 035.0971 14.19549169046919"
- return Orbital('NOAA-20', line1=tle1, line2=tle2)
+ return Orbital("NOAA-20", line1=tle1, line2=tle2)
def get_n19_orbital():
"""Return the orbital instance for a given set of TLEs for NOAA-19.
+
From 16 October 2018.
"""
tle1 = "1 33591U 09005A 18288.64852564 .00000055 00000-0 55330-4 0 9992"
tle2 = "2 33591 99.1559 269.1434 0013899 353.0306 7.0669 14.12312703499172"
- return Orbital('NOAA-19', line1=tle1, line2=tle2)
+ return Orbital("NOAA-19", line1=tle1, line2=tle2)
-def get_region(areaid):
- try:
- from satpy.resample import get_area_def
- except ImportError:
- from mpop.projector import get_area_def
+def get_mb_orbital():
+ """Return orbital for a given set of TLEs for MetOp-B.
- return get_area_def(areaid)
+ From 2021-02-04
+ """
+ tle1 = "1 38771U 12049A 21034.58230818 -.00000012 00000-0 14602-4 0 9998"
+ tle2 = "2 38771 98.6992 96.5537 0002329 71.3979 35.1836 14.21496632434867"
+ return Orbital("Metop-B", line1=tle1, line2=tle2)
-class TestPass(unittest.TestCase):
+class TestPass:
+ """Tests for the Pass object."""
- def setUp(self):
- """Set up"""
+ def setup_method(self):
+ """Set up."""
self.n20orb = get_n20_orbital()
self.n19orb = get_n19_orbital()
def test_pass_instrument_interface(self):
-
+ """Test the instrument interface."""
tstart = datetime(2018, 10, 16, 2, 48, 29)
tend = datetime(2018, 10, 16, 3, 2, 38)
- instruments = set(('viirs', 'avhrr', 'modis'))
- overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument=instruments)
- self.assertEqual(overp.instrument, 'avhrr')
+ instruments = set(("viirs", "avhrr", "modis", "mersi", "mersi-2"))
+ for instrument in instruments:
+ overp = Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument=instrument)
+ assert overp.instrument == instrument
- instruments = set(('viirs', 'modis'))
- overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument=instruments)
- self.assertEqual(overp.instrument, 'viirs')
+ instruments = set(("viirs", "avhrr", "modis"))
+ overp = Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument=instruments)
+ assert overp.instrument == "avhrr"
- instruments = set(('amsu-a', 'mhs'))
- self.assertRaises(TypeError, Pass, self,
- 'NOAA-20', tstart, tend, orb=self.n20orb, instrument=instruments)
+ instruments = set(("viirs", "modis"))
+ overp = Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument=instruments)
+ assert overp.instrument == "viirs"
- def tearDown(self):
- """Clean up"""
- pass
+ instruments = set(("amsu-a", "mhs"))
+ with pytest.raises(TypeError):
+ Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument=instruments)
-class TestSwathBoundary(unittest.TestCase):
+class TestSwathBoundary:
+ """Test the swath boundary object."""
- def setUp(self):
- """Set up"""
+ def setup_method(self):
+ """Set up."""
self.n20orb = get_n20_orbital()
self.n19orb = get_n19_orbital()
- self.euron1 = get_region('euron1')
+ self.mborb = get_mb_orbital()
+ self.euron1 = AREA_DEF_EURON1
+ self.antarctica = create_area_def(
+ "antarctic",
+ {"ellps": "WGS84", "lat_0": "-90", "lat_ts": "-60",
+ "lon_0": "0", "no_defs": "None", "proj": "stere",
+ "type": "crs", "units": "m", "x_0": "0", "y_0": "0"},
+ width=1000, height=1000,
+ area_extent=(-4008875.4031, -4000855.294,
+ 4000855.9937, 4008874.7048))
+ self.arctica = create_area_def(
+ "arctic",
+ {"ellps": "WGS84", "lat_0": "90", "lat_ts": "60",
+ "lon_0": "0", "no_defs": "None", "proj": "stere",
+ "type": "crs", "units": "m", "x_0": "0", "y_0": "0"},
+ width=1000, height=1000,
+ area_extent=(-4008875.4031, -4000855.294,
+ 4000855.9937, 4008874.7048))
def test_swath_boundary(self):
-
+ """Test generating a swath boundary."""
tstart = datetime(2018, 10, 16, 2, 48, 29)
tend = datetime(2018, 10, 16, 3, 2, 38)
- overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs')
+ overp = Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument="viirs")
overp_boundary = SwathBoundary(overp)
cont = overp_boundary.contour()
- assertNumpyArraysEqual(cont[0], LONS1)
- assertNumpyArraysEqual(cont[1], LATS1)
+ numpy.testing.assert_array_almost_equal(cont[0], LONS1)
+ numpy.testing.assert_array_almost_equal(cont[1], LATS1)
tstart = datetime(2018, 10, 16, 4, 29, 4)
tend = datetime(2018, 10, 16, 4, 30, 29, 400000)
- overp = Pass('NOAA-20', tstart, tend, orb=self.n20orb, instrument='viirs')
+ overp = Pass("NOAA-20", tstart, tend, orb=self.n20orb, instrument="viirs")
overp_boundary = SwathBoundary(overp, frequency=200)
cont = overp_boundary.contour()
- assertNumpyArraysEqual(cont[0], LONS2)
- assertNumpyArraysEqual(cont[1], LATS2)
+ numpy.testing.assert_array_almost_equal(cont[0], LONS2)
+ numpy.testing.assert_array_almost_equal(cont[1], LATS2)
# NOAA-19 AVHRR:
- tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S')
- tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S')
+ tstart = datetime.strptime("20181016 04:00:00", "%Y%m%d %H:%M:%S")
+ tend = datetime.strptime("20181016 04:01:00", "%Y%m%d %H:%M:%S")
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr")
overp_boundary = SwathBoundary(overp, frequency=500)
cont = overp_boundary.contour()
- assertNumpyArraysEqual(cont[0], LONS3)
- assertNumpyArraysEqual(cont[1], LATS3)
+ numpy.testing.assert_array_almost_equal(cont[0], LONS3)
+ numpy.testing.assert_array_almost_equal(cont[1], LATS3)
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr/3')
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr/3")
overp_boundary = SwathBoundary(overp, frequency=500)
cont = overp_boundary.contour()
- assertNumpyArraysEqual(cont[0], LONS3)
- assertNumpyArraysEqual(cont[1], LATS3)
+ numpy.testing.assert_array_almost_equal(cont[0], LONS3)
+ numpy.testing.assert_array_almost_equal(cont[1], LATS3)
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr-3')
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr-3")
overp_boundary = SwathBoundary(overp, frequency=500)
cont = overp_boundary.contour()
- assertNumpyArraysEqual(cont[0], LONS3)
- assertNumpyArraysEqual(cont[1], LATS3)
-
- def test_swath_coverage(self):
+ numpy.testing.assert_array_almost_equal(cont[0], LONS3)
+ numpy.testing.assert_array_almost_equal(cont[1], LATS3)
+ def test_swath_coverage_does_not_cover_data_outside_area(self):
+ """Test that swath coverage is 0 when the data is outside the area of interest."""
# NOAA-19 AVHRR:
- tstart = datetime.strptime('20181016 03:54:13', '%Y%m%d %H:%M:%S')
- tend = datetime.strptime('20181016 03:55:13', '%Y%m%d %H:%M:%S')
+ tstart = datetime.strptime("20181016 03:54:13", "%Y%m%d %H:%M:%S")
+ tend = datetime.strptime("20181016 03:55:13", "%Y%m%d %H:%M:%S")
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr")
cov = overp.area_coverage(self.euron1)
- self.assertEqual(cov, 0)
+ assert cov == 0
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=80)
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr", frequency=80)
cov = overp.area_coverage(self.euron1)
- self.assertEqual(cov, 0)
+ assert cov == 0
- tstart = datetime.strptime('20181016 04:00:00', '%Y%m%d %H:%M:%S')
- tend = datetime.strptime('20181016 04:01:00', '%Y%m%d %H:%M:%S')
+ def test_swath_coverage_over_area(self):
+ """Test that swath coverage matches when covering a part of the area of interest."""
+ tstart = datetime.strptime("20181016 04:00:00", "%Y%m%d %H:%M:%S")
+ tend = datetime.strptime("20181016 04:01:00", "%Y%m%d %H:%M:%S")
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr')
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr")
cov = overp.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.103526, 5)
+ assert cov == pytest.approx(0.103526, 1e-5)
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=100)
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr", frequency=100)
cov = overp.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.103526, 5)
+ assert cov == pytest.approx(0.103526, 1e-5)
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr/3', frequency=133)
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr/3", frequency=133)
cov = overp.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.103526, 5)
+ assert cov == pytest.approx(0.103526, 1e-5)
- overp = Pass('NOAA-19', tstart, tend, orb=self.n19orb, instrument='avhrr', frequency=300)
+ overp = Pass("NOAA-19", tstart, tend, orb=self.n19orb, instrument="avhrr", frequency=300)
cov = overp.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.103526, 5)
+ assert cov == pytest.approx(0.103526, 1e-5)
+ def test_swath_coverage_metop(self):
+ """Test ascat and avhrr coverages."""
# ASCAT and AVHRR on Metop-B:
tstart = datetime.strptime("2019-01-02T10:19:39", "%Y-%m-%dT%H:%M:%S")
tend = tstart + timedelta(seconds=180)
- tle1 = '1 38771U 12049A 19002.35527803 .00000000 00000+0 21253-4 0 00017'
- tle2 = '2 38771 98.7284 63.8171 0002025 96.0390 346.4075 14.21477776326431'
+ tle1 = "1 38771U 12049A 19002.35527803 .00000000 00000+0 21253-4 0 00017"
+ tle2 = "2 38771 98.7284 63.8171 0002025 96.0390 346.4075 14.21477776326431"
- mypass = Pass('Metop-B', tstart, tend, instrument='ascat', tle1=tle1, tle2=tle2)
+ mypass = Pass("Metop-B", tstart, tend, instrument="ascat", tle1=tle1, tle2=tle2)
cov = mypass.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.322812, 5)
+ assert cov == pytest.approx(0.322812, 1e-5)
- mypass = Pass('Metop-B', tstart, tend, instrument='avhrr', tle1=tle1, tle2=tle2)
+ mypass = Pass("Metop-B", tstart, tend, instrument="avhrr", tle1=tle1, tle2=tle2)
cov = mypass.area_coverage(self.euron1)
- self.assertAlmostEqual(cov, 0.357324, 5)
+ assert cov == pytest.approx(0.357324, 1e-5)
+ def test_swath_coverage_fy3(self):
+ """Test FY3 coverages."""
tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
tend = tstart + timedelta(seconds=60*15.5)
- tle1 = '1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999'
- tle2 = '2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158'
+ tle1 = "1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999"
+ tle2 = "2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158"
- mypass = Pass('FENGYUN 3D', tstart, tend, instrument='mersi2', tle1=tle1, tle2=tle2)
+ mypass = Pass("FENGYUN 3D", tstart, tend, instrument="mersi2", tle1=tle1, tle2=tle2, frequency=100)
cov = mypass.area_coverage(self.euron1)
+ assert cov == pytest.approx(0.786836, 1e-5)
- self.assertAlmostEqual(cov, 0.786836, 5)
+ mypass = Pass("FENGYUN 3D", tstart, tend, instrument="mersi-2", tle1=tle1, tle2=tle2, frequency=100)
+ cov = mypass.area_coverage(self.euron1)
+ assert cov == pytest.approx(0.786836, 1e-5)
- def tearDown(self):
- """Clean up"""
- pass
+ def test_arctic_is_not_antarctic(self):
+ """Test that arctic and antarctic are not mixed up."""
+ tstart = datetime(2021, 2, 3, 16, 28, 3)
+ tend = datetime(2021, 2, 3, 16, 31, 3)
+ overp = Pass("Metop-B", tstart, tend, orb=self.mborb, instrument="avhrr")
-def suite():
- """The suite for test_satpass
- """
- loader = unittest.TestLoader()
- mysuite = unittest.TestSuite()
- mysuite.addTest(loader.loadTestsFromTestCase(TestSwathBoundary))
- mysuite.addTest(loader.loadTestsFromTestCase(TestPass))
+ cov_south = overp.area_coverage(self.antarctica)
+ cov_north = overp.area_coverage(self.arctica)
- return mysuite
+ assert cov_north == 0
+ assert cov_south != 0
+
+
+class TestPassList:
+ """Tests for the pass list."""
+
+ def test_meos_pass_list(self):
+ """Test generating a meos pass list."""
+ orig = (" 1 20190105 FENGYUN 3D 5907 52.943 01:01:45 n/a 01:17:15 15:30 18.6 107.4 -- "
+ "Undefined(Scheduling not done 1546650105 ) a3d0df0cd289244e2f39f613f229a5cc D")
+
+ tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
+ tend = tstart + timedelta(seconds=60 * 15.5)
+
+ tle1 = "1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999"
+ tle2 = "2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158"
+
+ mypass = Pass("FENGYUN 3D", tstart, tend, instrument="mersi2", tle1=tle1, tle2=tle2)
+ coords = (10.72, 59.942, 0.1)
+ meos_format_str = mypass.print_meos(coords, line_no=1)
+ assert meos_format_str == orig
+
+ mypass = Pass("FENGYUN 3D", tstart, tend, instrument="mersi-2", tle1=tle1, tle2=tle2)
+ coords = (10.72, 59.942, 0.1)
+ meos_format_str = mypass.print_meos(coords, line_no=1)
+ assert meos_format_str == orig
+
+ def test_generate_metno_xml(self):
+ """Test generating a metno xml."""
+ import xml.etree.ElementTree as ET # noqa because defusedxml has no Element, see defusedxml#48
+ root = ET.Element("acquisition-schedule")
+
+ orig = ('')
+
+ tstart = datetime.strptime("2019-01-05T01:01:45", "%Y-%m-%dT%H:%M:%S")
+ tend = tstart + timedelta(seconds=60 * 15.5)
+
+ tle1 = "1 43010U 17072A 18363.54078832 -.00000045 00000-0 -79715-6 0 9999"
+ tle2 = "2 43010 98.6971 300.6571 0001567 143.5989 216.5282 14.19710974 58158"
+
+ mypass = Pass("FENGYUN 3D", tstart, tend, instrument="mersi2", tle1=tle1, tle2=tle2)
+
+ coords = (10.72, 59.942, 0.1)
+ mypass.generate_metno_xml(coords, root)
+
+ # Dictionaries don't have guaranteed ordering in Python 3.7, so convert the strings to sets and compare them
+ res = set(ET.tostring(root).decode("utf-8").split())
+ assert res == set(orig.split())
+
+ def tearDown(self):
+ """Clean up."""
+ pass
diff --git a/trollsched/tests/test_schedule.py b/trollsched/tests/test_schedule.py
index 2c7a0fc..609a1db 100644
--- a/trollsched/tests/test_schedule.py
+++ b/trollsched/tests/test_schedule.py
@@ -21,68 +21,24 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-"""Test the schedule module.
-"""
-
-import numpy as np
+"""Test the schedule module."""
+import os
+import tempfile
from datetime import datetime, timedelta
+from unittest.mock import patch
-from trollsched.schedule import fermia, fermib, conflicting_passes
-from trollsched.schedule import parse_datetime, build_filename
-from pyresample.boundary import AreaBoundary
-from trollsched.satpass import get_next_passes
-from trollsched.satpass import get_aqua_terra_dumps
-from trollsched.satpass import get_metopa_passes
-
-import sys
-if sys.version_info < (2, 7):
- import unittest2 as unittest
-else:
- import unittest
-
-try:
- from unittest.mock import patch
-except ImportError:
- from mock import patch
-
-
-# class TestPass(unittest.TestCase):
-
-# def test_day(self):
-# satellite = "noaa 16"
-# tle1 = "1 26536U 00055A 13076.42963155 .00000201 00000-0 13237-3 0 1369"
-# tle2 = "2 26536 99.0540 128.2392 0010826 39.9070 85.2960 14.12848373643614"
-# orb = Orbital(satellite, line1=tle1, line2=tle2)
-# tstart = datetime(2013, 3, 18, 8, 15, 22, 352000)
-# tup = datetime(2013, 3, 18, 8, 22, 52, 352000)
-# tend = datetime(2013, 3, 18, 8, 30, 22, 352000)
-# overp = Pass(satellite, tstart, tend, orb, tup)
-
-# a little night
-
-# day = overp.day()
-
-# self.assertEquals(0.99735685408290298, day)
-
-# on the area of interest there is no night
-
-# area_of_interest = get_area_def("euron1")
-# day = overp.day(area_of_interest)
-# self.assertEquals(1.0, day)
+import pytest
+import yaml
-# tstart = datetime(2013, 3, 18, 8, 16, 22, 352000)
-# overp = Pass(satellite, tstart, tend, orb, tup)
+from trollsched.satpass import get_aqua_terra_dumps, get_metopa_passes, get_next_passes
+from trollsched.schedule import build_filename, conflicting_passes, fermia, fermib, run
-# an entire pass without night
-# day = overp.day()
-# self.assertEquals(1.0, day)
-
-
-class TestTools(unittest.TestCase):
+class TestTools:
+ """Test the tools."""
def test_conflicting_passes(self):
-
+ """Test conflicting passes."""
class MyPass(object):
def __init__(self, rise, fall):
@@ -93,129 +49,81 @@ def __init__(self, rise, fall):
passes = [MyPass(ref_time, ref_time + timedelta(minutes=10)),
MyPass(ref_time + timedelta(minutes=10.01),
ref_time + timedelta(minutes=20))]
- self.assertEquals(
- len(conflicting_passes(passes, timedelta(seconds=0))), 2)
- self.assertEquals(
- len(conflicting_passes(passes, timedelta(seconds=60))), 1)
-
-
-class TestAreaBoundary(unittest.TestCase):
-
- def test_contour(self):
-
- side1_lons = np.arange(4)
- side1_lats = np.arange(4) + 20
- side2_lons = np.arange(4) + 3
- side2_lats = np.arange(4) + 20 + 3
- side3_lons = np.arange(4) + 6
- side3_lats = np.arange(4) + 20 + 6
- side4_lons = np.arange(4) + 9
- side4_lats = np.arange(4) + 20 + 9
-
- bond = AreaBoundary((side1_lons, side1_lats),
- (side2_lons, side2_lats),
- (side3_lons, side3_lats),
- (side4_lons, side4_lats))
-
- lons, lats = bond.contour()
- self.assertTrue(np.allclose(lons, np.arange(12)))
- self.assertTrue(np.allclose(lats, np.arange(12) + 20))
-
- def test_decimate(self):
-
- side1_lons = np.arange(8)
- side1_lats = np.arange(8) + 30
- side2_lons = np.arange(8) + 7
- side2_lats = np.arange(8) + 30 + 7
- side3_lons = np.arange(8) + 14
- side3_lats = np.arange(8) + 30 + 14
- side4_lons = np.arange(8) + 21
- side4_lats = np.arange(8) + 30 + 21
+ assert len(conflicting_passes(passes, timedelta(seconds=0))) == 2
+ assert len(conflicting_passes(passes, timedelta(seconds=60))) == 1
- bond = AreaBoundary((side1_lons, side1_lats),
- (side2_lons, side2_lats),
- (side3_lons, side3_lats),
- (side4_lons, side4_lats))
- bond.decimate(5)
- lons, lats = bond.contour()
-
- self.assertTrue(np.allclose(lons,
- np.array([0, 1, 6, 7, 8,
- 13, 14, 15, 20, 21, 22, 27])))
- self.assertTrue(np.allclose(lats,
- np.array([30, 31, 36, 37, 38, 43, 44, 45,
- 50, 51, 52, 57])))
-
-
-class TestUtils(unittest.TestCase):
+class TestUtils:
+ """Test class for utilities."""
def test_fermi(self):
- self.assertEquals(fermia(0.25), 0.875)
- self.assertEquals(fermib(0.25), 0.5)
-
- def test_parse_datetime(self):
-
- dtobj = parse_datetime('20190104110059')
- self.assertEqual(dtobj, datetime(2019, 1, 4, 11, 0, 59))
+ """Test the fermi formula."""
+ assert fermia(0.25) == 0.875
+ assert fermib(0.25) == 0.5
def test_build_filename(self):
-
+ """Test building filename."""
+ tempdir = tempfile.gettempdir()
pattern_name = "dir_output"
- pattern_dict = {'file_xml': '{dir_output}/{date}-{time}-aquisition-schedule-{mode}-{station}.xml', 'file_sci': '{dir_output}/scisys-schedule-{station}.txt',
- 'dir_plots': '{dir_output}/plots.{station}', 'dir_output': '/tmp', 'file_graph': '{dir_output}/graph.{station}'}
- kwargs = {'date': '20190104', 'output_dir': '.', 'dir_output': '/tmp', 'time': '122023'}
+ pattern_dict = {"file_xml": os.path.join("{dir_output}",
+ "{date}-{time}-aquisition-schedule-{mode}-{station}.xml"),
+ "file_sci": os.path.join("{dir_output}", "scisys-schedule-{station}.txt"),
+ "dir_plots": os.path.join("{dir_output}", "plots.{station}"), "dir_output": tempdir,
+ "file_graph": os.path.join("{dir_output}", "graph.{station}")}
+ kwargs = {"date": "20190104", "output_dir": ".", "dir_output": tempdir, "time": "122023"}
res = build_filename(pattern_name, pattern_dict, kwargs)
- self.assertEqual(res, '/tmp')
+ assert res == tempdir
pattern_name = "file_xml"
- kwargs = {'station': 'nrk', 'mode': 'request', 'time': '125334',
- 'date': '20190104', 'dir_output': '/tmp', 'output_dir': '.'}
+ kwargs = {"station": "nrk", "mode": "request", "time": "125334",
+ "date": "20190104", "dir_output": tempdir, "output_dir": "."}
res = build_filename(pattern_name, pattern_dict, kwargs)
- self.assertEqual(res, '/tmp/20190104-125334-aquisition-schedule-request-nrk.xml')
+ assert res == os.path.join(tempdir, "20190104-125334-aquisition-schedule-request-nrk.xml")
-class TestAll(unittest.TestCase):
+class TestAll:
+ """The test class."""
- def setUp(self):
- """Set up"""
+ def setup_method(self):
+ """Set up."""
from pyorbital import orbital
+
from trollsched.schedule import Satellite
self.utctime = datetime(2018, 11, 28, 10, 0)
self.satellites = ["noaa-20", ]
- self.tles = {'noaa-20': {}}
- self.tles['noaa-20']['line1'] = "1 43013U 17073A 18331.00000000 .00000048 00000-0 22749-4 0 3056"
- self.tles['noaa-20']['line2'] = "2 43013 098.7413 267.0121 0001419 108.5818 058.1314 14.19552981053016"
+ self.tles = {"noaa-20": {}}
+ self.tles["noaa-20"]["line1"] = "1 43013U 17073A 18331.00000000 .00000048 00000-0 22749-4 0 3056"
+ self.tles["noaa-20"]["line2"] = "2 43013 098.7413 267.0121 0001419 108.5818 058.1314 14.19552981053016"
self.aquas = ["aqua", ]
self.terras = ["terra", ]
- self.terra = Satellite('terra', 0, 0)
- self.metopa = Satellite('metop-a', 0, 0)
-
- self.tles['aqua'] = {}
- self.tles['aqua']['line1'] = "1 27424U 02022A 18332.21220389 .00000093 00000-0 30754-4 0 9994"
- self.tles['aqua']['line2'] = "2 27424 98.2121 270.9368 0001045 343.9225 155.8703 14.57111538881313"
- self.tles['terra'] = {}
- self.tles['terra']['line1'] = "1 25994U 99068A 18338.20920286 .00000076 00000-0 26867-4 0 9999"
- self.tles['terra']['line2'] = "2 25994 98.2142 50.5750 0000577 102.5211 257.6060 14.57132862 8586"
- self.tles['metop-a'] = {}
- self.tles['metop-a']['line1'] = "1 29499U 06044A 18338.30873671 .00000000 00000+0 31223-4 0 00013"
- self.tles['metop-a']['line2'] = "2 29499 98.6045 31.7725 0001942 91.8780 346.4884 14.21536046629175"
-
- self.orb = orbital.Orbital('NOAA 20',
- line1=self.tles['noaa-20']['line1'],
- line2=self.tles['noaa-20']['line2'])
- self.aqua_orb = orbital.Orbital('AQUA',
- line1=self.tles['aqua']['line1'],
- line2=self.tles['aqua']['line2'])
- self.terra_orb = orbital.Orbital('TERRA',
- line1=self.tles['terra']['line1'],
- line2=self.tles['terra']['line2'])
- self.metopa_orb = orbital.Orbital('Metop-A',
- line1=self.tles['metop-a']['line1'],
- line2=self.tles['metop-a']['line2'])
+ self.terra = Satellite("terra", 0, 0)
+ self.metopa = Satellite("metop-a", 0, 0)
+
+ self.tles["aqua"] = {}
+ self.tles["aqua"]["line1"] = "1 27424U 02022A 18332.21220389 .00000093 00000-0 30754-4 0 9994"
+ self.tles["aqua"]["line2"] = "2 27424 98.2121 270.9368 0001045 343.9225 155.8703 14.57111538881313"
+ self.tles["terra"] = {}
+ self.tles["terra"]["line1"] = "1 25994U 99068A 18338.20920286 .00000076 00000-0 26867-4 0 9999"
+ self.tles["terra"]["line2"] = "2 25994 98.2142 50.5750 0000577 102.5211 257.6060 14.57132862 8586"
+ self.tles["metop-a"] = {}
+ self.tles["metop-a"]["line1"] = "1 29499U 06044A 18338.30873671 .00000000 00000+0 31223-4 0 00013"
+ self.tles["metop-a"]["line2"] = "2 29499 98.6045 31.7725 0001942 91.8780 346.4884 14.21536046629175"
+
+ self.orb = orbital.Orbital("NOAA 20",
+ line1=self.tles["noaa-20"]["line1"],
+ line2=self.tles["noaa-20"]["line2"])
+ self.aqua_orb = orbital.Orbital("AQUA",
+ line1=self.tles["aqua"]["line1"],
+ line2=self.tles["aqua"]["line2"])
+ self.terra_orb = orbital.Orbital("TERRA",
+ line1=self.tles["terra"]["line1"],
+ line2=self.tles["terra"]["line2"])
+ self.metopa_orb = orbital.Orbital("Metop-A",
+ line1=self.tles["metop-a"]["line1"],
+ line2=self.tles["metop-a"]["line2"])
# These values were used to generate the get_next_passes list mock:
# utctime = datetime(2018, 12, 4, 9, 0)
@@ -235,110 +143,105 @@ def setUp(self):
datetime(2018, 12, 4, 14, 13, 11, 247497))]
self.dumpdata = [
- {'los': datetime(2018, 11, 28, 10, 0, 30), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 28, 9, 50, 24), 'elev': '11.188'},
- {'los': datetime(2018, 11, 28, 11, 39, 47), 'station': 'AS2',
- 'aos': datetime(2018, 11, 28, 11, 28, 51), 'elev': '39.235'},
- {'los': datetime(2018, 11, 28, 13, 19, 8), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 28, 13, 6, 36), 'elev': '58.249'},
- {'los': datetime(2018, 11, 28, 14, 54, 25), 'station': 'AS2',
- 'aos': datetime(2018, 11, 28, 14, 44, 37), 'elev': '22.403'},
- {'los': datetime(2018, 11, 28, 16, 27, 22), 'station': 'SG1',
- 'aos': datetime(2018, 11, 28, 16, 16, 58), 'elev': '9.521'}
+ {"los": datetime(2018, 11, 28, 10, 0, 30), "station": "USAK05",
+ "aos": datetime(2018, 11, 28, 9, 50, 24), "elev": "11.188"},
+ {"los": datetime(2018, 11, 28, 11, 39, 47), "station": "AS2",
+ "aos": datetime(2018, 11, 28, 11, 28, 51), "elev": "39.235"},
+ {"los": datetime(2018, 11, 28, 13, 19, 8), "station": "USAK05",
+ "aos": datetime(2018, 11, 28, 13, 6, 36), "elev": "58.249"},
+ {"los": datetime(2018, 11, 28, 14, 54, 25), "station": "AS2",
+ "aos": datetime(2018, 11, 28, 14, 44, 37), "elev": "22.403"},
+ {"los": datetime(2018, 11, 28, 16, 27, 22), "station": "SG1",
+ "aos": datetime(2018, 11, 28, 16, 16, 58), "elev": "9.521"}
]
- self.dumpdata_terra = [{'los': datetime(2018, 11, 20, 23, 24, 41), 'station': 'SG2',
- 'aos': datetime(2018, 11, 20, 23, 12, 32), 'elev': '17.4526'},
- {'los': datetime(2018, 11, 22, 23, 19, 21), 'station': 'AS3',
- 'aos': datetime(2018, 11, 22, 23, 8, 55), 'elev': '28.9558'},
- {'los': datetime(2018, 11, 22, 23, 19, 21), 'station': 'AS3',
- 'aos': datetime(2018, 11, 22, 23, 8, 55), 'elev': '28.9558'},
- {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1',
- 'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'},
- {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1',
- 'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'},
- {'los': datetime(2018, 11, 26, 22, 47, 34), 'station': 'SG1',
- 'aos': datetime(2018, 11, 26, 22, 34, 58), 'elev': '21.5694'},
- {'los': datetime(2018, 11, 27, 23, 30, 44), 'station': 'SG2',
- 'aos': datetime(2018, 11, 27, 23, 18, 39), 'elev': '16.8795'},
- {'los': datetime(2018, 11, 27, 23, 30, 44), 'station': 'SG2',
- 'aos': datetime(2018, 11, 27, 23, 18, 39), 'elev': '16.8795'},
- {'los': datetime(2018, 11, 28, 22, 43, 53), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 28, 22, 31, 57), 'elev': '40.9264'},
- {'los': datetime(2018, 11, 28, 22, 43, 53), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 28, 22, 31, 57), 'elev': '40.9264'},
- {'los': datetime(2018, 11, 29, 23, 25, 11), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 29, 23, 14, 47), 'elev': '26.9937'},
- {'los': datetime(2018, 11, 29, 23, 25, 11), 'station': 'USAK05',
- 'aos': datetime(2018, 11, 29, 23, 14, 47), 'elev': '26.9937'},
- {'los': datetime(2018, 11, 30, 22, 31, 3), 'station': 'AS2',
- 'aos': datetime(2018, 11, 30, 22, 19, 48), 'elev': '47.8599'},
- {'los': datetime(2018, 12, 1, 1, 29, 2), 'station': 'WG1',
- 'aos': datetime(2018, 12, 1, 1, 21, 11), 'elev': '8.0543'},
- {'los': datetime(2018, 11, 30, 22, 31, 3), 'station': 'AS2',
- 'aos': datetime(2018, 11, 30, 22, 19, 48), 'elev': '47.8599'},
- {'los': datetime(2018, 12, 1, 1, 29, 2), 'station': 'WG1',
- 'aos': datetime(2018, 12, 1, 1, 21, 11), 'elev': '8.0543'},
- {'los': datetime(2018, 12, 3, 1, 28, 14), 'station': 'SG2',
- 'aos': datetime(2018, 12, 3, 1, 17, 53), 'elev': '9.2428'},
- {'los': datetime(2018, 12, 3, 22, 53, 35), 'station': 'SG1',
- 'aos': datetime(2018, 12, 3, 22, 41, 5), 'elev': '20.8371'},
- {'los': datetime(2018, 12, 3, 22, 53, 35), 'station': 'SG1',
- 'aos': datetime(2018, 12, 3, 22, 41, 5), 'elev': '20.8371'},
- {'los': datetime(2018, 12, 4, 23, 43, 5), 'station': 'AS2',
- 'aos': datetime(2018, 12, 4, 23, 33, 8), 'elev': '23.546'}]
-
- @patch('os.path.exists')
+ self.dumpdata_terra = [{"los": datetime(2018, 11, 20, 23, 24, 41), "station": "SG2",
+ "aos": datetime(2018, 11, 20, 23, 12, 32), "elev": "17.4526"},
+ {"los": datetime(2018, 11, 22, 23, 19, 21), "station": "AS3",
+ "aos": datetime(2018, 11, 22, 23, 8, 55), "elev": "28.9558"},
+ {"los": datetime(2018, 11, 22, 23, 19, 21), "station": "AS3",
+ "aos": datetime(2018, 11, 22, 23, 8, 55), "elev": "28.9558"},
+ {"los": datetime(2018, 11, 26, 22, 47, 34), "station": "SG1",
+ "aos": datetime(2018, 11, 26, 22, 34, 58), "elev": "21.5694"},
+ {"los": datetime(2018, 11, 26, 22, 47, 34), "station": "SG1",
+ "aos": datetime(2018, 11, 26, 22, 34, 58), "elev": "21.5694"},
+ {"los": datetime(2018, 11, 26, 22, 47, 34), "station": "SG1",
+ "aos": datetime(2018, 11, 26, 22, 34, 58), "elev": "21.5694"},
+ {"los": datetime(2018, 11, 27, 23, 30, 44), "station": "SG2",
+ "aos": datetime(2018, 11, 27, 23, 18, 39), "elev": "16.8795"},
+ {"los": datetime(2018, 11, 27, 23, 30, 44), "station": "SG2",
+ "aos": datetime(2018, 11, 27, 23, 18, 39), "elev": "16.8795"},
+ {"los": datetime(2018, 11, 28, 22, 43, 53), "station": "USAK05",
+ "aos": datetime(2018, 11, 28, 22, 31, 57), "elev": "40.9264"},
+ {"los": datetime(2018, 11, 28, 22, 43, 53), "station": "USAK05",
+ "aos": datetime(2018, 11, 28, 22, 31, 57), "elev": "40.9264"},
+ {"los": datetime(2018, 11, 29, 23, 25, 11), "station": "USAK05",
+ "aos": datetime(2018, 11, 29, 23, 14, 47), "elev": "26.9937"},
+ {"los": datetime(2018, 11, 29, 23, 25, 11), "station": "USAK05",
+ "aos": datetime(2018, 11, 29, 23, 14, 47), "elev": "26.9937"},
+ {"los": datetime(2018, 11, 30, 22, 31, 3), "station": "AS2",
+ "aos": datetime(2018, 11, 30, 22, 19, 48), "elev": "47.8599"},
+ {"los": datetime(2018, 12, 1, 1, 29, 2), "station": "WG1",
+ "aos": datetime(2018, 12, 1, 1, 21, 11), "elev": "8.0543"},
+ {"los": datetime(2018, 11, 30, 22, 31, 3), "station": "AS2",
+ "aos": datetime(2018, 11, 30, 22, 19, 48), "elev": "47.8599"},
+ {"los": datetime(2018, 12, 1, 1, 29, 2), "station": "WG1",
+ "aos": datetime(2018, 12, 1, 1, 21, 11), "elev": "8.0543"},
+ {"los": datetime(2018, 12, 3, 1, 28, 14), "station": "SG2",
+ "aos": datetime(2018, 12, 3, 1, 17, 53), "elev": "9.2428"},
+ {"los": datetime(2018, 12, 3, 22, 53, 35), "station": "SG1",
+ "aos": datetime(2018, 12, 3, 22, 41, 5), "elev": "20.8371"},
+ {"los": datetime(2018, 12, 3, 22, 53, 35), "station": "SG1",
+ "aos": datetime(2018, 12, 3, 22, 41, 5), "elev": "20.8371"},
+ {"los": datetime(2018, 12, 4, 23, 43, 5), "station": "AS2",
+ "aos": datetime(2018, 12, 4, 23, 33, 8), "elev": "23.546"}]
+
+ @patch("os.path.exists")
def test_get_next_passes_viirs(self, exists):
-
+ """Test getting the next viirs passes."""
exists.return_code = True
# mymock:
- with patch('pyorbital.orbital.Orbital') as mymock:
+ with patch("pyorbital.orbital.Orbital") as mymock:
instance = mymock.return_value
instance.get_next_passes = self.orb.get_next_passes
allpasses = get_next_passes(self.satellites, self.utctime,
- 4, (16, 58, 0), tle_file='nonexisting')
-
- self.assertEqual(len(allpasses), 2)
+ 4, (16, 58, 0), tle_file="nonexisting")
- n20pass1 = allpasses.pop()
+ assert len(allpasses) == 2
rt1 = datetime(2018, 11, 28, 10, 53, 42, 79483)
ft1 = datetime(2018, 11, 28, 11, 9, 6, 916787)
rt2 = datetime(2018, 11, 28, 12, 34, 44, 667963)
ft2 = datetime(2018, 11, 28, 12, 49, 25, 134067)
- dt_ = n20pass1.risetime - rt1
- self.assertAlmostEqual(dt_.seconds, 0)
+ rise_times = [p.risetime for p in allpasses]
+ fall_times = [p.falltime for p in allpasses]
- dt_ = n20pass1.falltime - ft1
- self.assertAlmostEqual(dt_.seconds, 0)
+ assert rt1 in rise_times
+ assert rt2 in rise_times
+ assert ft1 in fall_times
+ assert ft2 in fall_times
- n20pass2 = allpasses.pop()
+ assert all([p.instrument == "viirs" for p in allpasses])
- dt_ = n20pass2.risetime - rt2
- self.assertAlmostEqual(dt_.seconds, 0)
-
- dt_ = n20pass2.falltime - ft2
- self.assertAlmostEqual(dt_.seconds, 0)
-
- @patch('os.path.exists')
- @patch('trollsched.satpass.get_aqua_terra_dumpdata_from_ftp')
+ @patch("os.path.exists")
+ @patch("trollsched.satpass.get_aqua_terra_dumpdata_from_ftp")
def test_get_next_passes_with_aquadumps(self, dumps_from_ftp, exists):
+ """Test getting the passes with dumps."""
dumps_from_ftp.return_value = self.dumpdata
exists.return_code = True
# mymock:
- with patch('pyorbital.orbital.Orbital') as mymock:
+ with patch("pyorbital.orbital.Orbital") as mymock:
instance = mymock.return_value
instance.get_next_passes = self.aqua_orb.get_next_passes
allpasses = get_next_passes(self.aquas, self.utctime,
- 6, (16, 58, 0), tle_file='nonexisting',
+ 6, (16, 58, 0), tle_file="nonexisting",
aqua_terra_dumps=True)
- self.assertEqual(len(allpasses), 3)
+ assert len(allpasses) == 3
rt1 = datetime(2018, 11, 28, 11, 12, 8, 728455)
ft1 = datetime(2018, 11, 28, 11, 26, 8, 250021)
@@ -354,7 +257,7 @@ def test_get_next_passes_with_aquadumps(self, dumps_from_ftp, exists):
if dt_ < dtmin:
dtmin = dt_
- self.assertAlmostEqual(dtmin.seconds, 0)
+ assert dtmin.seconds == pytest.approx(0)
dtmin = timedelta(seconds=10000000)
for falltime in [ft1, ft2, ft3]:
@@ -362,14 +265,17 @@ def test_get_next_passes_with_aquadumps(self, dumps_from_ftp, exists):
if dt_ < dtmin:
dtmin = dt_
- self.assertAlmostEqual(dtmin.seconds, 0)
+ assert dtmin.seconds == pytest.approx(0)
+
+ assert mypass.instrument == "modis"
- @patch('trollsched.satpass.get_aqua_terra_dumpdata_from_ftp')
+ @patch("trollsched.satpass.get_aqua_terra_dumpdata_from_ftp")
def test_get_aqua_terra_dumps(self, dumps_from_ftp):
+ """Test getting the EOS dumps."""
dumps_from_ftp.return_value = self.dumpdata_terra
# mymock:
- with patch('pyorbital.orbital.Orbital') as mymock:
+ with patch("pyorbital.orbital.Orbital") as mymock:
instance = mymock.return_value
instance.get_next_passes = self.terra_orb.get_next_passes
@@ -378,49 +284,96 @@ def test_get_aqua_terra_dumps(self, dumps_from_ftp):
self.terra_orb,
self.terra)
- self.assertEqual(len(dumps), 4)
- self.assertEqual(dumps[0].station, 'SG2')
- self.assertEqual(dumps[0].max_elev, '9.2428')
- self.assertEqual(dumps[0].pass_direction(), 'ascending')
- self.assertEqual((dumps[0].risetime - datetime(2018, 12, 3, 1, 17, 53)).seconds, 0)
- self.assertEqual((dumps[0].falltime - datetime(2018, 12, 3, 1, 28, 14)).seconds, 0)
+ assert len(dumps) == 4
+ assert dumps[0].station == "SG2"
+ assert dumps[0].max_elev == "9.2428"
+ assert dumps[0].pass_direction() == "ascending"
+ assert (dumps[0].risetime - datetime(2018, 12, 3, 1, 17, 53)).seconds == 0
+ assert (dumps[0].falltime - datetime(2018, 12, 3, 1, 28, 14)).seconds == 0
- self.assertEqual(dumps[3].station, 'AS2')
- self.assertEqual(dumps[3].max_elev, '23.546')
- self.assertEqual(dumps[3].pass_direction(), 'descending')
- self.assertEqual((dumps[3].risetime - datetime(2018, 12, 4, 23, 33, 8)).seconds, 0)
- self.assertEqual((dumps[3].falltime - datetime(2018, 12, 4, 23, 43, 5)).seconds, 0)
+ assert dumps[3].station == "AS2"
+ assert dumps[3].max_elev == "23.546"
+ assert dumps[3].pass_direction() == "descending"
+ assert (dumps[3].risetime - datetime(2018, 12, 4, 23, 33, 8)).seconds == 0
+ assert (dumps[3].falltime - datetime(2018, 12, 4, 23, 43, 5)).seconds == 0
- @patch('os.path.exists')
+ @patch("os.path.exists")
def test_get_metopa_passes(self, exists):
-
+ """Test getting metopa passes."""
exists.return_code = True
- # mymock:
- with patch('pyorbital.orbital.Orbital') as mymock:
+ with patch("pyorbital.orbital.Orbital") as mymock:
instance = mymock.return_value
instance.get_next_passes = self.metopa_orb.get_next_passes
metopa_passes = get_metopa_passes(self.metopa, self.metopa_passlist, self.metopa_orb)
- self.assertEqual(len(metopa_passes), 2)
- self.assertEqual(metopa_passes[0].pass_direction(), 'descending')
- self.assertEqual(metopa_passes[0].seconds(), 462.466119)
- self.assertEqual((metopa_passes[0].uptime - datetime(2018, 12, 4, 9, 17, 48, 530484)).seconds, 0)
- self.assertEqual((metopa_passes[0].risetime - datetime(2018, 12, 4, 9, 17, 46, 691075)).seconds, 0)
+ assert len(metopa_passes) == 2
+ assert metopa_passes[0].pass_direction() == "descending"
+ assert metopa_passes[0].seconds() == pytest.approx(487.512589, 1e-5)
+ assert (metopa_passes[0].uptime - datetime(2018, 12, 4, 9, 17, 48, 530484)).seconds == 0
+ assert (metopa_passes[0].risetime - datetime(2018, 12, 4, 9, 17, 21, 644605)).seconds == 0
+
+
+euron1 = """euron1:
+ description: Northern Europe - 1km
+ projection:
+ proj: stere
+ ellps: WGS84
+ lat_0: 90.0
+ lon_0: 0.0
+ lat_ts: 60.0
+ shape:
+ height: 3072
+ width: 3072
+ area_extent:
+ lower_left_xy: [-1000000.0, -4500000.0]
+ upper_right_xy: [2072000.0, -1428000.0]
+"""
+
+
+
+def test_pyorbitals_platform_name(tmp_path):
+ """Test that using pyorbital's platform name allows spurious names in the TLE data."""
+ spurious_tle = ("NOAA 20 (JPSS-1)\n"
+ "1 43013U 17073A 24093.57357837 .00000145 00000+0 86604-4 0 9999\n"
+ "2 43013 98.7039 32.7741 0007542 324.8026 35.2652 14.21254587330172\n")
+
+
+ config_file = tmp_path / "config.yaml"
+ tle_file = tmp_path / "test.tle"
+ area_file = tmp_path / "areas.yaml"
+ sched_file = tmp_path / "mysched.xml"
+
+ with open(area_file, "w") as fd:
+ fd.write(euron1)
+
+ with open(tle_file, "w") as fd:
+ fd.write(spurious_tle)
+
- def tearDown(self):
- """Clean up"""
- pass
+ config = dict(default=dict(station=["nrk"],
+ forward=12,
+ start=0,
+ center_id="SMHI"),
+ stations=dict(nrk=dict(name="nrk",
+ longitude=16,
+ latitude=58,
+ altitude=0,
+ satellites=["noaa-20"],
+ area="euron1",
+ area_file=os.fspath(area_file))),
+ pattern=dict(dir_output=os.fspath(tmp_path),
+ file_xml=os.fspath(sched_file)),
+ satellites={"noaa-20": dict(schedule_name="noaa20",
+ international_designator="43013",
+ night=0.4,
+ day=0.9)}
+ )
-def suite():
- """The suite for test_schedule
- """
- loader = unittest.TestLoader()
- mysuite = unittest.TestSuite()
- mysuite.addTest(loader.loadTestsFromTestCase(TestUtils))
- mysuite.addTest(loader.loadTestsFromTestCase(TestAreaBoundary))
- mysuite.addTest(loader.loadTestsFromTestCase(TestTools))
+ with open(config_file, "w") as fd:
+ fd.write(yaml.dump(config))
- return mysuite
+ run(["-c", os.fspath(config_file), "-x", "-t", os.fspath(tle_file)])
+ assert sched_file in tmp_path.iterdir()
diff --git a/trollsched/tests/test_spherical.py b/trollsched/tests/test_spherical.py
deleted file mode 100644
index 997d7ef..0000000
--- a/trollsched/tests/test_spherical.py
+++ /dev/null
@@ -1,711 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright (c) 2013, 2014, 2015, 2018 Martin Raspaud
-
-# Author(s):
-
-# Martin Raspaud
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see .
-
-"""Test cases for spherical geometry.
-"""
-
-from trollsched.spherical import SphPolygon, Arc, SCoordinate, CCoordinate
-import unittest
-import numpy as np
-
-
-class TestSCoordinate(unittest.TestCase):
-
- """Test SCoordinates.
- """
-
- def test_distance(self):
- """Test Vincenty formula
- """
- d = SCoordinate(0, 0).distance(SCoordinate(1, 1))
- self.assertEquals(d, 1.2745557823062943)
-
- def test_hdistance(self):
- """Test Haversine formula
- """
- d = SCoordinate(0, 0).hdistance(SCoordinate(1, 1))
- self.assertTrue(np.allclose(d, 1.2745557823062943))
-
- def test_str(self):
- """Check the string representation
- """
- d = SCoordinate(0, 0)
- self.assertEqual(str(d), "(0.0, 0.0)")
-
- def test_repr(self):
- """Check the representation
- """
- d = SCoordinate(0, 0)
- self.assertEqual(repr(d), "(0.0, 0.0)")
-
-
-class TestCCoordinate(unittest.TestCase):
-
- """Test SCoordinates.
- """
-
- def test_str(self):
- """Check the string representation
- """
- d = CCoordinate((0, 0, 0))
- self.assertEqual(str(d), "[0 0 0]")
-
- def test_repr(self):
- """Check the representation
- """
- d = CCoordinate((0, 0, 0))
- self.assertEqual(repr(d), "[0 0 0]")
-
- def test_norm(self):
- """Euclidean norm of a cartesian vector
- """
- d = CCoordinate((1, 0, 0))
- self.assertEqual(d.norm(), 1.0)
-
- def test_normalize(self):
- """Normalize a cartesian vector
- """
- d = CCoordinate((2., 0., 0.))
- self.assertTrue(np.allclose(d.normalize().cart, [1, 0, 0]))
-
- def test_cross(self):
- """Test cross product in cartesian coordinates
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- self.assertTrue(np.allclose(d.cross(c).cart, [0., 0., 1.]))
-
- def test_dot(self):
- """Test the dot product of two cartesian vectors.
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- self.assertEqual(d.dot(c), 0)
-
- def test_ne(self):
- """Test inequality of two cartesian vectors.
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- self.assertTrue(c != d)
-
- def test_eq(self):
- """Test equality of two cartesian vectors.
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- self.assertFalse(c == d)
-
- def test_add(self):
- """Test adding cartesian vectors.
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- b = CCoordinate((1., 1., 0.))
- self.assertTrue(np.allclose((d + c).cart, b.cart))
-
- self.assertTrue(np.allclose((d + (0, 1, 0)).cart, b.cart))
-
- self.assertTrue(np.allclose(((0, 1, 0) + d).cart, b.cart))
-
- def test_mul(self):
- """Test multiplying (element-wise) cartesian vectors.
- """
- d = CCoordinate((1., 0., 0.))
- c = CCoordinate((0., 1., 0.))
- b = CCoordinate((0., 0., 0.))
- self.assertTrue(np.allclose((d * c).cart, b.cart))
- self.assertTrue(np.allclose((d * (0, 1, 0)).cart, b.cart))
-
- self.assertTrue(np.allclose(((0, 1, 0) * d).cart, b.cart))
-
- def test_to_spherical(self):
- """Test converting to spherical coordinates.
- """
- d = CCoordinate((1., 0., 0.))
- c = SCoordinate(0, 0)
- self.assertEqual(d.to_spherical(), c)
-
-
-class TestArc(unittest.TestCase):
-
- """Test arcs
- """
-
- def test_eq(self):
- arc1 = Arc(SCoordinate(0, 0),
- SCoordinate(np.deg2rad(10), np.deg2rad(10)))
- arc2 = Arc(SCoordinate(0, np.deg2rad(10)),
- SCoordinate(np.deg2rad(10), 0))
-
- self.assertFalse(arc1 == arc2)
-
- self.assertTrue(arc1 == arc1)
-
- def test_ne(self):
- arc1 = Arc(SCoordinate(0, 0),
- SCoordinate(np.deg2rad(10), np.deg2rad(10)))
- arc2 = Arc(SCoordinate(0, np.deg2rad(10)),
- SCoordinate(np.deg2rad(10), 0))
-
- self.assertTrue(arc1 != arc2)
-
- self.assertFalse(arc1 != arc1)
-
- def test_str(self):
- arc1 = Arc(SCoordinate(0, 0),
- SCoordinate(np.deg2rad(10), np.deg2rad(10)))
- self.assertEqual(str(arc1), str(arc1.start) + " -> " + str(arc1.end))
- self.assertEqual(repr(arc1), str(arc1.start) + " -> " + str(arc1.end))
-
- def test_intersection(self):
- arc1 = Arc(SCoordinate(0, 0),
- SCoordinate(np.deg2rad(10), np.deg2rad(10)))
- arc2 = Arc(SCoordinate(0, np.deg2rad(10)),
- SCoordinate(np.deg2rad(10), 0))
- lon, lat = arc1.intersection(arc2)
-
- self.assertTrue(np.allclose(np.rad2deg(lon), 5))
- self.assertEquals(np.rad2deg(lat), 5.0575148968282093)
-
- arc1 = Arc(SCoordinate(0, 0),
- SCoordinate(np.deg2rad(10), np.deg2rad(10)))
-
- self.assertTrue(arc1.intersection(arc1) is None)
-
- arc1 = Arc(SCoordinate(np.deg2rad(24.341215776575297),
- np.deg2rad(44.987819588259327)),
- SCoordinate(np.deg2rad(18.842727517611817),
- np.deg2rad(46.512483610284178)))
- arc2 = Arc(SCoordinate(np.deg2rad(20.165961750361905),
- np.deg2rad(46.177305385810541)),
- SCoordinate(np.deg2rad(20.253297585831707),
- np.deg2rad(50.935830837274324)))
- inter = SCoordinate(np.deg2rad(20.165957021925202),
- np.deg2rad(46.177022633103398))
- self.assertEquals(arc1.intersection(arc2), inter)
-
- arc1 = Arc(SCoordinate(np.deg2rad(-2.4982818108326734),
- np.deg2rad(48.596644847869655)),
- SCoordinate(np.deg2rad(-2.9571441235622835),
- np.deg2rad(49.165688435261394)))
- arc2 = Arc(SCoordinate(np.deg2rad(-3.4976667413531688),
- np.deg2rad(48.562704872921373)),
- SCoordinate(np.deg2rad(-5.893976312685715),
- np.deg2rad(48.445795283217116)))
-
- self.assertTrue(arc1.intersection(arc2) is None)
-
- def test_angle(self):
- arc1 = Arc(SCoordinate(np.deg2rad(157.5),
- np.deg2rad(89.234600944314138)),
- SCoordinate(np.deg2rad(90),
- np.deg2rad(89)))
- arc2 = Arc(SCoordinate(np.deg2rad(157.5),
- np.deg2rad(89.234600944314138)),
- SCoordinate(np.deg2rad(135),
- np.deg2rad(89)))
-
- self.assertAlmostEqual(np.rad2deg(arc1.angle(arc2)), -44.996385007218926, 13)
-
- arc1 = Arc(SCoordinate(np.deg2rad(112.5),
- np.deg2rad(89.234600944314138)),
- SCoordinate(np.deg2rad(90), np.deg2rad(89)))
- arc2 = Arc(SCoordinate(np.deg2rad(112.5),
- np.deg2rad(89.234600944314138)),
- SCoordinate(np.deg2rad(45), np.deg2rad(89)))
-
- self.assertAlmostEqual(np.rad2deg(arc1.angle(arc2)), 44.996385007218883, 13)
-
- arc1 = Arc(SCoordinate(0, 0), SCoordinate(1, 0))
- self.assertEqual(arc1.angle(arc1), 0)
-
- arc2 = Arc(SCoordinate(1, 0), SCoordinate(0, 0))
- self.assertEqual(arc1.angle(arc2), 0)
-
- arc2 = Arc(SCoordinate(0, 0), SCoordinate(-1, 0))
- self.assertEqual(arc1.angle(arc2), np.pi)
-
- arc2 = Arc(SCoordinate(2, 0), SCoordinate(1, 0))
- self.assertEqual(arc1.angle(arc2), np.pi)
-
- arc2 = Arc(SCoordinate(2, 0), SCoordinate(3, 0))
- self.assertRaises(ValueError, arc1.angle, arc2)
-
-
-class TestSphericalPolygon(unittest.TestCase):
-
- """Test the spherical polygon.
- """
-
- def test_area(self):
- """Test the area function
- """
- vertices = np.array([[1, 2, 3, 4, 3, 2],
- [3, 4, 3, 2, 1, 2]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.00121732523118, polygon.area())
-
- vertices = np.array([[1, 2, 3, 2],
- [3, 4, 3, 2]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.000608430665842, polygon.area())
-
- vertices = np.array([[0, 0, 1, 1],
- [0, 1, 1, 0]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.000304609684862, polygon.area())
-
- # Across the dateline
-
- vertices = np.array([[179.5, -179.5, -179.5, 179.5],
- [1, 1, 0, 0]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.000304609684862, polygon.area())
-
- vertices = np.array([[0, 90, 90, 0],
- [1, 1, 0, 0]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.0349012696772, polygon.area())
-
- vertices = np.array([[90, 0, 0],
- [0, 0, 90]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(np.pi / 2, polygon.area())
-
- # Around the north pole
-
- vertices = np.array([[0, -90, 180, 90],
- [89, 89, 89, 89]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.000609265770322, polygon.area())
-
- # Around the south pole
-
- vertices = np.array([[0, 90, 180, -90],
- [-89, -89, -89, -89]]).T
- polygon = SphPolygon(np.deg2rad(vertices))
-
- self.assertAlmostEqual(0.000609265770322, polygon.area())
-
- def test_is_inside(self):
- """Test checking if a polygon is inside of another.
- """
-
- vertices = np.array([[1, 1, 20, 20],
- [1, 20, 20, 1]]).T
-
- polygon1 = SphPolygon(np.deg2rad(vertices))
-
- vertices = np.array([[0, 0, 30, 30],
- [0, 30, 30, 0]]).T
-
- polygon2 = SphPolygon(np.deg2rad(vertices))
-
- self.assertTrue(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
- self.assertTrue(polygon2.area() > polygon1.area())
-
- polygon2.invert()
- self.assertFalse(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
-
- vertices = np.array([[0, 0, 30, 30],
- [21, 30, 30, 21]]).T
-
- polygon2 = SphPolygon(np.deg2rad(vertices))
- self.assertFalse(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
-
- polygon2.invert()
-
- self.assertTrue(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
-
- vertices = np.array([[100, 100, 130, 130],
- [41, 50, 50, 41]]).T
-
- polygon2 = SphPolygon(np.deg2rad(vertices))
-
- self.assertFalse(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
-
- polygon2.invert()
-
- self.assertTrue(polygon1._is_inside(polygon2))
- self.assertFalse(polygon2._is_inside(polygon1))
-
- vertices = np.array([[-1.54009253, 82.62402855],
- [3.4804808, 82.8105746],
- [20.7214892, 83.00875812],
- [32.8857629, 82.7607758],
- [41.53844302, 82.36024339],
- [47.92062759, 81.91317164],
- [52.82785062, 81.45769791],
- [56.75107895, 81.00613046],
- [59.99843787, 80.56042986],
- [62.76998034, 80.11814453],
- [65.20076209, 79.67471372],
- [67.38577498, 79.22428],
- [69.39480149, 78.75981318],
- [71.28163984, 78.27283234],
- [73.09016378, 77.75277976],
- [74.85864685, 77.18594725],
- [76.62327682, 76.55367303],
- [78.42162204, 75.82918893],
- [80.29698409, 74.97171721],
- [82.30538638, 73.9143231],
- [84.52973107, 72.53535661],
- [87.11696138, 70.57600156],
- [87.79163209, 69.98712409],
- [72.98142447, 67.1760143],
- [61.79517279, 63.2846272],
- [53.50600609, 58.7098766],
- [47.26725347, 53.70533139],
- [42.44083259, 48.42199571],
- [38.59682041, 42.95008531],
- [35.45189206, 37.3452509],
- [32.43435578, 30.72373327],
- [31.73750748, 30.89485287],
- [29.37284023, 31.44344415],
- [27.66001308, 31.81016309],
- [26.31358296, 32.08057499],
- [25.1963477, 32.29313986],
- [24.23118049, 32.46821821],
- [23.36993508, 32.61780082],
- [22.57998837, 32.74952569],
- [21.8375532, 32.86857867],
- [21.12396693, 32.97868717],
- [20.42339605, 33.08268331],
- [19.72121983, 33.18284728],
- [19.00268283, 33.28113306],
- [18.2515215, 33.3793305],
- [17.4482606, 33.47919405],
- [16.56773514, 33.58255576],
- [15.57501961, 33.6914282],
- [14.4180087, 33.8080799],
- [13.01234319, 33.93498577],
- [11.20625437, 34.0742239],
- [8.67990371, 34.22415978],
- [7.89344478, 34.26018768],
- [8.69446485, 41.19823568],
- [9.25707165, 47.17351118],
- [9.66283477, 53.14128114],
- [9.84134875, 59.09937166],
- [9.65054241, 65.04458004],
- [8.7667375, 70.97023122],
- [6.28280904, 76.85731403]])
- polygon1 = SphPolygon(np.deg2rad(vertices))
-
- vertices = np.array([[49.94506701, 46.52610743],
- [51.04293649, 46.52610743],
- [62.02163129, 46.52610743],
- [73.0003261, 46.52610743],
- [83.9790209, 46.52610743],
- [85.05493299, 46.52610743],
- [85.05493299, 45.76549301],
- [85.05493299, 37.58315571],
- [85.05493299, 28.39260587],
- [85.05493299, 18.33178739],
- [85.05493299, 17.30750918],
- [83.95706351, 17.30750918],
- [72.97836871, 17.30750918],
- [61.9996739, 17.30750918],
- [51.0209791, 17.30750918],
- [49.94506701, 17.30750918],
- [49.94506701, 18.35262921],
- [49.94506701, 28.41192025],
- [49.94506701, 37.60055422],
- [49.94506701, 45.78080831]])
- polygon2 = SphPolygon(np.deg2rad(vertices))
-
- self.assertFalse(polygon2._is_inside(polygon1))
- self.assertFalse(polygon1._is_inside(polygon2))
-
- def test_bool(self):
- """Test the intersection and union functions.
- """
- vertices = np.array([[180, 90, 0, -90],
- [89, 89, 89, 89]]).T
- poly1 = SphPolygon(np.deg2rad(vertices))
- vertices = np.array([[-45, -135, 135, 45],
- [89, 89, 89, 89]]).T
- poly2 = SphPolygon(np.deg2rad(vertices))
-
- uni = np.array([[157.5, 89.23460094],
- [-225., 89.],
- [112.5, 89.23460094],
- [90., 89.],
- [67.5, 89.23460094],
- [45., 89.],
- [22.5, 89.23460094],
- [0., 89.],
- [-22.5, 89.23460094],
- [-45., 89.],
- [-67.5, 89.23460094],
- [-90., 89.],
- [-112.5, 89.23460094],
- [-135., 89.],
- [-157.5, 89.23460094],
- [-180., 89.]])
- inter = np.array([[157.5, 89.23460094],
- [112.5, 89.23460094],
- [67.5, 89.23460094],
- [22.5, 89.23460094],
- [-22.5, 89.23460094],
- [-67.5, 89.23460094],
- [-112.5, 89.23460094],
- [-157.5, 89.23460094]])
- poly_inter = poly1.intersection(poly2)
- poly_union = poly1.union(poly2)
-
- self.assertTrue(poly_inter.area() <= poly_union.area())
-
- self.assertTrue(np.allclose(poly_inter.vertices,
- np.deg2rad(inter)))
- self.assertTrue(np.allclose(poly_union.vertices,
- np.deg2rad(uni)))
-
- # Test 2 polygons sharing 2 contiguous edges.
-
- vertices1 = np.array([[-10, 10],
- [-5, 10],
- [0, 10],
- [5, 10],
- [10, 10],
- [10, -10],
- [-10, -10]])
-
- vertices2 = np.array([[-5, 10],
- [0, 10],
- [5, 10],
- [5, -5],
- [-5, -5]])
-
- vertices3 = np.array([[5, 10],
- [5, -5],
- [-5, -5],
- [-5, 10],
- [0, 10]])
-
- poly1 = SphPolygon(np.deg2rad(vertices1))
- poly2 = SphPolygon(np.deg2rad(vertices2))
- poly_inter = poly1.intersection(poly2)
-
- self.assertTrue(np.allclose(poly_inter.vertices,
- np.deg2rad(vertices3)))
-
- # Test when last node of the intersection is the last vertice of the
- # second polygon.
-
- swath_vertices = np.array([[-115.32268301, 66.32946139],
- [-61.48397172, 58.56799254],
- [-60.25004314, 58.00754686],
- [-71.35057076, 49.60229517],
- [-113.746486, 56.03008985]])
- area_vertices = np.array([[-68.32812107, 52.3480829],
- [-67.84993896, 53.07015692],
- [-55.54651296, 64.9254637],
- [-24.63341856, 74.24628796],
- [-31.8996363, 27.99907764],
- [-39.581043, 37.0639821],
- [-50.90185988, 45.56296169],
- [-67.43022017, 52.12399581]])
-
- res = np.array([[-62.77837918, 59.12607053],
- [-61.48397172, 58.56799254],
- [-60.25004314, 58.00754686],
- [-71.35057076, 49.60229517],
- [-113.746486, 56.03008985],
- [-115.32268301, 66.32946139]])
-
- poly1 = SphPolygon(np.deg2rad(swath_vertices))
- poly2 = SphPolygon(np.deg2rad(area_vertices))
-
- poly_inter = poly1.intersection(poly2)
- self.assertTrue(np.allclose(poly_inter.vertices,
- np.deg2rad(res)))
-
- poly_inter = poly2.intersection(poly1)
- self.assertTrue(np.allclose(poly_inter.vertices,
- np.deg2rad(res)))
-
- # vertices = np.array([[ -84.54058691, 71.80094043],
- # [ -74.68557932, 72.16812631],
- # [ -68.06987203, 72.1333064 ],
- # [ -63.17961469, 71.96265 ],
- # [ -59.33392061, 71.73824792],
- # [ -56.16798418, 71.49047832],
- # [ -53.46489053, 71.231076 ],
- # [ -51.08551155, 70.96395329],
- # [ -48.93484325, 70.68929276],
- # [ -46.94415494, 70.40519826],
- # [ -45.06071892, 70.10832093],
- # [ -43.24140861, 69.7939738 ],
- # [ -41.44830671, 69.45591086],
- # [ -39.64527217, 69.08578252],
- # [ -37.79474271, 68.6721527 ],
- # [ -35.85408829, 68.1987858 ],
- # [ -33.7705704 , 67.64156121],
- # [ -31.47314483, 66.9625364 ],
- # [ -28.85703847, 66.09736791],
- # [ -25.74961912, 64.92465312],
- # [ -21.81516555, 63.17261421],
- # [ -18.62398733, 62.28633798],
- # [ -16.93359509, 62.89011263],
- # [ -15.17161807, 63.47161418],
- # [ -13.33621801, 64.02936211],
- # [ -11.42593772, 64.56180886],
- # [ -9.43979715, 65.0673476 ],
- # [ -7.37739816, 65.54432277],
- # [ -5.23903263, 65.99104411],
- # [ -3.02579085, 66.40580433],
- # [ -0.73966571, 66.78690012],
- # [ 1.61635637, 67.13265703],
- # [ 4.03822468, 67.44145758],
- # [ 6.52078043, 67.71177166],
- # [ 9.05775043, 67.94218891],
- # [ 11.64178394, 68.13145134],
- # [ 14.26453542, 68.27848476],
- # [ 16.9167971 , 68.38242749],
- # [ 19.58867724, 68.44265471],
- # [ 22.26981526, 68.45879658],
- # [ 24.94962586, 68.43074943],
- # [ 27.61755654, 68.35867876],
- # [ 30.26334172, 68.24301426],
- # [ 32.87724117, 68.08443684],
- # [ 35.45024798, 67.88385879],
- # [ 37.97425437, 67.64239838],
- # [ 40.44217258, 67.36135027],
- # [ 42.84800609, 67.04215364],
- # [ 45.18687531, 66.68635947],
- # [ 47.45500013, 66.2955988 ],
- # [ 49.64965026, 65.87155246],
- # [ 52.34514841, 66.28428851],
- # [ 56.04377347, 68.57914951],
- # [ 59.05474396, 70.10401937],
- # [ 61.66799965, 71.23110288],
- # [ 64.02929638, 72.12002156],
- # [ 66.22835251, 72.85391032],
- # [ 68.32829893, 73.48143318],
- # [ 70.37866226, 74.03347161],
- # [ 72.42237212, 74.53085444],
- # [ 74.50035309, 74.98833047],
- # [ 76.65524775, 75.41675945],
- # [ 78.93517067, 75.824363 ],
- # [ 81.39826053, 76.21741056],
- # [ 84.11897279, 76.600482 ],
- # [ 87.19757467, 76.97627542],
- # [ 90.77537201, 77.3447072 ],
- # [ 95.06035831, 77.70058684],
- # [ 100.37229526, 78.02797258],
- # [ 107.22498444, 78.28582497],
- # [ 116.481466 , 78.36746171],
- # [ 129.66805239, 77.96163057],
- # [ 134.67038545, 78.4115401 ],
- # [ 136.40302873, 79.30544125],
- # [ 138.4763311 , 80.18558961],
- # [ 140.98282558, 81.04796485],
- # [ 144.04700981, 81.88693584],
- # [ 147.83664747, 82.6944745 ],
- # [ 152.57512293, 83.45896996],
- # [ 158.54810167, 84.16352558],
- # [ 166.0844409 , 84.78383882],
- # [ 175.46720475, 85.28657382],
- # [-173.27937931, 85.6309921 ],
- # [-160.67741256, 85.77820349],
- # [-147.84352095, 85.70789809],
- # [-136.01435526, 85.4301266 ],
- # [-125.94447471, 84.97922118],
- # [-117.77450148, 84.39683471],
- # [-111.28213275, 83.71944226],
- # [-106.1391311 , 82.97447237],
- # [-102.03983076, 82.18121521],
- # [ -98.73868716, 81.3529452 ],
- # [ -96.04944891, 80.49880811],
- # [ -93.83359781, 79.62518236],
- # [ -91.98834044, 78.73659234],
- # [ -90.43691725, 77.83630659],
- # [ -89.12142407, 76.92672961],
- # [ -87.99766337, 76.0096614 ],
- # [ -87.03148527, 75.08647127],
- # [ -86.19618441, 74.15821627],
- # [ -85.47063566, 73.22572391],
- # [ -84.83794555, 72.28964996]])
- # polygon = SphPolygon(np.deg2rad(vertices))
- # polygon.invert()
- # from datetime import datetime
- # utctime = datetime(2013, 12, 12, 9, 31, 54, 485719)
- # utctime = datetime(2013, 11, 11, 11, 11)
- # twi = get_twilight_poly(utctime)
- # poly_inter_day = twi.intersection(polygon)
- # twi.invert()
- # poly_inter_night = twi.intersection(polygon)
- # import matplotlib.pyplot as plt
- # from mpl_toolkits.basemap import Basemap
- # map = Basemap(projection='nsper', lat_0 = 58, lon_0 = 16,
- # resolution = 'l', area_thresh = 1000.)
- # map = Basemap(resolution = "l")
- # map.drawcoastlines()
- # map.drawcountries()
- # map.drawmapboundary(fill_color='white')
- # map.drawmeridians(np.arange(0, 360, 30))
- # map.drawparallels(np.arange(-90, 90, 30))
-
- # poly_inter_day.draw(map, "-r")
- # poly_inter_night.draw(map, "-b")
- # plt.show()
-
- # def test_twilight(self):
- # """Test the twilight polygon.
- # """
- # from datetime import datetime
- # utctime = datetime(2013, 3, 20, 12, 0)
-
- # print np.rad2deg(get_twilight_poly(utctime).vertices)
-
- # vertices = np.array([[0, -90, 180, 90],
- # [89, 89, 89, 89]]).T
-
-
-def suite():
- """The suite for test_spherical
- """
- loader = unittest.TestLoader()
- mysuite = unittest.TestSuite()
- mysuite.addTest(loader.loadTestsFromTestCase(TestSCoordinate))
- mysuite.addTest(loader.loadTestsFromTestCase(TestCCoordinate))
- mysuite.addTest(loader.loadTestsFromTestCase(TestArc))
- mysuite.addTest(loader.loadTestsFromTestCase(TestSphericalPolygon))
-
- return mysuite
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/trollsched/utils.py b/trollsched/utils.py
index c16f5bc..f610e8c 100644
--- a/trollsched/utils.py
+++ b/trollsched/utils.py
@@ -22,16 +22,14 @@
"""Utility functions and config reading for the pytroll-scheduler
"""
+import warnings
import yaml
import logging
-from collections import Mapping
-from six.moves.configparser import ConfigParser
+from collections.abc import Mapping
+from configparser import ConfigParser
-try:
- from trollsched import schedule
-except ImportError:
- import schedule
+from trollsched import schedule
logger = logging.getLogger("trollsched")
@@ -44,13 +42,14 @@ def read_yaml_file(file_name):
conf_dict = {}
for file_obj in file_name:
with open(file_obj) as fp:
- tmp_dict = yaml.load(fp)
+ tmp_dict = yaml.safe_load(fp)
conf_dict = recursive_dict_update(conf_dict, tmp_dict)
return conf_dict
def recursive_dict_update(d, u):
"""Recursive dictionary update.
+
Copied from:
http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
"""
@@ -67,55 +66,9 @@ def read_config(filename):
try:
return read_config_yaml(filename)
except yaml.parser.ParserError as e:
- return read_config_cfg(filename)
-
-
-def read_config_cfg(filename):
- """Read the config file *filename* and replace the values in global
- variables.
- """
- cfg = ConfigParser()
- cfg.read(filename)
-
- def read_cfg_opts(section):
- """Read the option:value pairs in one section,
- converting value to int/float if applicable.
- """
- kv_dict = {}
- for k, v in cfg.items(section):
- try:
- kv_dict[k] = int(v)
- except:
- try:
- kv_dict[k] = float(v)
- except:
- kv_dict[k] = v
- return kv_dict
-
- default_params = read_cfg_opts("default")
- pattern = {}
- for k, v in cfg.items("pattern"):
- pattern[k] = v
- station_list = []
- for station_id in default_params["station"].split(","):
- station_params = read_cfg_opts(station_id)
- satellites = cfg.get(station_id, "satellites").split(",")
- sat_list = []
- for sat_name in satellites:
- sat_list.append(schedule.Satellite(sat_name,
- **read_cfg_opts(sat_name)
- ))
- new_station = schedule.Station(station_id, **station_params)
- new_station.satellites = sat_list
- station_list.append(new_station)
- scheduler = schedule.Scheduler(stations=station_list,
- min_pass=default_params.get("min_pass", 4),
- forward=default_params.get("forward"),
- start=default_params.get("start"),
- dump_url=default_params.get("dump_url", None),
- patterns=pattern,
- center_id=default_params.get("center_id", "unknown"))
- return scheduler
+ logger.error("INI format for scheduler config is deprecated since v0.3.4, "
+ "please update your configuration to YAML.")
+ raise
def read_config_yaml(filename):
@@ -145,6 +98,9 @@ def read_config_yaml(filename):
pattern[k] = v
sched_params = cfg['default']
+ plot_parameters = sched_params.get('plot_parameters', {})
+ plot_title = sched_params.get('plot_title', None)
+
scheduler = schedule.Scheduler(stations=[stations[st_id]
for st_id in sched_params['station']],
min_pass=sched_params.get('min_pass', 4),
@@ -152,6 +108,8 @@ def read_config_yaml(filename):
start=sched_params['start'],
dump_url=sched_params.get('dump_url'),
patterns=pattern,
- center_id=sched_params.get('center_id', 'unknown'))
+ center_id=sched_params.get('center_id', 'unknown'),
+ plot_parameters=plot_parameters,
+ plot_title=plot_title)
return scheduler
diff --git a/trollsched/version.py b/trollsched/version.py
index 6fdd17d..67fd964 100644
--- a/trollsched/version.py
+++ b/trollsched/version.py
@@ -5,8 +5,9 @@
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# This file is released into the public domain.
+# Generated by versioneer-0.29
+# https://github.com/python-versioneer/python-versioneer
"""Git implementation of _version.py."""
@@ -15,9 +16,11 @@
import re
import subprocess
import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple
+import functools
-def get_keywords():
+def get_keywords() -> Dict[str, str]:
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
@@ -33,8 +36,15 @@ def get_keywords():
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
+ VCS: str
+ style: str
+ tag_prefix: str
+ parentdir_prefix: str
+ versionfile_source: str
+ verbose: bool
-def get_config():
+
+def get_config() -> VersioneerConfig:
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
@@ -52,13 +62,13 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
- def decorate(f):
+def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
+ """Create decorator to mark a method as the handler of a VCS."""
+ def decorate(f: Callable) -> Callable:
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
@@ -67,22 +77,35 @@ def decorate(f):
return decorate
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
+def run_command(
+ commands: List[str],
+ args: List[str],
+ cwd: Optional[str] = None,
+ verbose: bool = False,
+ hide_stderr: bool = False,
+ env: Optional[Dict[str, str]] = None,
+) -> Tuple[Optional[str], Optional[int]]:
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs: Dict[str, Any] = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
- e = sys.exc_info()[1]
+ except OSError as e:
if e.errno == errno.ENOENT:
continue
if verbose:
@@ -93,18 +116,20 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
-def versions_from_parentdir(parentdir_prefix, root, verbose):
+def versions_from_parentdir(
+ parentdir_prefix: str,
+ root: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
@@ -113,15 +138,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
@@ -130,41 +154,48 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
+def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
- keywords = {}
+ keywords: Dict[str, str] = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
+def git_versions_from_keywords(
+ keywords: Dict[str, str],
+ tag_prefix: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -177,11 +208,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -190,7 +221,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -199,6 +230,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %s" % r)
return {"version": r,
@@ -214,7 +250,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(
+ tag_prefix: str,
+ root: str,
+ verbose: bool,
+ runner: Callable = run_command
+) -> Dict[str, Any]:
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -225,8 +266,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=not verbose)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
@@ -234,24 +282,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s*" % tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, [
+ "describe", "--tags", "--dirty", "--always", "--long",
+ "--match", f"{tag_prefix}[[:digit:]]*"
+ ], cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
- pieces = {}
+ pieces: Dict[str, Any] = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -268,7 +349,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
@@ -293,26 +374,27 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+ pieces["distance"] = len(out.split()) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
-def plus_or_dot(pieces):
+def plus_or_dot(pieces: Dict[str, Any]) -> str:
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
-def render_pep440(pieces):
+def render_pep440(pieces: Dict[str, Any]) -> str:
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
@@ -337,23 +419,71 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces: Dict[str, Any]) -> str:
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%d" % (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
+ rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
-def render_pep440_post(pieces):
+def render_pep440_post(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
@@ -380,12 +510,41 @@ def render_pep440_post(pieces):
return rendered
-def render_pep440_old(pieces):
+def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_old(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -402,7 +561,7 @@ def render_pep440_old(pieces):
return rendered
-def render_git_describe(pieces):
+def render_git_describe(pieces: Dict[str, Any]) -> str:
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
@@ -422,7 +581,7 @@ def render_git_describe(pieces):
return rendered
-def render_git_describe_long(pieces):
+def render_git_describe_long(pieces: Dict[str, Any]) -> str:
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
@@ -442,7 +601,7 @@ def render_git_describe_long(pieces):
return rendered
-def render(pieces, style):
+def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
@@ -456,10 +615,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -474,7 +637,7 @@ def render(pieces, style):
"date": pieces.get("date")}
-def get_versions():
+def get_versions() -> Dict[str, Any]:
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
@@ -495,7 +658,7 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for _ in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
diff --git a/trollsched/writers.py b/trollsched/writers.py
new file mode 100644
index 0000000..f39c46c
--- /dev/null
+++ b/trollsched/writers.py
@@ -0,0 +1,138 @@
+"""Writers for different schedule formats."""
+import os
+from datetime import datetime
+
+
+def generate_meos_file(output_file, allpasses, coords, start, report_mode=False):
+ """Generate a meos file."""
+ with open(output_file, "w") as out:
+ out.write(" No. Date Satellite Orbit Max EL AOS Ovlp LOS Durtn Az(AOS/MAX)\n")
+ line_no = 1
+ for overpass in sorted(allpasses, key=lambda x: x.risetime):
+ if (overpass.rec or report_mode) and overpass.risetime > start:
+ out.write(overpass.print_meos(coords, line_no) + "\n")
+ line_no += 1
+ out.close()
+ return output_file
+
+
+def generate_sch_file(output_file, overpasses, coords):
+ """Generate a vcs/scisys/cgi schedule file."""
+ with open(output_file, "w") as out:
+ # create epochs
+ out.write("#Orbital elements\n#\n#SCName Epochtime\n#\n")
+ satellites = set()
+
+ for overpass in overpasses:
+ epoch = "!{0:<16} {1}".format(overpass.satellite.name.upper(),
+ overpass.orb.tle.epoch.strftime("%Y%m%d %H%M%S"))
+ satellites |= set([epoch])
+ sats = "\n".join(satellites) + "\n"
+ out.write(sats)
+ out.write("#\n#\n#Pass List\n#\n")
+
+ out.write(
+ "#SCName RevNum Risetime Falltime Elev Dura ANL Rec Dir Man Ovl OvlSCName "
+ "OvlRev OvlRisetime OrigRisetime OrigFalltime OrigDuration\n#\n")
+
+ for overpass in sorted(overpasses):
+ out.write(overpass.print_vcs(coords) + "\n")
+
+
+def generate_metno_xml_file(output_file, allpasses, coords, start, end, station_name, center_id, report_mode=False):
+ """Generate a metno xml file."""
+ import defusedxml.ElementTree as ET
+
+ reqtime = datetime.utcnow()
+ time_format = "%Y-%m-%dT%H:%M:%S"
+
+ with open(output_file, "w") as out:
+ out.write("<?xml version='1.0' encoding='utf-8'?>")
+
+ root = ET.Element("acquisition-schedule")
+ props = ET.SubElement(root, "properties")
+ proj = ET.SubElement(props, "project")
+ proj.text = "Pytroll"
+ typep = ET.SubElement(props, "type")
+ if report_mode:
+ typep.text = "report"
+ else:
+ typep.text = "request"
+ station = ET.SubElement(props, "station")
+ station.text = station_name
+ file_start = ET.SubElement(props, "file-start")
+ file_start.text = start.strftime(time_format)
+ file_end = ET.SubElement(props, "file-end")
+ file_end.text = end.strftime(time_format)
+ reqby = ET.SubElement(props, "requested-by")
+ reqby.text = center_id
+ reqon = ET.SubElement(props, "requested-on")
+ reqon.text = reqtime.strftime(time_format)
+
+ for overpass in sorted(allpasses, key=lambda x: x.risetime):
+ if (overpass.rec or report_mode) and overpass.risetime > start:
+ overpass.generate_metno_xml(coords, root)
+
+ out.write(ET.tostring(root).decode("utf-8"))
+ out.close()
+ return output_file
+
+
+def generate_xml_requests(sched, start, end, station_name, center_id, report_mode=False):
+ """Create xml requests."""
+ # defusedxml is not needed here as we only generate an xml file (ie no reading of potentially harmful data)
+ from xml.etree import ElementTree as ET # noqa
+
+ reqtime = datetime.utcnow()
+ time_format = "%Y-%m-%d-%H:%M:%S"
+
+ root = ET.Element("acquisition-schedule")
+ props = ET.SubElement(root, "properties")
+ proj = ET.SubElement(props, "project")
+ proj.text = "Pytroll"
+ typep = ET.SubElement(props, "type")
+ if report_mode:
+ typep.text = "report"
+ else:
+ typep.text = "request"
+ station = ET.SubElement(props, "station")
+ station.text = station_name
+ file_start = ET.SubElement(props, "file-start")
+ file_start.text = start.strftime(time_format)
+ file_end = ET.SubElement(props, "file-end")
+ file_end.text = end.strftime(time_format)
+ reqby = ET.SubElement(props, "requested-by")
+ reqby.text = center_id
+ reqon = ET.SubElement(props, "requested-on")
+ reqon.text = reqtime.strftime(time_format)
+
+ for overpass in sorted(sched):
+ if (overpass.rec or report_mode) and overpass.risetime > start:
+ ovpass = ET.SubElement(root, "pass")
+ sat_name = overpass.satellite.schedule_name or overpass.satellite.name
+ ovpass.set("satellite", sat_name)
+ ovpass.set("start-time", overpass.risetime.strftime(time_format))
+ ovpass.set("end-time", overpass.falltime.strftime(time_format))
+ if report_mode:
+ if overpass.fig is not None:
+ ovpass.set("img", overpass.fig)
+ ovpass.set("rec", str(overpass.rec))
+
+ return root, reqtime
+
+
+def generate_xml_file(sched, start, end, xml_file, station, center_id, report_mode=False):
+ """Create an xml request file."""
+ import defusedxml.ElementTree as ET
+ tree, reqtime = generate_xml_requests(sched,
+ start, end,
+ station, center_id, report_mode)
+ filename = xml_file
+ tmp_filename = xml_file + reqtime.strftime("%Y-%m-%d-%H-%M-%S") + ".tmp"
+ with open(tmp_filename, "w") as fp_:
+ if report_mode:
+ fp_.write("<?xml version='1.0' encoding='utf-8'?>"
+ "<?xml-stylesheet type='text/xsl' href='reqreader.xsl'?>")
+ fp_.write(ET.tostring(tree).decode("utf-8"))
+ os.rename(tmp_filename, filename)
+ return filename
diff --git a/versioneer.py b/versioneer.py
index 64fea1c..1e3753e 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -1,5 +1,5 @@
-# Version: 0.18
+# Version: 0.29
"""The Versioneer - like a rocketeer, but for versions.
@@ -7,18 +7,14 @@
==============
* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
+* https://github.com/python-versioneer/python-versioneer
* Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
+* License: Public Domain (Unlicense)
+* Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3
+* [![Latest Version][pypi-image]][pypi-url]
+* [![Build Status][travis-image]][travis-url]
+
+This is a tool for managing a recorded version number in setuptools-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
@@ -27,9 +23,38 @@
## Quick Install
-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
+Versioneer provides two installation modes. The "classic" vendored mode installs
+a copy of versioneer into your repository. The experimental build-time dependency mode
+is intended to allow you to skip this step and simplify the process of upgrading.
+
+### Vendored mode
+
+* `pip install versioneer` to somewhere in your $PATH
+ * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+ available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+ `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+ * Note that you will need to add `tomli; python_version < "3.11"` to your
+ build-time dependencies if you use `pyproject.toml`
+* run `versioneer install --vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
+
+### Build-time dependency mode
+
+* `pip install versioneer` to somewhere in your $PATH
+ * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+ available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+ `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`)
+ to the `requires` key of the `build-system` table in `pyproject.toml`:
+ ```toml
+ [build-system]
+ requires = ["setuptools", "versioneer[toml]"]
+ build-backend = "setuptools.build_meta"
+ ```
+* run `versioneer install --no-vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
## Version Identifiers
@@ -61,7 +86,7 @@
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes.
+uncommitted changes).
The version identifier is used for multiple purposes:
@@ -166,7 +191,7 @@
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
+[issues page](https://github.com/python-versioneer/python-versioneer/issues).
### Subprojects
@@ -180,7 +205,7 @@
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
- provide bindings to Python (and perhaps other langauges) in subdirectories.
+ provide bindings to Python (and perhaps other languages) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
@@ -194,9 +219,9 @@
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
@@ -224,31 +249,20 @@
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when then
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
- indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
+* edit `setup.cfg` and `pyproject.toml`, if necessary,
+ to include any new configuration settings indicated by the release notes.
+ See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install --[no-]vendor` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
@@ -265,35 +279,70 @@
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
+## Similar projects
+
+* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
+ dependency
+* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
+ versioneer
+* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
+ plugin
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
+Specifically, both are released under the "Unlicense", as described in
+https://unlicense.org/.
+
+[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
+[pypi-url]: https://pypi.python.org/pypi/versioneer/
+[travis-image]:
+https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
+[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
"""
+# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
+# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
+# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
+# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
+# pylint:disable=attribute-defined-outside-init,too-many-arguments
-from __future__ import print_function
-try:
- import configparser
-except ImportError:
- import ConfigParser as configparser
+import configparser
import errno
import json
import os
import re
import subprocess
import sys
+from pathlib import Path
+from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union
+from typing import NoReturn
+import functools
+
+have_tomllib = True
+if sys.version_info >= (3, 11):
+ import tomllib
+else:
+ try:
+ import tomli as tomllib
+ except ImportError:
+ have_tomllib = False
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
+ VCS: str
+ style: str
+ tag_prefix: str
+ versionfile_source: str
+ versionfile_build: Optional[str]
+ parentdir_prefix: Optional[str]
+ verbose: Optional[bool]
+
-def get_root():
+def get_root() -> str:
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
@@ -301,13 +350,23 @@ def get_root():
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
+ pyproject_toml = os.path.join(root, "pyproject.toml")
versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+ if not (
+ os.path.exists(setup_py)
+ or os.path.exists(pyproject_toml)
+ or os.path.exists(versioneer_py)
+ ):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
+ pyproject_toml = os.path.join(root, "pyproject.toml")
versioneer_py = os.path.join(root, "versioneer.py")
- if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
+ if not (
+ os.path.exists(setup_py)
+ or os.path.exists(pyproject_toml)
+ or os.path.exists(versioneer_py)
+ ):
err = ("Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
@@ -321,43 +380,62 @@ def get_root():
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
- me = os.path.realpath(os.path.abspath(__file__))
- me_dir = os.path.normcase(os.path.splitext(me)[0])
+ my_path = os.path.realpath(os.path.abspath(__file__))
+ me_dir = os.path.normcase(os.path.splitext(my_path)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
- if me_dir != vsr_dir:
+ if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
print("Warning: build in %s is using versioneer.py from %s"
- % (os.path.dirname(me), versioneer_py))
+ % (os.path.dirname(my_path), versioneer_py))
except NameError:
pass
return root
-def get_config_from_root(root):
+def get_config_from_root(root: str) -> VersioneerConfig:
"""Read the project setup.cfg file to determine Versioneer config."""
- # This might raise EnvironmentError (if setup.cfg is missing), or
+ # This might raise OSError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
- setup_cfg = os.path.join(root, "setup.cfg")
- parser = configparser.SafeConfigParser()
- with open(setup_cfg, "r") as f:
- parser.readfp(f)
- VCS = parser.get("versioneer", "VCS") # mandatory
-
- def get(parser, name):
- if parser.has_option("versioneer", name):
- return parser.get("versioneer", name)
- return None
+ root_pth = Path(root)
+ pyproject_toml = root_pth / "pyproject.toml"
+ setup_cfg = root_pth / "setup.cfg"
+ section: Union[Dict[str, Any], configparser.SectionProxy, None] = None
+ if pyproject_toml.exists() and have_tomllib:
+ try:
+ with open(pyproject_toml, 'rb') as fobj:
+ pp = tomllib.load(fobj)
+ section = pp['tool']['versioneer']
+ except (tomllib.TOMLDecodeError, KeyError) as e:
+ print(f"Failed to load config from {pyproject_toml}: {e}")
+ print("Try to load it from setup.cfg")
+ if not section:
+ parser = configparser.ConfigParser()
+ with open(setup_cfg) as cfg_file:
+ parser.read_file(cfg_file)
+ parser.get("versioneer", "VCS") # raise error if missing
+
+ section = parser["versioneer"]
+
+ # `cast`` really shouldn't be used, but its simplest for the
+ # common VersioneerConfig users at the moment. We verify against
+ # `None` values elsewhere where it matters
+
cfg = VersioneerConfig()
- cfg.VCS = VCS
- cfg.style = get(parser, "style") or ""
- cfg.versionfile_source = get(parser, "versionfile_source")
- cfg.versionfile_build = get(parser, "versionfile_build")
- cfg.tag_prefix = get(parser, "tag_prefix")
- if cfg.tag_prefix in ("''", '""'):
+ cfg.VCS = section['VCS']
+ cfg.style = section.get("style", "")
+ cfg.versionfile_source = cast(str, section.get("versionfile_source"))
+ cfg.versionfile_build = section.get("versionfile_build")
+ cfg.tag_prefix = cast(str, section.get("tag_prefix"))
+ if cfg.tag_prefix in ("''", '""', None):
cfg.tag_prefix = ""
- cfg.parentdir_prefix = get(parser, "parentdir_prefix")
- cfg.verbose = get(parser, "verbose")
+ cfg.parentdir_prefix = section.get("parentdir_prefix")
+ if isinstance(section, configparser.SectionProxy):
+ # Make sure configparser translates to bool
+ cfg.verbose = section.getboolean("verbose")
+ else:
+ cfg.verbose = section.get("verbose")
+
return cfg
@@ -366,37 +444,48 @@ class NotThisMethod(Exception):
# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
- def decorate(f):
+def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
+ """Create decorator to mark a method as the handler of a VCS."""
+ def decorate(f: Callable) -> Callable:
"""Store f in HANDLERS[vcs][method]."""
- if vcs not in HANDLERS:
- HANDLERS[vcs] = {}
- HANDLERS[vcs][method] = f
+ HANDLERS.setdefault(vcs, {})[method] = f
return f
return decorate
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
+def run_command(
+ commands: List[str],
+ args: List[str],
+ cwd: Optional[str] = None,
+ verbose: bool = False,
+ hide_stderr: bool = False,
+ env: Optional[Dict[str, str]] = None,
+) -> Tuple[Optional[str], Optional[int]]:
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs: Dict[str, Any] = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
- e = sys.exc_info()[1]
+ except OSError as e:
if e.errno == errno.ENOENT:
continue
if verbose:
@@ -407,26 +496,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
-LONG_VERSION_PY['git'] = '''
+LONG_VERSION_PY['git'] = r'''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# This file is released into the public domain.
+# Generated by versioneer-0.29
+# https://github.com/python-versioneer/python-versioneer
"""Git implementation of _version.py."""
@@ -435,9 +523,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
import re
import subprocess
import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple
+import functools
-def get_keywords():
+def get_keywords() -> Dict[str, str]:
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
@@ -453,8 +543,15 @@ def get_keywords():
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
+ VCS: str
+ style: str
+ tag_prefix: str
+ parentdir_prefix: str
+ versionfile_source: str
+ verbose: bool
+
-def get_config():
+def get_config() -> VersioneerConfig:
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
@@ -472,13 +569,13 @@ class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
-def register_vcs_handler(vcs, method): # decorator
- """Decorator to mark a method as the handler for a particular VCS."""
- def decorate(f):
+def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
+ """Create decorator to mark a method as the handler of a VCS."""
+ def decorate(f: Callable) -> Callable:
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
@@ -487,22 +584,35 @@ def decorate(f):
return decorate
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
- env=None):
+def run_command(
+ commands: List[str],
+ args: List[str],
+ cwd: Optional[str] = None,
+ verbose: bool = False,
+ hide_stderr: bool = False,
+ env: Optional[Dict[str, str]] = None,
+) -> Tuple[Optional[str], Optional[int]]:
"""Call the given command(s)."""
assert isinstance(commands, list)
- p = None
- for c in commands:
+ process = None
+
+ popen_kwargs: Dict[str, Any] = {}
+ if sys.platform == "win32":
+ # This hides the console window if pythonw.exe is used
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ popen_kwargs["startupinfo"] = startupinfo
+
+ for command in commands:
try:
- dispcmd = str([c] + args)
+ dispcmd = str([command] + args)
# remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen([c] + args, cwd=cwd, env=env,
- stdout=subprocess.PIPE,
- stderr=(subprocess.PIPE if hide_stderr
- else None))
+ process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+ stdout=subprocess.PIPE,
+ stderr=(subprocess.PIPE if hide_stderr
+ else None), **popen_kwargs)
break
- except EnvironmentError:
- e = sys.exc_info()[1]
+ except OSError as e:
if e.errno == errno.ENOENT:
continue
if verbose:
@@ -513,18 +623,20 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
- stdout = p.communicate()[0].strip()
- if sys.version_info[0] >= 3:
- stdout = stdout.decode()
- if p.returncode != 0:
+ stdout = process.communicate()[0].strip().decode()
+ if process.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
- return None, p.returncode
- return stdout, p.returncode
+ return None, process.returncode
+ return stdout, process.returncode
-def versions_from_parentdir(parentdir_prefix, root, verbose):
+def versions_from_parentdir(
+ parentdir_prefix: str,
+ root: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
@@ -533,15 +645,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
@@ -550,41 +661,48 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
+def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
- keywords = {}
+ keywords: Dict[str, str] = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
+def git_versions_from_keywords(
+ keywords: Dict[str, str],
+ tag_prefix: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -597,11 +715,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
@@ -610,7 +728,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
@@ -619,6 +737,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %%s" %% r)
return {"version": r,
@@ -634,7 +757,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(
+ tag_prefix: str,
+ root: str,
+ verbose: bool,
+ runner: Callable = run_command
+) -> Dict[str, Any]:
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -645,8 +773,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=not verbose)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
@@ -654,24 +789,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%%s*" %% tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, [
+ "describe", "--tags", "--dirty", "--always", "--long",
+ "--match", f"{tag_prefix}[[:digit:]]*"
+ ], cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
- pieces = {}
+ pieces: Dict[str, Any] = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -688,7 +856,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
@@ -713,26 +881,27 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+ pieces["distance"] = len(out.split()) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
-def plus_or_dot(pieces):
+def plus_or_dot(pieces: Dict[str, Any]) -> str:
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
-def render_pep440(pieces):
+def render_pep440(pieces: Dict[str, Any]) -> str:
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
@@ -757,23 +926,71 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces: Dict[str, Any]) -> str:
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%%d" %% pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%%d" %% (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%%d" %% pieces["distance"]
+ rendered = "0.post0.dev%%d" %% pieces["distance"]
return rendered
-def render_pep440_post(pieces):
+def render_pep440_post(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
@@ -800,12 +1017,41 @@ def render_pep440_post(pieces):
return rendered
-def render_pep440_old(pieces):
+def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%%d" %% pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%%s" %% pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_old(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -822,7 +1068,7 @@ def render_pep440_old(pieces):
return rendered
-def render_git_describe(pieces):
+def render_git_describe(pieces: Dict[str, Any]) -> str:
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
@@ -842,7 +1088,7 @@ def render_git_describe(pieces):
return rendered
-def render_git_describe_long(pieces):
+def render_git_describe_long(pieces: Dict[str, Any]) -> str:
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
@@ -862,7 +1108,7 @@ def render_git_describe_long(pieces):
return rendered
-def render(pieces, style):
+def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
@@ -876,10 +1122,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -894,7 +1144,7 @@ def render(pieces, style):
"date": pieces.get("date")}
-def get_versions():
+def get_versions() -> Dict[str, Any]:
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
@@ -915,7 +1165,7 @@ def get_versions():
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
- for i in cfg.versionfile_source.split('/'):
+ for _ in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
@@ -942,41 +1192,48 @@ def get_versions():
@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
+def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
- keywords = {}
+ keywords: Dict[str, str] = {}
try:
- f = open(versionfile_abs, "r")
- for line in f.readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["full"] = mo.group(1)
- if line.strip().startswith("git_date ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- keywords["date"] = mo.group(1)
- f.close()
- except EnvironmentError:
+ with open(versionfile_abs, "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith("git_refnames ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["refnames"] = mo.group(1)
+ if line.strip().startswith("git_full ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["full"] = mo.group(1)
+ if line.strip().startswith("git_date ="):
+ mo = re.search(r'=\s*"(.*)"', line)
+ if mo:
+ keywords["date"] = mo.group(1)
+ except OSError:
pass
return keywords
@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
+def git_versions_from_keywords(
+ keywords: Dict[str, str],
+ tag_prefix: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Get version information from git keywords."""
- if not keywords:
- raise NotThisMethod("no keywords at all, weird")
+ if "refnames" not in keywords:
+ raise NotThisMethod("Short version file found")
date = keywords.get("date")
if date is not None:
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
+
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
@@ -989,11 +1246,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
+ refs = {r.strip() for r in refnames.strip("()").split(",")}
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
- tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+ tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
@@ -1002,7 +1259,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
- tags = set([r for r in refs if re.search(r'\d', r)])
+ tags = {r for r in refs if re.search(r'\d', r)}
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
@@ -1011,6 +1268,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
+ # Filter out refs that exactly match prefix or that don't start
+ # with a number once the prefix is stripped (mostly a concern
+ # when prefix is '')
+ if not re.match(r'\d', r):
+ continue
if verbose:
print("picking %s" % r)
return {"version": r,
@@ -1026,7 +1288,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(
+ tag_prefix: str,
+ root: str,
+ verbose: bool,
+ runner: Callable = run_command
+) -> Dict[str, Any]:
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
@@ -1037,8 +1304,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
- hide_stderr=True)
+ # GIT_DIR can interfere with correct operation of Versioneer.
+ # It may be intended to be passed to the Versioneer-versioned project,
+ # but that should not change where we get our version from.
+ env = os.environ.copy()
+ env.pop("GIT_DIR", None)
+ runner = functools.partial(runner, env=env)
+
+ _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+ hide_stderr=not verbose)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
@@ -1046,24 +1320,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
- describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
- "--always", "--long",
- "--match", "%s*" % tag_prefix],
- cwd=root)
+ describe_out, rc = runner(GITS, [
+ "describe", "--tags", "--dirty", "--always", "--long",
+ "--match", f"{tag_prefix}[[:digit:]]*"
+ ], cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
- full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+ full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
- pieces = {}
+ pieces: Dict[str, Any] = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
+ branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+ cwd=root)
+ # --abbrev-ref was added in git-1.6.3
+ if rc != 0 or branch_name is None:
+ raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+ branch_name = branch_name.strip()
+
+ if branch_name == "HEAD":
+ # If we aren't exactly on a branch, pick a branch which represents
+ # the current commit. If all else fails, we are on a branchless
+ # commit.
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+ # --contains was added in git-1.5.4
+ if rc != 0 or branches is None:
+ raise NotThisMethod("'git branch --contains' returned error")
+ branches = branches.split("\n")
+
+ # Remove the first line if we're running detached
+ if "(" in branches[0]:
+ branches.pop(0)
+
+ # Strip off the leading "* " from the list of branches.
+ branches = [branch[2:] for branch in branches]
+ if "master" in branches:
+ branch_name = "master"
+ elif not branches:
+ branch_name = None
+ else:
+ # Pick the first branch that is returned. Good or bad.
+ branch_name = branches[0]
+
+ pieces["branch"] = branch_name
+
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
@@ -1080,7 +1387,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
- # unparseable. Maybe git-describe is misbehaving?
+ # unparsable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
@@ -1105,19 +1412,20 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
else:
# HEX: no tags
pieces["closest-tag"] = None
- count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
- cwd=root)
- pieces["distance"] = int(count_out) # total number of commits
+ out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+ pieces["distance"] = len(out.split()) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
- date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
- cwd=root)[0].strip()
+ date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+ # Use only the last line. Previous lines may contain GPG signature
+ # information.
+ date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
-def do_vcs_install(manifest_in, versionfile_source, ipy):
+def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None:
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
@@ -1126,36 +1434,40 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
- files = [manifest_in, versionfile_source]
+ files = [versionfile_source]
if ipy:
files.append(ipy)
- try:
- me = __file__
- if me.endswith(".pyc") or me.endswith(".pyo"):
- me = os.path.splitext(me)[0] + ".py"
- versioneer_file = os.path.relpath(me)
- except NameError:
- versioneer_file = "versioneer.py"
- files.append(versioneer_file)
+ if "VERSIONEER_PEP518" not in globals():
+ try:
+ my_path = __file__
+ if my_path.endswith((".pyc", ".pyo")):
+ my_path = os.path.splitext(my_path)[0] + ".py"
+ versioneer_file = os.path.relpath(my_path)
+ except NameError:
+ versioneer_file = "versioneer.py"
+ files.append(versioneer_file)
present = False
try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
+ with open(".gitattributes", "r") as fobj:
+ for line in fobj:
+ if line.strip().startswith(versionfile_source):
+ if "export-subst" in line.strip().split()[1:]:
+ present = True
+ break
+ except OSError:
pass
if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
+ with open(".gitattributes", "a+") as fobj:
+ fobj.write(f"{versionfile_source} export-subst\n")
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
-def versions_from_parentdir(parentdir_prefix, root, verbose):
+def versions_from_parentdir(
+ parentdir_prefix: str,
+ root: str,
+ verbose: bool,
+) -> Dict[str, Any]:
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
@@ -1164,15 +1476,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
"""
rootdirs = []
- for i in range(3):
+ for _ in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
- else:
- rootdirs.append(root)
- root = os.path.dirname(root) # up a level
+ rootdirs.append(root)
+ root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
@@ -1181,7 +1492,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
+# This file was generated by 'versioneer.py' (0.29) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
@@ -1198,12 +1509,12 @@ def get_versions():
"""
-def versions_from_file(filename):
+def versions_from_file(filename: str) -> Dict[str, Any]:
"""Try to determine the version from _version.py if present."""
try:
with open(filename) as f:
contents = f.read()
- except EnvironmentError:
+ except OSError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON",
contents, re.M | re.S)
@@ -1215,9 +1526,8 @@ def versions_from_file(filename):
return json.loads(mo.group(1))
-def write_to_version_file(filename, versions):
+def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None:
"""Write the given version number to the given _version.py file."""
- os.unlink(filename)
contents = json.dumps(versions, sort_keys=True,
indent=1, separators=(",", ": "))
with open(filename, "w") as f:
@@ -1226,14 +1536,14 @@ def write_to_version_file(filename, versions):
print("set %s to '%s'" % (filename, versions["version"]))
-def plus_or_dot(pieces):
+def plus_or_dot(pieces: Dict[str, Any]) -> str:
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
-def render_pep440(pieces):
+def render_pep440(pieces: Dict[str, Any]) -> str:
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
@@ -1258,23 +1568,71 @@ def render_pep440(pieces):
return rendered
-def render_pep440_pre(pieces):
- """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+ The ".dev0" means not master branch. Note that .dev0 sorts backwards
+ (a feature branch will appear "older" than the master branch).
Exceptions:
- 1: no tags. 0.post.devDISTANCE
+ 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0"
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+untagged.%d.g%s" % (pieces["distance"],
+ pieces["short"])
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
+ """Split pep440 version string at the post-release segment.
+
+ Returns the release segments before the post-release and the
+ post-release version number (or -1 if no post-release segment is present).
+ """
+ vc = str.split(ver, ".post")
+ return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces: Dict[str, Any]) -> str:
+ """TAG[.postN.devDISTANCE] -- No -dirty.
+
+ Exceptions:
+ 1: no tags. 0.post0.devDISTANCE
+ """
+ if pieces["closest-tag"]:
if pieces["distance"]:
- rendered += ".post.dev%d" % pieces["distance"]
+ # update the post release segment
+ tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+ rendered = tag_version
+ if post_version is not None:
+ rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+ else:
+ rendered += ".post0.dev%d" % (pieces["distance"])
+ else:
+ # no commits, use the tag as the version
+ rendered = pieces["closest-tag"]
else:
# exception #1
- rendered = "0.post.dev%d" % pieces["distance"]
+ rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
-def render_pep440_post(pieces):
+def render_pep440_post(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
@@ -1301,12 +1659,41 @@ def render_pep440_post(pieces):
return rendered
-def render_pep440_old(pieces):
+def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
+ """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+ The ".dev0" means not master branch.
+
+ Exceptions:
+ 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+ """
+ if pieces["closest-tag"]:
+ rendered = pieces["closest-tag"]
+ if pieces["distance"] or pieces["dirty"]:
+ rendered += ".post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += plus_or_dot(pieces)
+ rendered += "g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ else:
+ # exception #1
+ rendered = "0.post%d" % pieces["distance"]
+ if pieces["branch"] != "master":
+ rendered += ".dev0"
+ rendered += "+g%s" % pieces["short"]
+ if pieces["dirty"]:
+ rendered += ".dirty"
+ return rendered
+
+
+def render_pep440_old(pieces: Dict[str, Any]) -> str:
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
- Eexceptions:
+ Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
@@ -1323,7 +1710,7 @@ def render_pep440_old(pieces):
return rendered
-def render_git_describe(pieces):
+def render_git_describe(pieces: Dict[str, Any]) -> str:
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
@@ -1343,7 +1730,7 @@ def render_git_describe(pieces):
return rendered
-def render_git_describe_long(pieces):
+def render_git_describe_long(pieces: Dict[str, Any]) -> str:
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
@@ -1363,7 +1750,7 @@ def render_git_describe_long(pieces):
return rendered
-def render(pieces, style):
+def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
@@ -1377,10 +1764,14 @@ def render(pieces, style):
if style == "pep440":
rendered = render_pep440(pieces)
+ elif style == "pep440-branch":
+ rendered = render_pep440_branch(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
+ elif style == "pep440-post-branch":
+ rendered = render_pep440_post_branch(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
@@ -1399,7 +1790,7 @@ class VersioneerBadRootError(Exception):
"""The project root directory is unknown or missing key files."""
-def get_versions(verbose=False):
+def get_versions(verbose: bool = False) -> Dict[str, Any]:
"""Get the project version from whatever source is available.
Returns dict with two keys: 'version' and 'full'.
@@ -1414,7 +1805,7 @@ def get_versions(verbose=False):
assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
handlers = HANDLERS.get(cfg.VCS)
assert handlers, "unrecognized VCS '%s'" % cfg.VCS
- verbose = verbose or cfg.verbose
+ verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None`
assert cfg.versionfile_source is not None, \
"please set versioneer.versionfile_source"
assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
@@ -1475,13 +1866,17 @@ def get_versions(verbose=False):
"date": None}
-def get_version():
+def get_version() -> str:
"""Get the short version string for this project."""
return get_versions()["version"]
-def get_cmdclass():
- """Get the custom setuptools/distutils subclasses used by Versioneer."""
+def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None):
+ """Get the custom setuptools subclasses used by Versioneer.
+
+ If the package uses a different cmdclass (e.g. one from numpy), it
+ should be provide as an argument.
+ """
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
@@ -1495,25 +1890,25 @@ def get_cmdclass():
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/warner/python-versioneer/issues/52
+ # Also see https://github.com/python-versioneer/python-versioneer/issues/52
- cmds = {}
+ cmds = {} if cmdclass is None else cmdclass.copy()
- # we add "version" to both distutils and setuptools
- from distutils.core import Command
+ # we add "version" to setuptools
+ from setuptools import Command
class cmd_version(Command):
description = "report generated version string"
- user_options = []
- boolean_options = []
+ user_options: List[Tuple[str, str, str]] = []
+ boolean_options: List[str] = []
- def initialize_options(self):
+ def initialize_options(self) -> None:
pass
- def finalize_options(self):
+ def finalize_options(self) -> None:
pass
- def run(self):
+ def run(self) -> None:
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
@@ -1523,7 +1918,7 @@ def run(self):
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
- # we override "build_py" in both distutils and setuptools
+ # we override "build_py" in setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
@@ -1538,18 +1933,25 @@ def run(self):
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
+ # pip install -e . and setuptool/editable_wheel will invoke build_py
+ # but the build_py command is not expected to copy any files.
+
# we override different "build_py" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.build_py import build_py as _build_py
+ if 'build_py' in cmds:
+ _build_py: Any = cmds['build_py']
else:
- from distutils.command.build_py import build_py as _build_py
+ from setuptools.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
- def run(self):
+ def run(self) -> None:
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
+ if getattr(self, "editable_mode", False):
+ # During editable installs `.py` and data files are
+ # not copied to build_lib
+ return
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
@@ -1559,8 +1961,40 @@ def run(self):
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
+ if 'build_ext' in cmds:
+ _build_ext: Any = cmds['build_ext']
+ else:
+ from setuptools.command.build_ext import build_ext as _build_ext
+
+ class cmd_build_ext(_build_ext):
+ def run(self) -> None:
+ root = get_root()
+ cfg = get_config_from_root(root)
+ versions = get_versions()
+ _build_ext.run(self)
+ if self.inplace:
+ # build_ext --inplace will only build extensions in
+ # build/lib<..> dir with no _version.py to write to.
+ # As in place builds will already have a _version.py
+ # in the module dir, we do not need to write one.
+ return
+ # now locate _version.py in the new build/ directory and replace
+ # it with an updated value
+ if not cfg.versionfile_build:
+ return
+ target_versionfile = os.path.join(self.build_lib,
+ cfg.versionfile_build)
+ if not os.path.exists(target_versionfile):
+ print(f"Warning: {target_versionfile} does not exist, skipping "
+ "version update. This can happen if you are running build_ext "
+ "without first running build_py.")
+ return
+ print("UPDATING %s" % target_versionfile)
+ write_to_version_file(target_versionfile, versions)
+ cmds["build_ext"] = cmd_build_ext
+
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
- from cx_Freeze.dist import build_exe as _build_exe
+ from cx_Freeze.dist import build_exe as _build_exe # type: ignore
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{
@@ -1569,7 +2003,7 @@ def run(self):
# ...
class cmd_build_exe(_build_exe):
- def run(self):
+ def run(self) -> None:
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
@@ -1593,12 +2027,12 @@ def run(self):
if 'py2exe' in sys.modules: # py2exe enabled?
try:
- from py2exe.distutils_buildexe import py2exe as _py2exe # py3
+ from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore
except ImportError:
- from py2exe.build_exe import py2exe as _py2exe # py2
+ from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore
class cmd_py2exe(_py2exe):
- def run(self):
+ def run(self) -> None:
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
@@ -1619,14 +2053,51 @@ def run(self):
})
cmds["py2exe"] = cmd_py2exe
+ # sdist farms its file list building out to egg_info
+ if 'egg_info' in cmds:
+ _egg_info: Any = cmds['egg_info']
+ else:
+ from setuptools.command.egg_info import egg_info as _egg_info
+
+ class cmd_egg_info(_egg_info):
+ def find_sources(self) -> None:
+ # egg_info.find_sources builds the manifest list and writes it
+ # in one shot
+ super().find_sources()
+
+ # Modify the filelist and normalize it
+ root = get_root()
+ cfg = get_config_from_root(root)
+ self.filelist.append('versioneer.py')
+ if cfg.versionfile_source:
+ # There are rare cases where versionfile_source might not be
+ # included by default, so we must be explicit
+ self.filelist.append(cfg.versionfile_source)
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+
+ # The write method is hidden in the manifest_maker instance that
+ # generated the filelist and was thrown away
+ # We will instead replicate their final normalization (to unicode,
+ # and POSIX-style paths)
+ from setuptools import unicode_utils
+ normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/')
+ for f in self.filelist.files]
+
+ manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt')
+ with open(manifest_filename, 'w') as fobj:
+ fobj.write('\n'.join(normalized))
+
+ cmds['egg_info'] = cmd_egg_info
+
# we override different "sdist" commands for both environments
- if "setuptools" in sys.modules:
- from setuptools.command.sdist import sdist as _sdist
+ if 'sdist' in cmds:
+ _sdist: Any = cmds['sdist']
else:
- from distutils.command.sdist import sdist as _sdist
+ from setuptools.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
- def run(self):
+ def run(self) -> None:
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
@@ -1634,7 +2105,7 @@ def run(self):
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
- def make_release_tree(self, base_dir, files):
+ def make_release_tree(self, base_dir: str, files: List[str]) -> None:
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
@@ -1687,21 +2158,26 @@ def make_release_tree(self, base_dir, files):
"""
-INIT_PY_SNIPPET = """
+OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
+INIT_PY_SNIPPET = """
+from . import {0}
+__version__ = {0}.get_versions()['version']
+"""
-def do_setup():
- """Main VCS-independent setup function for installing Versioneer."""
+
+def do_setup() -> int:
+ """Do main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
- except (EnvironmentError, configparser.NoSectionError,
+ except (OSError, configparser.NoSectionError,
configparser.NoOptionError) as e:
- if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+ if isinstance(e, (OSError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg",
file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1721,62 +2197,37 @@ def do_setup():
ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
"__init__.py")
+ maybe_ipy: Optional[str] = ipy
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
old = f.read()
- except EnvironmentError:
+ except OSError:
old = ""
- if INIT_PY_SNIPPET not in old:
+ module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
+ snippet = INIT_PY_SNIPPET.format(module)
+ if OLD_SNIPPET in old:
+ print(" replacing boilerplate in %s" % ipy)
+ with open(ipy, "w") as f:
+ f.write(old.replace(OLD_SNIPPET, snippet))
+ elif snippet not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
- f.write(INIT_PY_SNIPPET)
+ f.write(snippet)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
- ipy = None
-
- # Make sure both the top-level "versioneer.py" and versionfile_source
- # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
- # they'll be copied into source distributions. Pip won't be able to
- # install the package without this.
- manifest_in = os.path.join(root, "MANIFEST.in")
- simple_includes = set()
- try:
- with open(manifest_in, "r") as f:
- for line in f:
- if line.startswith("include "):
- for include in line.split()[1:]:
- simple_includes.add(include)
- except EnvironmentError:
- pass
- # That doesn't cover everything MANIFEST.in can do
- # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
- # it might give some false negatives. Appending redundant 'include'
- # lines is safe, though.
- if "versioneer.py" not in simple_includes:
- print(" appending 'versioneer.py' to MANIFEST.in")
- with open(manifest_in, "a") as f:
- f.write("include versioneer.py\n")
- else:
- print(" 'versioneer.py' already in MANIFEST.in")
- if cfg.versionfile_source not in simple_includes:
- print(" appending versionfile_source ('%s') to MANIFEST.in" %
- cfg.versionfile_source)
- with open(manifest_in, "a") as f:
- f.write("include %s\n" % cfg.versionfile_source)
- else:
- print(" versionfile_source already in MANIFEST.in")
+ maybe_ipy = None
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
+ do_vcs_install(cfg.versionfile_source, maybe_ipy)
return 0
-def scan_setup_py():
+def scan_setup_py() -> int:
"""Validate the contents of setup.py against Versioneer's expectations."""
found = set()
setters = False
@@ -1813,10 +2264,14 @@ def scan_setup_py():
return errors
+def setup_command() -> NoReturn:
+ """Set up Versioneer and exit with appropriate error code."""
+ errors = do_setup()
+ errors += scan_setup_py()
+ sys.exit(1 if errors else 0)
+
+
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
- errors = do_setup()
- errors += scan_setup_py()
- if errors:
- sys.exit(1)
+ setup_command()