diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..9fb85ec --- /dev/null +++ b/.gitattributes @@ -0,0 +1,14 @@ +# Set the default behavior, in case people don't have core.autocrlf set. +* text=auto + +# Explicitly declare text files you want to always be normalized and converted +# to native line endings on checkout. +*.c text +*.h text + +# Declare files that will always have CRLF line endings on checkout. +*.sln text eol=crlf + +# Denote all files that are truly binary and should not be modified. +*.png binary +*.jpg binary \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..dd84ea7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser [e.g. stock browser, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..bbcbbe7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/.gitignore b/.gitignore index d1fa5b7..43f7914 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,214 @@ *.pyc Default*Settings +cameranetwork.wpr +cameranetwork.wpu +### VirtualEnv template +# Virtualenv +# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ +.Python +[Bb]in +[Ii]nclude +[Ll]ib +[Ll]ib64 +[Ll]ocal +[Ss]cripts +pyvenv.cfg +.venv +pip-selfcheck.json + +### JetBrains template +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. 
+#Pipfile.lock + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + diff --git a/CameraNetwork/__init__.py b/CameraNetwork/__init__.py index af6284e..33b70e3 100644 --- a/CameraNetwork/__init__.py +++ b/CameraNetwork/__init__.py @@ -33,12 +33,15 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -from CameraNetwork.utils import * -from CameraNetwork.internet import * -from CameraNetwork.client import * -from CameraNetwork.calibration import * -from CameraNetwork.hg import * -import CameraNetwork.mdp as mdp -import CameraNetwork.image_utils as image_utils -import CameraNetwork.integral_sphere as integral_sphere -import CameraNetwork.radiosonde as radiosonde \ No newline at end of file + + +from CameraNetwork.utils import * +from CameraNetwork.internet import * +from CameraNetwork.client import * +from CameraNetwork.calibration import * +from CameraNetwork.hg import * +from CameraNetwork.global_settings import * +import CameraNetwork.mdp as mdp +import CameraNetwork.image_utils as image_utils +import CameraNetwork.integral_sphere as integral_sphere +import CameraNetwork.radiosonde as radiosonde diff --git a/CameraNetwork/calibration.py b/CameraNetwork/calibration.py index 67332dd..6788930 100644 --- a/CameraNetwork/calibration.py +++ b/CameraNetwork/calibration.py @@ -33,10 +33,12 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## + """ Utilities used in the process of calibration. """ +from __future__ import print_function from __future__ import division from CameraNetwork.image_utils import raw2RGB, RGB2raw import cPickle @@ -56,19 +58,19 @@ READY_REPLY = 'READY' -class Gimbal( object ): +class Gimbal(object): """Calibration Gimbal - This class encapsulates the use of the Caliration Gimbal. During + This class encapsulates the use of the Calibration Gimbal. During vignetting calibration, a camera is connected to the gimbal and placed - infront of a light source. The Gimbal rotates the camera in all directions. + in front of a light source. The Gimbal rotates the camera in all directions. This way the spatial response of the camera (vignetting) is captured. Args: com (str, optional): The serial port of the Arduino that controls the gimbal. baudrate (int, optional): Baud rate of serial port. - timeout (int, optional): timeout for tryingto connect to the Arduino. + timeout (int, optional): timeout for trying to connect to the Arduino. Note: To use this class, one needs to first install the gimbal.ino file @@ -80,7 +82,7 @@ class Gimbal( object ): def __init__(self, com='COM13', baudrate=9600, timeout=20): import serial - self._port = serial.Serial(com, baudrate=baudrate, timeout = timeout) + self._port = serial.Serial(com, baudrate=baudrate, timeout=timeout) # # Wait for the motor to finish the reset move @@ -95,12 +97,11 @@ def _finalize(self): # Finalize the serial port. 
# try: - print self._port.read(size=1000) + print(self._port.read(size=1000)) self._port.close() except: pass - def _waitReady(self): """Wait for a ready reply from the arduino """ @@ -116,7 +117,6 @@ def __del__(self): self._finalize() - def _checkEcho(self, cmd): """Check the answer echo validity. @@ -140,12 +140,11 @@ def _sendCmd(self, cmd): """ - self._port.write( cmd ) + self._port.write(cmd) - self._checkEcho( cmd ) + self._checkEcho(cmd) #self._waitReady() - def flush(self): """Empty the input & output buffers """ @@ -161,7 +160,7 @@ def resetPosition(self): """ cmd = 'z\n' - self._sendCmd( cmd ) + self._sendCmd(cmd) def move(self, x, y): """Move to x, y position @@ -186,7 +185,7 @@ def move(self, x, y): y_str = ('0000' + str(y))[-4:] cmd = 'm'+x_str+y_str+'\n' - self._sendCmd( cmd ) + self._sendCmd(cmd) class GimbalCamera(object): @@ -232,7 +231,7 @@ def meanColor(c): Returns: Mean of non zero values in c. """ - nnz_total = (c>0).sum() + nnz_total = (c > 0).sum() if nnz_total == 0: return 0 @@ -259,7 +258,7 @@ def findSpot(img, threshold=5): # Calculate a spot mask. # kernel = np.ones((3, 3),np.uint8) - mask = (img>threshold) + mask = (img > threshold) mask = cv2.dilate(mask.astype(np.uint8), kernel) mask = cv2.erode(mask.astype(np.uint8), kernel, iterations=2) @@ -349,7 +348,7 @@ def calibrate(self, color_measurements): self._calcRatio() def _calcRatio(self): - """Calc the vignnetting ratios in each pixel of the image.""" + """Calc the vignetting ratios in each pixel of the image.""" # # The models were learnt for RGB (600x800) images therefore @@ -420,7 +419,7 @@ def processSpotImages(base_path, color_index=None): pass # - # Arrange the mesurements as a list of colors + # Arrange the measurements as a list of colors # measurements = zip(*measurements) @@ -455,7 +454,7 @@ def readMeasurements(base_path, *args, **kwds): # x, y, vals = zip(*data) measurements.append( - [(i/2, j/2, k[color_index]) for i, j, k in zip(x, y, vals) if i is not None] + [(i/2, j/2, k[color_index]) for i, j, k in zip(x, y, vals) if i is not None] ) vc = VignettingCalibration(*args, **kwds) diff --git a/CameraNetwork/client.py b/CameraNetwork/client.py index edfdd74..9e7e1b9 100644 --- a/CameraNetwork/client.py +++ b/CameraNetwork/client.py @@ -49,6 +49,7 @@ import CameraNetwork.global_settings as gs from CameraNetwork.mdp import * +from CameraNetwork.mdp import MDP from CameraNetwork.server import Server from CameraNetwork.utils import extractImgArray @@ -134,7 +135,9 @@ def handle_server_failure(self, server): pass def handle_receive(self, msg_extra, service, status, cmd, args, kwds): - """Callback to handle receive. This is called only if there are no other callbacks to handle the message. Derived classes should override this method.""" + """Callback to handle receive. + This is called only if there are no other callbacks to handle the message. + Derived classes should override this method.""" raise Warning('Unattended message: ', str((status, cmd, args, kwds))) @@ -160,12 +163,12 @@ def on_message(self, msg): status, cmd, args, kwds = cPickle.loads(msg[0]) # - # Call the coresponding cmd callback. + # Call the corresponding cmd callback. # self.handle_receive(msg_extra, service, status, cmd, args, kwds) def on_timeout(self): - """Public method called when a timeout occured. + """Public method called when a timeout occurred. .. note:: Does nothing. Should be overloaded! 
""" @@ -267,7 +270,7 @@ def __getattr__(self, name): """Dynamically create messages.""" if not hasattr(Server, 'handle_{}'.format(name)): - raise AttributeError("Unkown server command: {}".format(name)) + raise AttributeError("Unknown server command: {}".format(name)) # # Create sendmessage method. @@ -310,9 +313,9 @@ def __getitem__(self, servers_id): servers_id = [servers_id] unknown_servers = set(servers_id).difference(set(self.client_instance.servers)) - if len(unknown_servers) > 0: + if len(unknown_servers) > 0: raise IndexError( - 'Unkown servers: {}. List of known servers: {}.'.format( + 'Unknown servers: {}. List of known servers: {}.'.format( unknown_servers, self.client_instance.servers ) ) @@ -322,7 +325,7 @@ def __getitem__(self, servers_id): def __getattr__(self, name): if not hasattr(Server, 'handle_{}'.format(name)): - raise AttributeError("Unkown server command: {}".format(name)) + raise AttributeError("Unknown server command: {}".format(name)) def proxy_func(servers_id, *args, **kwds): return getattr(self[servers_id], name)(*args, **kwds) @@ -463,7 +466,7 @@ def sunshader( angle, ): - assert angle >= 20 and angle <= 160, \ + assert 20 <= angle <= 160, \ 'angle must be between 20-160, got {}'.format(angle) self.send_message( @@ -529,7 +532,7 @@ def seek( return img_arrays, img_datas -def main (): +def main(): import CameraNetwork from CameraNetwork.sunphotometer import findClosestImageTime diff --git a/CameraNetwork/controller.py b/CameraNetwork/controller.py index 3e6caf6..4f9f4de 100644 --- a/CameraNetwork/controller.py +++ b/CameraNetwork/controller.py @@ -1,38 +1,40 @@ -## -## Copyright (C) 2017, Amit Aides, all rights reserved. -## -## This file is part of Camera Network -## (see https://bitbucket.org/amitibo/cameranetwork_git). -## -## Redistribution and use in source and binary forms, with or without modification, -## are permitted provided that the following conditions are met: -## -## 1) The software is provided under the terms of this license strictly for -## academic, non-commercial, not-for-profit purposes. -## 2) Redistributions of source code must retain the above copyright notice, this -## list of conditions (license) and the following disclaimer. -## 3) Redistributions in binary form must reproduce the above copyright notice, -## this list of conditions (license) and the following disclaimer in the -## documentation and/or other materials provided with the distribution. -## 4) The name of the author may not be used to endorse or promote products derived -## from this software without specific prior written permission. -## 5) As this software depends on other libraries, the user must adhere to and keep -## in place any licensing terms of those libraries. -## 6) Any publications arising from the use of this software, including but not -## limited to academic journal and conference publications, technical reports and -## manuals, must cite the following works: -## Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). -## -## THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -## EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## +# +# Copyright (C) 2017, Amit Aides, all rights reserved. +# +# This file is part of Camera Network +# (see https://bitbucket.org/amitibo/cameranetwork_git). +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1) The software is provided under the terms of this license strictly for +# academic, non-commercial, not-for-profit purposes. +# 2) Redistributions of source code must retain the above copyright notice, this +# list of conditions (license) and the following disclaimer. +# 3) Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions (license) and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 4) The name of the author may not be used to endorse or promote products derived +# from this software without specific prior written permission. +# 5) As this software depends on other libraries, the user must adhere to and keep +# in place any licensing terms of those libraries. +# 6) Any publications arising from the use of this software, including but not +# limited to academic journal and conference publications, technical reports and +# manuals, must cite the following works: +# Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, +# "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). +# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +# EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ from __future__ import division import bisect from CameraNetwork.arduino_utils import ArduinoAPI @@ -62,6 +64,7 @@ from datetime import timedelta import ephem import fisheye + try: import futures except: @@ -69,9 +72,16 @@ # Support also python 2.7 # from concurrent import futures + import glob import json -import Image + +try: + from PIL import Image +except: + # In case of old version + import Image + import logging import numpy as np import os @@ -131,7 +141,7 @@ def interpolate_dark_images(exposure, exposures, dark_images): def time2seconds(dt): """Convert datetime object to seconds.""" - seconds = (dt.hour*60 + dt.minute)*60 + dt.second + dt.microsecond*1e-6 + seconds = (dt.hour * 60 + dt.minute) * 60 + dt.second + dt.microsecond * 1e-6 return seconds @@ -180,7 +190,7 @@ def __init__(self, offline=False, local_path=None): self.loadDarkImages() # - # Load todays celestial position measurements + # Load today's celestial position measurements # if not os.path.exists(gs.SUN_POSITIONS_PATH): os.makedirs(gs.SUN_POSITIONS_PATH) @@ -291,7 +301,7 @@ def loadCameraCalibration(self, capture_date=None, serial_num=None): logging.debug("Calibration path is: {}".format(calibration_path)) if self._last_calibration_path is not None and \ - self._last_calibration_path == calibration_path: + self._last_calibration_path == calibration_path: # # No need to load new calibration data. # @@ -306,14 +316,14 @@ def loadCameraCalibration(self, capture_date=None, serial_num=None): # If so, the data is copied to the home folder. # Note: # This is done to support old cameras that were not calibrated - # using the testbench. + # using the test bench. # if os.path.exists(self.base_calibration_path): for base_path, file_name, dst_path in zip( - (calibration_path, calibration_path, self.base_calibration_path), - (gs.INTRINSIC_SETTINGS_FILENAME, gs.VIGNETTING_SETTINGS_FILENAME, gs.RADIOMETRIC_SETTINGS_FILENAME), - (gs.INTRINSIC_SETTINGS_PATH, gs.VIGNETTING_SETTINGS_PATH, gs.RADIOMETRIC_SETTINGS_PATH) - ): + (calibration_path, calibration_path, self.base_calibration_path), + (gs.INTRINSIC_SETTINGS_FILENAME, gs.VIGNETTING_SETTINGS_FILENAME, gs.RADIOMETRIC_SETTINGS_FILENAME), + (gs.INTRINSIC_SETTINGS_PATH, gs.VIGNETTING_SETTINGS_PATH, gs.RADIOMETRIC_SETTINGS_PATH) + ): try: shutil.copyfile( os.path.join(base_path, file_name), @@ -388,8 +398,8 @@ def loadDarkImages(self): di_paths = sorted(glob.glob(os.path.join(gs.DARK_IMAGES_PATH, '*.mat'))) if di_paths: self._dark_images = { - False:{'exposures':[], 'images':[]}, - True:{'exposures':[], 'images':[]}, + False: {'exposures': [], 'images': []}, + True: {'exposures': [], 'images': []}, } # @@ -397,7 +407,7 @@ def loadDarkImages(self): # for path in di_paths: d = sio.loadmat(path) - gain_boost = d['gain_boost'][0][0] == 1 + gain_boost = d['gain_boost'][0][0] == 1 self._dark_images[gain_boost]['exposures'].append(d['exposure'][0][0]) self._dark_images[gain_boost]['images'].append(d['image']) @@ -440,23 +450,24 @@ def loadSunMeasurements(self): # # Limit the data to sun measurements only. # - data = data[data['object']=='Sun'] + data = data[data['object'] == 'Sun'] # # Limit the data to angles between a range of "valid" # angles. 
# data = data[ - (data['sunshader_angle']>gs.SUNSHADER_MIN_MEASURED)&\ - (data['sunshader_angle'] gs.SUNSHADER_MIN_MEASURED) & + (data['sunshader_angle'] < gs.SUNSHADER_MAX_MEASURED) + ] data.index = data.index.time angles.append(data['sunshader_angle']) - - self.sunshader_angles_df = pd.concat( - angles, axis=1 - ).mean(axis=1).to_frame(name='angle') + # pandas backwards compatibility + silence sort warning + if pd.__version__ < '0.23.0': + self.sunshader_angles_df = pd.concat(angles, axis=1).mean(axis=1).to_frame(name='angle') + else: + self.sunshader_angles_df = pd.concat(angles, axis=1, sort=True).mean(axis=1).to_frame(name='angle') else: self.sunshader_angles_df = pd.DataFrame(dict(angle=[])) @@ -506,7 +517,7 @@ def safe_capture(self, settings, frames_num=1, except Exception as e: if retries <= 0: logging.exception( - 'The camera failed too many consequtive times. Reboot.' + 'The camera failed too many consecutive times. Reboot.' ) logging.shutdown() os.system('sudo reboot') @@ -524,7 +535,7 @@ def safe_capture(self, settings, frames_num=1, self.start_camera() except Exception as e: logging.exception( - 'The camera failed restarting. Reboot.' + 'The camera failed restarting. Rebooting.' ) logging.shutdown() time.sleep(120) @@ -547,7 +558,7 @@ def handle_sunshader_update(self, sunshader_min, sunshader_max): # # Take value large enough to force scan # - dt = timedelta(seconds=2*gs.SUNSHADER_SCAN_PERIOD_LONG) + dt = timedelta(seconds=2 * gs.SUNSHADER_SCAN_PERIOD_LONG) # # current_time_only is without date, and used for interpolating @@ -565,10 +576,10 @@ def handle_sunshader_update(self, sunshader_min, sunshader_max): # current_angle = self._arduino_api.getAngle() sunshader_scan_min = max( - current_angle-gs.SUNSHADER_SCAN_DELTA_ANGLE, sunshader_min + current_angle - gs.SUNSHADER_SCAN_DELTA_ANGLE, sunshader_min ) sunshader_scan_max = min( - current_angle+gs.SUNSHADER_SCAN_DELTA_ANGLE, sunshader_max + current_angle + gs.SUNSHADER_SCAN_DELTA_ANGLE, sunshader_max ) sunshader_scan_period = gs.SUNSHADER_SCAN_PERIOD_LONG else: @@ -580,7 +591,7 @@ def handle_sunshader_update(self, sunshader_min, sunshader_max): # Is it time to do a scan? # measured_angle = None - if dt > timedelta(seconds=sunshader_scan_period): + if dt > timedelta(seconds=sunshader_scan_period): self.last_sunshader_time = current_time logging.info('Time to scan') @@ -590,18 +601,17 @@ def handle_sunshader_update(self, sunshader_min, sunshader_max): # future = Future() yield self.handle_sunshader_scan(future, reply=False, - sunshader_min=sunshader_scan_min, - sunshader_max=sunshader_scan_max - ) + sunshader_min=sunshader_scan_min, + sunshader_max=sunshader_scan_max + ) measured_angle, _ = future.result() logging.info("Measured angle: {}".format(measured_angle)) # - # Update datebase with new measurment - # First, add new measurment to dataframe of angles. + # Update database with new measurement + # First, add new measurement to dataframe of angles. 
# - if measured_angle > gs.SUNSHADER_MIN_MEASURED and \ - measured_angle < gs.SUNSHADER_MAX_MEASURED: + if gs.SUNSHADER_MIN_MEASURED < measured_angle < gs.SUNSHADER_MAX_MEASURED: self.sunshader_angles_df.loc[current_time_only] = measured_angle self.sunshader_angles_df = self.sunshader_angles_df.sort_index() @@ -621,11 +631,11 @@ def handle_sunshader_update(self, sunshader_min, sunshader_max): self.sunshader_fit = False # - # If model fitting failed or there are not enough measurments for + # If model fitting failed or there are not enough measurements for # interpolation angle use measured angle. # if (not self.sunshader_fit) or \ - len(self.sunshader_angles_df) < gs.SUNSHADER_MIN_ANGLES: + len(self.sunshader_angles_df) < gs.SUNSHADER_MIN_ANGLES: logging.info("Either failed fitting or not enough measurements") if measured_angle is not None: logging.info("Using measured angle: {}".format(measured_angle)) @@ -688,7 +698,8 @@ def handle_sunshader_scan(self, reply, sunshader_min, sunshader_max): "color_mode": gs.COLOR_RGB } ) - val = img[img>128].sum() / img.size + # TODO CONST 128 and why 128 and not something else? + val = img[img > 128].sum() / img.size logging.debug( "Exp.: {}, Gain: {}, image range: [{}, {}], Value: {}".format( @@ -724,7 +735,7 @@ def handle_sunshader_scan(self, reply, sunshader_min, sunshader_max): # saturated_array = pd.Series(saturated_array).fillna(method='bfill').values - N = min(8, int((len(saturated_array)-1)/3)-1) + N = min(8, int((len(saturated_array) - 1) / 3) - 1) if N >= 4: b, a = signal.butter(N, 0.125) sun_signal = signal.filtfilt(b, a, saturated_array) @@ -757,8 +768,7 @@ def handle_sunshader_scan(self, reply, sunshader_min, sunshader_max): # if reply: angles = np.arange(sunshader_min, sunshader_max) - return angles, np.array(saturated_array), sun_signal, \ - measured_angle, centroid + return angles, np.array(saturated_array), sun_signal, measured_angle, centroid return measured_angle, centroid @@ -773,13 +783,13 @@ def update_sky_mask(self, img): # Calculate the mask factor # mat = img.astype(np.float) - r = mat[...,0] - g = mat[...,1] - b = mat[...,2] - new_mask = (b>30) & (b>1.5*r) + r = mat[..., 0] + g = mat[..., 1] + b = mat[..., 2] + new_mask = (b > 30) & (b > 1.5 * r) # - # Accomulate the mask factor + # Accumulate the mask factor # if self.sky_mask_base is None: self.sky_mask_base = new_mask @@ -804,8 +814,7 @@ def update_sky_mask(self, img): contour = sorted(contours, key=cv2.contourArea, reverse=True)[0] self.sky_mask = np.zeros_like(mask) - self.sky_mask = cv2.drawContours( - self.sky_mask, [contour], -1, 255, -1) + self.sky_mask = cv2.drawContours(self.sky_mask, [contour], -1, 255, -1) # # Store the masks @@ -849,7 +858,7 @@ def handle_calibration(self, nx, ny, imgs_num, delay, exposure_us, # imgs = [] for i in range(imgs_num): - self._arduino_api.setAngle(sunshader_min+2) + self._arduino_api.setAngle(sunshader_min + 2) img, real_exposure_us, real_gain_db = self._camera.capture( settings={ "exposure_us": exposure_us, @@ -902,7 +911,7 @@ def handle_calibration(self, nx, ny, imgs_num, delay, exposure_us, def handle_sunshader(self, angle, sunshader_min, sunshader_max): """Set the sunshader to an angle""" - if anglesunshader_max: + if angle < sunshader_min or angle > sunshader_max: raise ValueError( "Sunshader angle ({}) not in range ({},{})".format( angle, sunshader_min, sunshader_max @@ -954,13 +963,13 @@ def handle_moon(self, sunshader_min): @cmd_callback @run_on_executor def handle_extrinsic( - self, - date, - latitude, - longitude, - altitude, 
- residual_threshold, - save): + self, + date, + latitude, + longitude, + altitude, + residual_threshold, + save): """Handle extrinsic calibration""" # @@ -973,11 +982,11 @@ def handle_extrinsic( except: logging.warn( "Failed loading calibration for extrinsic date {}\n{}".format( - date, traceback.format_exc()) + date, traceback.format_exc()) ) # - # Load sun measurments. + # Load sun measurements. # today_positions_path = os.path.join( gs.SUN_POSITIONS_PATH, "{}.csv".format(date)) @@ -986,10 +995,10 @@ def handle_extrinsic( raise Exception('No sun positions for date: {}'.format(date)) # - # Calibratoin is done using the sun position. + # Calibration is done using the sun position. # positions_df = pd.read_csv(today_positions_path, index_col=0, parse_dates=True) - positions_df = positions_df[positions_df['object']=='Sun'] + positions_df = positions_df[positions_df['object'] == 'Sun'] positions_df = positions_df.dropna() if positions_df.shape[0] < gs.EXTRINSIC_CALIBRATION_MIN_PTS: @@ -997,17 +1006,15 @@ def handle_extrinsic( positions_df.shape[0])) # - # Convert sun measurments to directions. + # Convert sun measurements to directions. # - measured_postions = \ - positions_df[['pos_x', 'pos_y']].as_matrix() - phi, theta, mask = self._normalization._fisheye_model.undistortDirections( - measured_postions) + measured_positions = positions_df[['pos_x', 'pos_y']].as_matrix() + phi, theta, mask = self._normalization._fisheye_model.undistortDirections(measured_positions) measured_directions = np.array( ( - np.sin(theta)*np.cos(phi), - -np.sin(theta)*np.sin(phi), + np.sin(theta) * np.cos(phi), + -np.sin(theta) * np.sin(phi), np.cos(theta) ) ).T @@ -1029,7 +1036,7 @@ def handle_extrinsic( calculated_directions = np.array(calculated_directions) # - # Estimate oreintation + # Estimate orientation # R, rotated_directions = find_camera_orientation_ransac( calculated_directions, measured_directions, residual_threshold) @@ -1049,7 +1056,7 @@ def handle_extrinsic( os.path.join( calibration_day_path, gs.EXTRINSIC_SETTINGS_FILENAME - ), + ), R ) else: @@ -1064,10 +1071,7 @@ def handle_extrinsic( @cmd_callback @gen.coroutine - def handle_save_extrinsic( - self, - date - ): + def handle_save_extrinsic(self, date): """Handle save extrinsic calibration command This command saves the current extrinsic calibration on a specific @@ -1082,19 +1086,19 @@ def handle_save_extrinsic( gs.CAPTURE_PATH, date, gs.EXTRINSIC_SETTINGS_FILENAME - ), + ), self._normalization.R ) @cmd_callback @run_on_executor def handle_radiometric( - self, - date, - time_index, - residual_threshold, - save, - camera_settings): + self, + date, + time_index, + residual_threshold, + save, + camera_settings): """Handle radiometric calibration""" # @@ -1117,7 +1121,7 @@ def handle_radiometric( # df = spm.parseSunPhotoMeter(path[0]) spm_df = df[date.strftime("%Y-%m-%d")] - spm_df = [spm_df[spm_df["Wavelength(um)"]==wl] for wl in (0.6744, 0.5000, 0.4405)] + spm_df = [spm_df[spm_df["Wavelength(um)"] == wl] for wl in (0.6744, 0.5000, 0.4405)] # # Get the image list for this day. 
@@ -1176,6 +1180,7 @@ def handle_radiometric( def sampleAlmucantarData(self, spm_df, t, camera_df, camera_settings, resolution=301): """Samples almucantar rgb values of some camera at specific time.""" + angles, values = spm.readSunPhotoMeter(spm_df, t) closest_time = spm.findClosestImageTime(camera_df, t, hdr='2') img_datas, img = self.seekImageArray( @@ -1189,8 +1194,8 @@ def sampleAlmucantarData(self, spm_df, t, camera_df, camera_settings, resolution correct_radiometric=False ) almucantar_samples, almucantar_angles, almucantar_coords, \ - _, _, _ = spm.sampleImage(img, img_datas[0], almucantar_angles=angles) - + _, _, _ = spm.sampleImage(img, img_datas[0], almucantar_angles=angles) + # values- are sunphotometer measurments, almucantar_samples are the corresponding samples on the image plane. return angles, values, almucantar_samples @cmd_callback @@ -1242,7 +1247,7 @@ def handle_array(self, capture_settings, frames_num, normalize, jpeg, # Average the images. # if frames_num > 1: - img_array = img_array.mean(axis=img_array.ndim-1) + img_array = img_array.mean(axis=img_array.ndim - 1) logging.debug('Averaged %d arrays' % frames_num) # @@ -1265,18 +1270,18 @@ def handle_array(self, capture_settings, frames_num, normalize, jpeg, return img_array, img_data def seekImageArray( - self, - df, - seek_time, - hdr_index, - normalize, - resolution, - jpeg, - camera_settings, - correct_radiometric=True, - ignore_date_extrinsic=False, - timedelta_threshold=60 - ): + self, + df, + seek_time, + hdr_index, + normalize, + resolution, + jpeg, + camera_settings, + correct_radiometric=True, + ignore_date_extrinsic=False, + timedelta_threshold=60 + ): """Seek an image array. Args: @@ -1342,7 +1347,7 @@ def seekImageArray( img_array = sio.loadmat(mat_path)['img_array'] base_path = os.path.splitext(mat_path)[0] - if os.path.exists(base_path+'.json'): + if os.path.exists(base_path + '.json'): # # Support old json data files. # @@ -1357,11 +1362,11 @@ def seekImageArray( with open(data_path, mode='rb') as f: img_data.update(**json.load(f)) - elif os.path.exists(base_path+'.pkl'): + elif os.path.exists(base_path + '.pkl'): # # New pickle data files. # - with open(base_path+'.pkl', 'rb') as f: + with open(base_path + '.pkl', 'rb') as f: img_data = cPickle.load(f) img_arrays.append(img_array) @@ -1390,9 +1395,9 @@ def preprocess_array( jpeg=False, correct_radiometric=True, ignore_date_extrinsic=False - ): - """Apply preprocessing to the raw array: - dark_image substraction, normalization, vignetting, HDR... + ): + """Apply pre-processing to the raw array: + dark_image subtraction, normalization, vignetting, HDR... Args: ... @@ -1433,7 +1438,7 @@ def preprocess_array( except: logging.error( "Failed loading extrinsic data from {}\n{}".format( - extrinsic_path, traceback.format_exc()) + extrinsic_path, traceback.format_exc()) ) # @@ -1475,7 +1480,7 @@ def preprocess_array( else: if len(img_arrays) == 1: img_array = \ - img_arrays[0].astype(np.float) / (img_datas[0].exposure_us / 1000) + img_arrays[0].astype(np.float) / (img_datas[0].exposure_us / 1000) else: img_exposures = [img_data.exposure_us / 1000 for img_data in img_datas] img_array = calcHDR(img_arrays, img_exposures) @@ -1517,7 +1522,7 @@ def preprocess_array( # Scale to Watts. 
# img_array = \ - self._radiometric.applyRadiometric(img_array).astype(np.float32) + self._radiometric.applyRadiometric(img_array).astype(np.float32) return np.ascontiguousarray(img_array) @@ -1559,11 +1564,11 @@ def handle_dark_images(self): frames_num=FRAMES_NUM ) - img_array = img_array.mean(axis=img_array.ndim-1) + img_array = img_array.mean(axis=img_array.ndim - 1) sio.savemat( os.path.join(gs.DARK_IMAGES_PATH, '{}_{}.mat'.format(img_index, gain_boost)), - {'image': img_array, 'exposure': exposure_us, 'gain_boost':gain_boost}, + {'image': img_array, 'exposure': exposure_us, 'gain_boost': gain_boost}, do_compression=True ) img_index += 1 @@ -1585,9 +1590,7 @@ def handle_loop(self, capture_settings, frames_num, hdr_mode, img_data): # # Capture the array. # - img_array, exposure_us, gain_db = self.safe_capture( - capture_settings, frames_num - ) + img_array, exposure_us, gain_db = self.safe_capture(capture_settings, frames_num) # # update image data object. @@ -1603,7 +1606,7 @@ def handle_loop(self, capture_settings, frames_num, hdr_mode, img_data): # Average the images. # if frames_num > 1: - img_array = img_array.mean(axis=img_array.ndim-1) + img_array = img_array.mean(axis=img_array.ndim - 1) logging.debug('Averaged %d arrays' % frames_num) # @@ -1620,7 +1623,7 @@ def handle_loop(self, capture_settings, frames_num, hdr_mode, img_data): break # - # Mutliply the next exposure for HDR. + # Multiply the next exposure for HDR. # if capture_settings['exposure_us'] >= 6000000: break @@ -1706,9 +1709,9 @@ def process_cmds(self): # cb = getattr(self, 'handle_{}'.format(cmd), None) if cb is None: - logging.debug("Controller received unkown command: {}".format(cmd)) + logging.debug("Controller received unknown command: {}".format(cmd)) future.set_exception( - Exception("Controller received unkown command: {}".format(cmd))) + Exception("Controller received unknown command: {}".format(cmd))) else: try: try: @@ -1727,4 +1730,4 @@ def process_cmds(self): future.set_exception(e) self._in_queue.task_done() - logging.debug("Finished procesing cmd: {}".format(cmd)) + logging.debug("Finished processing cmd: {}".format(cmd)) diff --git a/CameraNetwork/export.py b/CameraNetwork/export.py index 74eb4ae..1f782fa 100644 --- a/CameraNetwork/export.py +++ b/CameraNetwork/export.py @@ -33,251 +33,264 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -"""Utilites for handling export to solver. -""" -from __future__ import division -from CameraNetwork.utils import sun_direction -import cPickle -import cv2 -from enaml.application import deferred_call, is_main_thread -import logging -import math -import os -import numpy as np -import pymap3d -import traceback - - -def exportToShdom( - base_path, - array_items, - grid, - lat, - lon, - alt, - progress_callback): - """Process export of reconstruction data on separate thread. - The export is intended for use in SHDOM. - - Args: - base_path (str): Path to store export data. - array_items (list): List of array items. - grid (list): List of grid array. This is the grid to reconstruct. - lat, lon, lat (float): The latitude, longitude and altitude of the center - of the grid. - progress_callback (function): Callback function to update the (GUI) with - the progress of the export. 
- - Note: - The directions in the Qt view are as follows: - x axis (horizontal) goes from West (left) to East (right) - y axis (vertical) goes from South (down) to North (up). - this makes it a EN axis system - """ - - # - # Reset the progress indicatort. - # - progress_cnt = len(array_items) - deferred_call(progress_callback, 0) - - # - # Convert the grid from NED to ECEF - # - GRID = np.meshgrid(*grid) - ecef_grid = pymap3d.ned2ecef(GRID[0], GRID[1], GRID[2], lat, lon, alt) - - # - # Loop on all servers and collect information. - # - export_data = {} - for i, (server_id, (array_model, array_view)) in enumerate(array_items.items()): - try: - # - # Store extra data like camera center, etc. - # - extra_data, sun_alt, sun_az = extraReconstructionData( - array_model, array_view, lat0=lat, lon0=lon, h0=alt) - - img_array = array_model.img_array - - # - # Calculate azimuth and elevation of each pixel. - # Note: - # The interpolation is done in the Y_shdom, X_shdom to avoid - # the seam artifact of PHI at 180 degrees. - # - Y_shdom, X_shdom = np.meshgrid( - np.linspace(-1, 1, array_model.img_array.shape[1]), - np.linspace(-1, 1, array_model.img_array.shape[0]) - ) - Y_shdom = array_view.image_widget.getArrayRegion(Y_shdom) - X_shdom = array_view.image_widget.getArrayRegion(X_shdom) - PHI_shdom, PSI_shdom = getShdomDirections(Y_shdom, X_shdom, array_model.fov) - - # - # Calculate Masks. - # Note: - # sunshader mask is calculate using grabcut. This is used for removing the - # sunshader. - # Manual mask is the (ROI) mask marked by the user. - # sun mask is a mask the blocks the sun. - # - manual_mask = array_view.image_widget.mask - joint_mask = (manual_mask * array_model.sunshader_mask).astype(np.uint8) - - # - # Project the grid on the image and check viewed voxels. - # Note: - # This measurement is used for checking how many cameras see each voxel. - # TODO: - # This procedure is time expensive and can be cached. - # This should probably be a method of the camera, and this method should - # cache the result, or even be triggered by setting the grid. - # - visibility = projectGridOnCamera(ecef_grid, array_model, joint_mask) - except Exception, e: - logging.error( - "Server {} ignored due to exception:\n{}".format( - server_id, - traceback.format_exc() - ) - ) - continue - - export_data[server_id] = dict( - extra_data=extra_data, - R=array_view.image_widget.getArrayRegion(img_array[..., 0]), - G=array_view.image_widget.getArrayRegion(img_array[..., 1]), - B=array_view.image_widget.getArrayRegion(img_array[..., 2]), - PHI=PHI_shdom, - PSI=PSI_shdom, - MASK=array_view.image_widget.getArrayRegion(joint_mask), - SUN_MASK=array_view.image_widget.getArrayRegion(array_model.sun_mask), - Visibility=visibility, - ) - - deferred_call(progress_callback, i / progress_cnt) - - # - # Save the results. - # - with open(os.path.join(base_path, 'export_data.pkl'), 'wb') as f: - cPickle.dump(export_data, f) - - deferred_call(progress_callback, 0) - - -def getShdomDirections(Y_shdom, X_shdom, fov=math.pi/2): - """Calculate the (SHDOM) direction of each pixel. - - Directions are calculated in SHDOM convention where the direction is - of the photons. - """ - - PHI_shdom = np.pi + np.arctan2(Y_shdom, X_shdom) - PSI_shdom = -np.pi + fov * np.sqrt(X_shdom**2 + Y_shdom**2) - return PHI_shdom, PSI_shdom - - -def extraReconstructionData(array_model, array_view, lat0, lon0, h0): - """Get extra data for the reconstruction - - This includes camera position, sun angle, time etc. 
- - Note: - The coordinates are given in the following conventions: - 1) Camera position is given in NEU. - 2) sun_mu, sun_az are given in the SHDOM convention - of photons directions. - """ - - # - # Calculate the center of the camera. - # Note that the coords are stored as NEU (in contrast to NED) - # - n, e, d = pymap3d.geodetic2ned( - array_model.latitude, array_model.longitude, array_model.altitude, - lat0=lat0, lon0=lon0, h0=h0) - - # - # Calculate bounding coords (useful for debug visualization) - # - #bounding_phi, bounding_psi = calcROIbounds(array_model, array_view) - - # - # Sun azimuth and altitude - # - sun_alt, sun_az = sun_direction( - latitude=str(array_model.latitude), - longitude=str(array_model.longitude), - altitude=array_model.altitude, - at_time=array_model.img_data.name_time) - - # - # Note: - # shdom_mu = cos(pi/2-alt-pi)=cos(-alt-pi/2)=cos(alt+pi/2) - # - extra_data = \ - dict( - at_time=array_model.img_data.name_time, - sun_mu=math.cos(float(sun_alt)+np.pi/2), - sun_az=float(sun_az)-np.pi, - x=n, - y=e, - z=-d, - #bounding_phi=bounding_phi, - #bounding_psi=bounding_psi - ) - return extra_data, sun_alt, sun_az - - -def projectGridOnCamera(ecef_grid, array_model, joint_mask): - """Project reconstruction grid on camera. - - This is used to estimate the visibility of each voxel by the camera. - """ - - xs, ys, fov_mask = array_model.projectECEF(ecef_grid, filter_fov=False) - xs = xs.astype(np.uint32).flatten() - ys = ys.astype(np.uint32).flatten() - - grid_visibility = np.zeros_like(xs, dtype=np.uint8) - grid_visibility[fov_mask] = \ - joint_mask[ys[fov_mask], xs[fov_mask]].astype(np.uint8) - - return grid_visibility.reshape(*ecef_grid[0].shape) - - -def calcROIbounds(array_model, array_view): - """Calculate bounds of ROI in array_view - - Useful for debug visualization. - """ - - # - # Get the ROI size - # - size = array_model.ROI_state['size'] - - # - # Get the transform from the ROI to the data. - # - _, tr = roi.getArraySlice(array_model.img_array, array_view.image_widget.img_item) - - # - # Calculate the bounds. - # - center = float(array_model.img_array.shape[0])/2 - pts = np.array( - [tr.map(x, y) for x, y in \ - ((0, 0), (size.x(), 0), (0, size.y()), (size.x(), size.y()))] - ) - pts = (pts - center) / center - X, Y = pts[:, 1], pts[:, 0] - bounding_phi = np.arctan2(X, Y) - bounding_psi = array_model.fov * np.sqrt(X**2 + Y**2) - - return bounding_phi, bounding_psi - - +"""Utilities for handling export to solver. +""" +from __future__ import division +from CameraNetwork.utils import sun_direction +from CameraNetwork.image_utils import calcSunMaskRect +import cPickle +import cv2 +from enaml.application import deferred_call, is_main_thread +import logging +import math +import os +import numpy as np +import pymap3d +import traceback + + +def exportToShdom( + base_path, + array_items, + grid, + lat, + lon, + alt, + progress_callback): + """Process export of reconstruction data on separate thread. + The export is intended for use in SHDOM. + + Args: + base_path (str): Path to store export data. + array_items (list): List of array items. + grid (list): List of grid array. This is the grid to reconstruct. + lat, lon, alt (float): The latitude, longitude and altitude of the center + of the grid. + progress_callback (function): Callback function to update the (GUI) with + the progress of the export. + + Note: + The directions in the Qt view are as follows: + x axis (horizontal) goes from West (left) to East (right) + y axis (vertical) goes from South (down) to North (up).
+ this makes it an EN axis system + """ + + # + # Reset the progress indicator. + # + progress_cnt = len(array_items) + deferred_call(progress_callback, 0) + + # + # Convert the grid from NED to ECEF + # + GRID = np.meshgrid(*grid) + ecef_grid = pymap3d.ned2ecef(GRID[0], GRID[1], GRID[2], lat, lon, alt) + + # + # Loop on all servers and collect information. + # + export_data = {} + for i, (server_id, (array_model, array_view)) in enumerate(array_items.items()): + try: + # + # Store extra data like camera center, etc. + # + extra_data, sun_alt, sun_az = extraReconstructionData( + array_model, array_view, lat0=lat, lon0=lon, h0=alt) + + img_array = array_model.img_array + + # + # Calculate azimuth and elevation of each pixel. + # Note: + # The interpolation is done in the Y_shdom, X_shdom to avoid + # the seam artifact of PHI at 180 degrees. + # + Y_shdom, X_shdom = np.meshgrid( + np.linspace(-1, 1, array_model.img_array.shape[1]), + np.linspace(-1, 1, array_model.img_array.shape[0]) + ) + Y_shdom = array_view.image_widget.getArrayRegion(Y_shdom) + X_shdom = array_view.image_widget.getArrayRegion(X_shdom) + PHI_shdom, PSI_shdom = getShdomDirections(Y_shdom, X_shdom, array_model.fov) + + # + # Calculate Masks. + # Note: + # sunshader mask is calculated using grabcut. This is used for removing the + # sunshader. + # Manual mask is the (ROI) mask marked by the user. + # sun mask is a mask that blocks the sun. + # + manual_mask = array_view.image_widget.mask + joint_mask = (manual_mask * array_model.sunshader_mask).astype(np.uint8) + + # calculate rectangle around sun mask + rect_sun_mask = calcSunMaskRect( + array_model.img_array.shape, + sun_alt, + sun_az, + radius=array_model.sun_mask_radius + ) + + # + # Project the grid on the image and check viewed voxels. + # Note: + # This measurement is used for checking how many cameras see each voxel. + # TODO: + # This procedure is time expensive and can be cached. + # This should probably be a method of the camera, and this method should + # cache the result, or even be triggered by setting the grid. + # + visibility = projectGridOnCamera(ecef_grid, array_model, joint_mask) + except Exception, e: + logging.error( + "Server {} ignored due to exception:\n{}".format( + server_id, + traceback.format_exc() + ) + ) + continue + + + export_data[server_id] = dict( + extra_data=extra_data, + R=array_view.image_widget.getArrayRegion(img_array[..., 0]), + G=array_view.image_widget.getArrayRegion(img_array[..., 1]), + B=array_view.image_widget.getArrayRegion(img_array[..., 2]), + PHI=PHI_shdom, + PSI=PSI_shdom, + MASK=array_view.image_widget.getArrayRegion(joint_mask), + SUN_MASK=array_view.image_widget.getArrayRegion(array_model.sun_mask), + Visibility=visibility, + manual_mask=array_view.image_widget.getArrayRegion(manual_mask), + cloud_mask=array_view.image_widget.getArrayRegion(array_model.cloud_weights), + sunshader_mask=array_view.image_widget.getArrayRegion(array_model.sunshader_mask), + rect_sun_mask=array_view.image_widget.getArrayRegion(rect_sun_mask) + # TODO: check why getArrayRegion() fails when sending returnMappedCoords=True + ) + + deferred_call(progress_callback, i / progress_cnt) + + # + # Save the results. + # + with open(os.path.join(base_path, 'export_data.pkl'), 'wb') as f: + cPickle.dump(export_data, f) + + deferred_call(progress_callback, 0) + + +def getShdomDirections(Y_shdom, X_shdom, fov=math.pi/2): + """Calculate the (SHDOM) direction of each pixel.
+ + Directions are calculated in SHDOM convention where the direction is + of the photons. + """ + + PHI_shdom = np.pi + np.arctan2(Y_shdom, X_shdom) + PSI_shdom = -np.pi + fov * np.sqrt(X_shdom**2 + Y_shdom**2) + return PHI_shdom, PSI_shdom + + +def extraReconstructionData(array_model, array_view, lat0, lon0, h0): + """Get extra data for the reconstruction + + This includes camera position, sun angle, time etc. + + Note: + The coordinates are given in the following conventions: + 1) Camera position is given in NEU. + 2) sun_mu, sun_az are given in the SHDOM convention + of photons directions. + """ + + # + # Calculate the center of the camera. + # Note that the coords are stored as NEU (in contrast to NED) + # + n, e, d = pymap3d.geodetic2ned( + array_model.latitude, array_model.longitude, array_model.altitude, + lat0=lat0, lon0=lon0, h0=h0) + + # + # Calculate bounding coords (useful for debug visualization) + # + #bounding_phi, bounding_psi = calcROIbounds(array_model, array_view) + + # + # Sun azimuth and altitude + # + sun_alt, sun_az = sun_direction( + latitude=str(array_model.latitude), + longitude=str(array_model.longitude), + altitude=array_model.altitude, + at_time=array_model.img_data.name_time) + + # + # Note: + # shdom_mu = cos(pi/2-alt-pi)=cos(-alt-pi/2)=cos(alt+pi/2) + # + extra_data = \ + dict( + at_time=array_model.img_data.name_time, + sun_mu=math.cos(float(sun_alt)+np.pi/2), + sun_az=float(sun_az)-np.pi, + x=n, + y=e, + z=-d, + #bounding_phi=bounding_phi, + #bounding_psi=bounding_psi + ) + return extra_data, sun_alt, sun_az + + +def projectGridOnCamera(ecef_grid, array_model, joint_mask): + """Project reconstruction grid on camera. + + This is used to estimate the visibility of each voxel by the camera. + """ + + xs, ys, fov_mask = array_model.projectECEF(ecef_grid, filter_fov=False) + xs = xs.astype(np.uint32).flatten() + ys = ys.astype(np.uint32).flatten() + + grid_visibility = np.zeros_like(xs, dtype=np.uint8) + grid_visibility[fov_mask] = \ + joint_mask[ys[fov_mask], xs[fov_mask]].astype(np.uint8) + + return grid_visibility.reshape(*ecef_grid[0].shape) + + +def calcROIbounds(array_model, array_view): + """Calculate bounds of ROI in array_view + + Useful for debug visualization. + """ + + # + # Get the ROI size + # + size = array_model.ROI_state['size'] + + # + # Get the transform from the ROI to the data. + # + + #~, tr = roi.getArraySlice(array_model.img_array, array_view.image_widget.img_item) + # + # Calculate the bounds. + # + center = float(array_model.img_array.shape[0])/2 + pts = np.array( + [tr.map(x, y) for x, y in \ + ((0, 0), (size.x(), 0), (0, size.y()), (size.x(), size.y()))] + ) + pts = (pts - center) / center + X, Y = pts[:, 1], pts[:, 0] + bounding_phi = np.arctan2(X, Y) + bounding_psi = array_model.fov * np.sqrt(X**2 + Y**2) + + return bounding_phi, bounding_psi \ No newline at end of file diff --git a/CameraNetwork/global_settings.py b/CameraNetwork/global_settings.py index 9a31eb8..ca5ff8f 100644 --- a/CameraNetwork/global_settings.py +++ b/CameraNetwork/global_settings.py @@ -1,38 +1,40 @@ -## -## Copyright (C) 2017, Amit Aides, all rights reserved. -## -## This file is part of Camera Network -## (see https://bitbucket.org/amitibo/cameranetwork_git). -## -## Redistribution and use in source and binary forms, with or without modification, -## are permitted provided that the following conditions are met: -## -## 1) The software is provided under the terms of this license strictly for -## academic, non-commercial, not-for-profit purposes. 
-## 2) Redistributions of source code must retain the above copyright notice, this -## list of conditions (license) and the following disclaimer. -## 3) Redistributions in binary form must reproduce the above copyright notice, -## this list of conditions (license) and the following disclaimer in the -## documentation and/or other materials provided with the distribution. -## 4) The name of the author may not be used to endorse or promote products derived -## from this software without specific prior written permission. -## 5) As this software depends on other libraries, the user must adhere to and keep -## in place any licensing terms of those libraries. -## 6) Any publications arising from the use of this software, including but not -## limited to academic journal and conference publications, technical reports and -## manuals, must cite the following works: -## Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). -## -## THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -## EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## +# +# Copyright (C) 2017, Amit Aides, all rights reserved. +# +# This file is part of Camera Network +# (see https://bitbucket.org/amitibo/cameranetwork_git). +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# 1) The software is provided under the terms of this license strictly for +# academic, non-commercial, not-for-profit purposes. +# 2) Redistributions of source code must retain the above copyright notice, this +# list of conditions (license) and the following disclaimer. +# 3) Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions (license) and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# 4) The name of the author may not be used to endorse or promote products derived +# from this software without specific prior written permission. +# 5) As this software depends on other libraries, the user must adhere to and keep +# in place any licensing terms of those libraries. +# 6) Any publications arising from the use of this software, including but not +# limited to academic journal and conference publications, technical reports and +# manuals, must cite the following works: +# Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, +# "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). +# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +# EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + """ Globals """ @@ -55,12 +57,12 @@ # Internet watchdog # WD_TEST_INTERNET_PERIOD = 120 -WD_INTERNET_FAILURE_THRESH = 0 +WD_INTERNET_FAILURE_THRESH = 2 # Changed from 0 DEFAULT_PROXY_PARAMS = """ { "autossh_monitor_port": 10000, - "ip": "35.157.27.102", + "ip": "3.123.47.23", "proxy_port": 1980, "client_port": 1981, "hb_port": 1985, @@ -73,8 +75,8 @@ # Configuration server # CONFIGURATION_SERVER = 'tx.technion.ac.il' -CONFIGURATION_SERVER_USER = 'amitibo' -CONFIGURATION_SERVER_URL_BASE = 'http://%s/~amitibo/cameras_settings/' % CONFIGURATION_SERVER +CONFIGURATION_SERVER_USER = 'amitibo' # TODO Update? +CONFIGURATION_SERVER_URL_BASE = 'http://%s/~amitibo/cameras_settings/' % CONFIGURATION_SERVER # TODO Update? CONFIGURATION_SERVER_BASE_PATH = 'public_html/cameras_settings' PROXY_SETTINGS_FILE_NAME = 'proxy_server_{timestamp}.json' @@ -120,7 +122,7 @@ SSH_TUNNEL_WAIT_TIME = 2 # -# Identities of the proxy sockets used for rounting the messages. +# Identities of the proxy sockets used for routing the messages. # PROXY_DEALER_IDENTITY = 'PROXY_DEALER' PROXY_ROUTER_IDENTITY = 'PROXY_ROUTER' @@ -135,7 +137,7 @@ ODROID_USER = 'odroid' ODROID_PW = 'odroid' -IDENTITY_FILE = os.path.join(os.path.expanduser('~'), 'cameranetwork.pem') +IDENTITY_FILE = os.path.join(os.path.expanduser('~'), 'AdiKP2.pem') # changed from 'cameranetwork.pem' - Adi # # Dropbox folder @@ -150,9 +152,15 @@ # without the exec, the shell=True option causes the shell to open a separate process with a different # pid. 
# -REVERSE_AUTOSSH_CMD = 'AUTOSSH_DEBUG=1 exec autossh -M 0 -v -i {identity_file} -o "ExitOnForwardFailure yes" -o "ServerAliveInterval 30" -o "ServerAliveCountMax 3" -N -R {tunnel_port}:localhost:{local_port} {server_user}@{server_ip}' -REVERSE_SSH_CMD = "exec ssh -i {identity_file} -oExitOnForwardFailure=yes -oServerAliveInterval=60 -N -R {tunnel_port}:localhost:{local_port} {server_user}@{server_ip}" +REVERSE_AUTOSSH_CMD = 'AUTOSSH_DEBUG=1 exec autossh -M 0 -v -i {identity_file} -o "ExitOnForwardFailure yes" -o' \ + ' "ServerAliveInterval 30" -o "ServerAliveCountMax 3" -N -R' \ + ' {tunnel_port}:localhost:{local_port} {server_user}@{server_ip}' + +REVERSE_SSH_CMD = "exec ssh -i {identity_file} -oExitOnForwardFailure=yes -oServerAliveInterval=60 -N -R" \ + " {tunnel_port}:localhost:{local_port} {server_user}@{server_ip}" + SCP_CMD = "scp -i {identity_file} {src_path} {server_user}@{server_ip}:{dst_path}" + TUNNEL_DESCRIPTION = "{tunnel_port}:localhost:{local_port} {server_user}@{server_ip}" # @@ -212,8 +220,8 @@ UPLOAD_JPG_FILE: False, UPLOAD_MAT_FILE: False, DAY_SETTINGS: { - LOOP_DELAY: 300, - IMAGE_EXPOSURE: 50, + LOOP_DELAY: 300, # [sec] + IMAGE_EXPOSURE: 50, # [usec] IMAGE_GAIN: 0, GAIN_BOOST: False, FRAMES_NUM: 5, @@ -221,8 +229,8 @@ HDR_MODE: 4 }, NIGHT_SETTINGS: { - LOOP_DELAY: 1800, - IMAGE_EXPOSURE: 8000000, + LOOP_DELAY: 1800, # [sec] + IMAGE_EXPOSURE: 8000000, # [usec] IMAGE_GAIN: 0, GAIN_BOOST: True, FRAMES_NUM: 1, @@ -256,7 +264,7 @@ # # Amit: # The default radiometric settings were taken from camera 109. -# +# TODO: what are the units? DEFAULT_RADIOMETRIC_SETTINGS = \ [0.077523644728250449, 0.11163146919636868, 0.11847859109577621] @@ -299,7 +307,6 @@ MSG_TYPE_TUNNEL_CHECK = 'tunnel_details' MSG_TYPE_LOCAL = 'local_ip' MSG_TYPE_UPDATE = 'update' -MSG_TYPE_UPDATE = 'update' MSG_STATUS_FIELD = 'status' MSG_STATUS_OK = 'ok' @@ -382,6 +389,7 @@ SUN_ALTITUDE_SUNSHADER_THRESH = 0 SUN_ALTITUDE_EXPOSURE_THRESH = 0.001 + # # Setup paths # @@ -420,7 +428,7 @@ def initPaths(HOME_PATH=None): SUN_POSITIONS_PATH = os.path.join(HOME_PATH, 'sun_positions') DARK_IMAGES_PATH = os.path.join(HOME_PATH, 'dark_images') - UPLOAD_CMD = os.path.join(HOME_PATH, ".local/bin/dropbox_uploader.sh -k upload {capture_path} {upload_path}") + UPLOAD_CMD = os.path.join(HOME_PATH, ".local/bin/dropbox_uploader.sh -k upload {capture_path} {upload_path}") ################################################################################ @@ -433,4 +441,4 @@ def initPaths(HOME_PATH=None): # Visualization stuff ################################################################################ LOS_PTS_NUM = 300 -LOS_LENGTH = 8000 \ No newline at end of file +LOS_LENGTH = 8000 diff --git a/CameraNetwork/gui/data_frame_table.py b/CameraNetwork/gui/data_frame_table.py index eacdd98..39b3998 100644 --- a/CameraNetwork/gui/data_frame_table.py +++ b/CameraNetwork/gui/data_frame_table.py @@ -53,9 +53,11 @@ from enaml.core.declarative import d_ from enaml.widgets.api import RawWidget from enaml.qt.QtCore import QAbstractTableModel, QModelIndex, Qt -from enaml.qt.QtGui import ( - QTableView, QHeaderView, QAbstractItemView, QFontMetrics) - +# fixing import from enaml.qt.QtGui. During the transision from Qt4 to Qt5: QtGui was split into QtGui and QtWidgets +#from enaml.qt.QtGui import ( + #QTableView, QHeaderView, QAbstractItemView, QFontMetrics) # ADI - fixing import of QTableView in Qt5. 
This line was relevant to Qt4 +from enaml.qt.QtGui import QFontMetrics # ADI - preserving include of QFontMetrics in Qt5 +from enaml.qt.QtWidgets import (QTableView,QHeaderView, QAbstractItemView) # ADI - fixing import of (QTableView,QHeaderView, QAbstractItemView) in Qt5 from traits_enaml.utils import get_unicode_string, format_value @@ -228,14 +230,23 @@ def _setup_headers(self): max_width = fmetrics.width(u" {0} ".format( unicode(self.df_model.rowCount()))) self.vheader.setMinimumWidth(max_width) - self.vheader.setClickable(True) + + # self.vheader.setClickable(True) # ADI - this comment is relevant for Qt 4 + self.vheader.setSectionsClickable(True) # ADI - this is relevant for Qt > 4 + self.vheader.setStretchLastSection(False) - self.vheader.setResizeMode(QHeaderView.Fixed) + + # self.vheader.setResizeMode(QHeaderView.Fixed) # ADI - this comment is relevant for Qt 4 + self.vheader.setSectionResizeMode(QHeaderView.Fixed) # ADI - this is relevant for Qt > 4 self.hheader = self.horizontalHeader() self.hheader.setStretchLastSection(False) - self.hheader.setClickable(True) - self.hheader.setMovable(True) + + # self.vheader.setClickable(True) # ADI - this comment is relevant for Qt 4 + self.vheader.setSectionsClickable(True) # ADI - this is relevant for Qt > 4 + + # self.hheader.setMovable(True) # ADI - this comment is relevant for Qt 4 + self.hheader.setSectionsMovable(True) # ADI - this is relevant for Qt > 4 def _setup_style(self): self.setWordWrap(False) @@ -246,14 +257,14 @@ def _current_row_changed(self, model_index): class DataFrameTable(RawWidget): """ A widget that displays a table view tied to a pandas DataFrame.""" - + # # The data frame to display # data_frame = d_(Typed(DataFrame)) selected_row = d_(Int()) selected_index = d_(Typed(object)) - + # # Expand the table by default # @@ -262,19 +273,19 @@ class DataFrameTable(RawWidget): def create_widget(self, parent): """Create the DataFrameTable Qt widget.""" - + widget = QDataFrameTableView.from_data_frame( self.data_frame, parent=parent ) widget.currentChanged = self.current_changed - + return widget @observe('data_frame') def _data_frame_changed(self, change): """ Proxy changes in `data_frame` down to the Qt widget.""" - + table = self.get_widget() if table is not None: df_model = QDataFrameModel(change['value']) @@ -283,7 +294,6 @@ def _data_frame_changed(self, change): table.setModel(df_model) def current_changed(self, current_item, previous_item): - + self.selected_row = current_item.row() self.selected_index = self.data_frame.index[current_item.row()] - \ No newline at end of file diff --git a/CameraNetwork/gui/enaml_files/docks.enaml b/CameraNetwork/gui/enaml_files/docks.enaml index a882162..29af62f 100644 --- a/CameraNetwork/gui/enaml_files/docks.enaml +++ b/CameraNetwork/gui/enaml_files/docks.enaml @@ -14,7 +14,8 @@ from CameraNetwork.gui.image_analysis import ImageAnalysis from CameraNetwork.gui.mayavi_canvas import MayaviCanvas from .extra import GradientButtonSheet -from .popups import ImageDataPopup +from .popups import (ImageDataPopup, MsgDataPopup) +#from .popups import ImageDataPopup TODO Check if fixes issue #6 from .server_pages import (MainControlsPage, ImageControlPage, SeekControlPage, SunshaderControlPage, SprinklerControlPage, IntrinsicControlPage, ExtrinsicControlPage, RadiometricControlPage) @@ -315,7 +316,7 @@ enamldef MapperView(DockItem): mapper_view: clouds_pb, clouds_cb, beta_pb, - beta_cb, + beta_cb ), ), ] @@ -364,6 +365,7 @@ enamldef MapperView(DockItem): mapper_view: text = "Show Beta" + 
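A hedged sketch (not part of the diff) of the Qt4-to-Qt5 renames applied in data_frame_table.py above: QtGui was split into QtGui and QtWidgets, and QHeaderView's setClickable/setMovable/setResizeMode became setSectionsClickable/setSectionsMovable/setSectionResizeMode. A feature test keeps one code path working under both bindings. Note, as a side observation, that the hunk above appears to replace the original self.hheader.setClickable(...) call with one on self.vheader, which may be unintentional.

    try:
        # Qt5: widget classes moved out of QtGui.
        from enaml.qt.QtWidgets import QHeaderView
    except ImportError:
        # Qt4 fallback.
        from enaml.qt.QtGui import QHeaderView

    def make_header_interactive(header):
        """Enable clicking/moving on a QHeaderView under either Qt4 or Qt5."""
        if hasattr(header, "setSectionsClickable"):        # Qt >= 5 names
            header.setSectionsClickable(True)
            header.setSectionsMovable(True)
            header.setSectionResizeMode(QHeaderView.Fixed)
        else:                                              # Qt 4 names
            header.setClickable(True)
            header.setMovable(True)
            header.setResizeMode(QHeaderView.Fixed)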
################################################################################ # Times # Allows querying, displaying and seeking time table of captured images. @@ -463,18 +465,19 @@ enamldef TimesView(DockItem): clicked :: main.create_map() index = times.img_index - main.broadcast_message( - gs.MSG_TYPE_SEEK, - kwds=dict( - seek_time=index[0], - hdr_index=-1 if HDR_cb.checked else index[1], - normalize=True, - jpeg=quality_fld.value if JPEG_cb.checked else False, - resolution=resolution_fld.value, - correct_radiometric=correct_radiometric_cb.checked, - ignore_date_extrinsic=ignore_date_extrinsics_cb.checked, + if len(index) >1: + main.broadcast_message( + gs.MSG_TYPE_SEEK, + kwds=dict( + seek_time=index[0], + hdr_index=-1 if HDR_cb.checked else index[1], + normalize=True, + jpeg=quality_fld.value if JPEG_cb.checked else False, + resolution=resolution_fld.value, + correct_radiometric=correct_radiometric_cb.checked, + ignore_date_extrinsic=ignore_date_extrinsics_cb.checked, + ) ) - ) diff --git a/CameraNetwork/gui/enaml_files/server_pages.enaml b/CameraNetwork/gui/enaml_files/server_pages.enaml index 8bdb173..502075b 100644 --- a/CameraNetwork/gui/enaml_files/server_pages.enaml +++ b/CameraNetwork/gui/enaml_files/server_pages.enaml @@ -580,7 +580,7 @@ enamldef RadiometricControlPage(Page): server_model_, gs.MSG_TYPE_RADIOMETRIC, kwds=dict( - date=radiometric_day.date, + date=query_day.selected, # date=radiometric_day.date time_index=time_combo.items.index(time_combo.selected), residual_threshold=residual_threshold.value, save=cb_save.checked diff --git a/CameraNetwork/gui/main.py b/CameraNetwork/gui/main.py index d4a2d89..cfa9372 100644 --- a/CameraNetwork/gui/main.py +++ b/CameraNetwork/gui/main.py @@ -36,7 +36,7 @@ ## """Run a GUI Client. -A GUI client allows easy access to cameras thier settings and their +A GUI client allows easy access to cameras their settings and their measurements. """ from __future__ import division @@ -560,12 +560,13 @@ def _draw_map_callback(self, X, Y, Z, map_file_name): mayavi_scene = self.map_scene.mayavi_scene - s = self.map_scene.mlab.surf(Y, X, MAP_ZSCALE * Z, figure=mayavi_scene, color=(1.,1.,1.)) + s = self.map_scene.mlab.surf(Y, X, MAP_ZSCALE * Z, figure=mayavi_scene, color=(1.,1.,1.), name='map1') s.actor.actor.mapper.scalar_visibility = False s.actor.enable_texture = True s.actor.tcoord_generator_mode = 'plane' s.actor.actor.texture = texture + def updateROImesh(self, server_id, pts, shape): """Update the 3D visualization of the camera ROI.""" @@ -727,7 +728,12 @@ def updateTimes(self, server_id, images_df): if server_id in self.images_df.columns: new_df.drop(server_id, axis=1, inplace=True) new_df = pd.concat((new_df, images_series), axis=1) - new_df = new_df.reindex_axis(sorted(new_df.columns), axis=1) + + # Pandas backwards compatibility + if pd.__version__ < '0.21.0': + new_df = new_df.reindex_axis(sorted(new_df.columns), axis=1) + else: + new_df = new_df.reindex(sorted(new_df.columns), axis=1) self.images_df = new_df @@ -878,7 +884,7 @@ def projectECEF(self, ECEF_pts, filter_fov=True, errstate='warn'): Args: ECEF_pts (tuple of arrays): points in ECEF coords. - fiter_fov (bool, optional): If True, points below the horizion + filter_fov (bool, optional): If True, points below the horizion will not be returned. If false, the indices of these points will be returned. 
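A small sketch (an assumption, not part of the diff) for the pandas compatibility branch added to updateTimes above: reindex_axis was deprecated in pandas 0.21 and later removed, and comparing pd.__version__ as a plain string is fragile ("0.9.0" sorts above "0.21.0"); a LooseVersion comparison avoids that. The helper name is illustrative.

    from distutils.version import LooseVersion
    import pandas as pd

    def sort_columns(df):
        """Return df with its columns sorted alphabetically, across pandas versions."""
        cols = sorted(df.columns)
        if LooseVersion(pd.__version__) < LooseVersion("0.21.0"):
            return df.reindex_axis(cols, axis=1)   # pandas < 0.21
        return df.reindex(cols, axis=1)            # pandas >= 0.21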
@@ -933,17 +939,20 @@ def _update_cloud_weights(self, change): if change["value"] is None: return + # The following calculation is based on page 23 in Amit's thesis (Computation of cloud scores maps) + # TODO: consider to change cloud score in 2D . r = self.img_array[..., 0].astype(np.float) b = self.img_array[..., 2].astype(np.float) cloud_weights = np.zeros_like(r) eps = np.finfo(b.dtype).eps - threshold = self.cloud_weight_threshold + threshold = self.cloud_weight_threshold # currently 0.8 ratio = r / (b+eps) ratio_mask = ratio>threshold cloud_weights[ratio_mask] = \ (2-threshold)/(1-threshold)*(ratio[ratio_mask]-threshold)/(ratio[ratio_mask]+1-threshold) + #The current calculation: ( 1.2 / 0.2 ) * (r - 0.8 ) / (r + 0.2 ) # # Limit cloud_weights to 1. @@ -1398,15 +1407,15 @@ class MainModel(Atom): # # Reconstruction Grid parameters. # Note: - # There are two grids used: + # There are three grids used: # - GRID_VIS_ECEF: Used for visualization on the camera array. # - GRID_ECEF: Used for the visual hull algorithm. # - GRID_NED: The grid exported for reconstruction. # - delx = Float(150) - dely = Float(150) - delz = Float(100) - TOG = Float(12000) + delx = Float(150) # [meters] + dely = Float(150) # [meters] + delz = Float(100) # [meters] + TOG = Float(12000) # [meters] GRID_VIS_ECEF = Tuple() GRID_ECEF = Tuple() GRID_NED = Tuple() diff --git a/CameraNetwork/image_utils.py b/CameraNetwork/image_utils.py index 3ba93f3..463d333 100644 --- a/CameraNetwork/image_utils.py +++ b/CameraNetwork/image_utils.py @@ -33,7 +33,8 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -"""General utilities for image processing. +""" +General utilities for image processing. """ from __future__ import division, absolute_import, print_function from CameraNetwork.utils import obj @@ -247,8 +248,9 @@ def undistortDirections(self, distorted): Args: distorted (array): nx2 array of distorted image coords (x, y). - Retruns: + Returns: Phi, Theta (array): Phi and Theta undistorted directions. + TODO: mask is also returned... """ if self._ocamcalib_flag: @@ -270,7 +272,7 @@ def undistortDirections(self, distorted): class Normalization(object): """Normalized Image Class - This class encapsulates the conversion between caputered image and + This class encapsulates the conversion between captured image and the normalized image. """ @@ -300,7 +302,7 @@ def calc_normalization_map(self, resolution): # # Create a grid of directions. # The coordinates create a 'linear' fisheye, where the distance - # from the center ranges between 0-pi/2 linearily. + # from the center ranges between 0-pi/2 linearly. # X, Y = np.meshgrid( np.linspace(-1, 1, self.resolution), @@ -410,8 +412,7 @@ def normalize(self, img): ).astype(np.bool) # - # TODO: - # Implement radiometric correction + # TODO: Implement radiometric correction compare with RadiometricCalibration.applyRadiometric() in calibration.py # #normalized_img = radiometric_correction(normalized_img, self._radiometric_model).astype(np.uint8) normalized_img = normalized_img.astype(img_dtype) @@ -534,7 +535,7 @@ def calcSunshaderMask( # # Dilate the mask. # Note: - # The actual action is ersion, as the mask is inversion of the sunshader. + # The actual action is erosion, as the mask is inversion of the sunshader. 
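A standalone sketch (not part of the diff) of the red-to-blue-ratio cloud score computed in _update_cloud_weights above, following the thesis reference in the comment; the function name and the final clipping step are illustrative, mirroring the "Limit cloud_weights to 1" note in the surrounding code.

    import numpy as np

    def cloud_score(img_array, threshold=0.8):
        """Per-pixel cloud weight in [0, 1] from an RGB image array."""
        r = img_array[..., 0].astype(np.float64)
        b = img_array[..., 2].astype(np.float64)
        eps = np.finfo(np.float64).eps

        ratio = r / (b + eps)
        weights = np.zeros_like(ratio)
        mask = ratio > threshold
        # With threshold = 0.8 this is (1.2 / 0.2) * (ratio - 0.8) / (ratio + 0.2),
        # which reaches 1 at ratio = 1 and keeps growing for redder pixels.
        weights[mask] = ((2 - threshold) / (1 - threshold) *
                         (ratio[mask] - threshold) / (ratio[mask] + 1 - threshold))
        return np.clip(weights, 0.0, 1.0)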
# if dilate_size > 1: kernel = cv2.getStructuringElement( @@ -583,12 +584,34 @@ def projectECEFThread( # deferred_call(setattr, array_model, 'grid_2D', grid_2D) +def rectangle(center_x, center_y, width_x=0.25, width_y=0.25): + """Returns a gaussian function with the given parameters""" + return lambda x,y : ((np.abs ( center_x - x ) <= width_x) & + (np.abs ( center_y - y ) <= width_y)).astype(np.int) + + def gaussian(center_x, center_y, height=1., width_x=0.25, width_y=0.25): """Returns a gaussian function with the given parameters""" return lambda x, y: height*np.exp(-(((center_x-x)/width_x)**2+((center_y-y)/width_y)**2)/2) +def calcSunMaskRect(img_shape, sun_alt, sun_az, radius=0.25): + """Calculate a rectangle of the mask for the sun. + + The sun pixels are weighted by a gaussian. + """ + + sun_r = (np.pi / 2 - sun_alt) / (np.pi / 2) + sun_x = sun_r * np.sin ( sun_az ) + sun_y = sun_r * np.cos ( sun_az ) + + X , Y = np.meshgrid ( + np.linspace ( -1 , 1 , img_shape [ 1 ] ) , + np.linspace ( -1 , 1 , img_shape [ 0 ] ) + ) + + return rectangle(center_x = sun_x,center_y = sun_y,width_x = radius,width_y = radius)(X,Y) def calcSunMask(img_shape, sun_alt, sun_az, radius=0.25): """Calculate a mask for the sun. @@ -702,23 +725,26 @@ def calc_visual_hull( # Calculate the collective clouds weight. # weights = np.array(grid_scores).prod(axis=0) - + # TODO: Consider to change the prod to other arithmethic calculation. (maybe OR ?) (I think this removes clouds edges from the space carving) # # voxels that are not seen (outside the fov/sun_mask) by at least two cameras # are zeroed. # grid_masks = np.array(grid_masks).sum(axis=0) weights[grid_masks<2] = 0 + + # + # More cameras viewing the voxel increase it's own score. + # nzi = weights > 0 weights[nzi] = weights[nzi]**(1/grid_masks[nzi]) # - # Calculate color consistency as described in the article + # Calculate color consistency as described in the article. (see Equation 5.5 in p.54 , Amit's thesis) # std_rgb = np.dstack(cloud_rgb).std(axis=2).mean(axis=1) mean_rgb = np.dstack(cloud_rgb).mean(axis=2).mean(axis=1) - color_consistency = np.exp(-(std_rgb/mean_rgb)/color_consistency_sigma) - + color_consistency = np.exp(-(std_rgb/(mean_rgb + np.finfo(float).eps))/color_consistency_sigma) # # Take into account both the clouds weights and photo consistency. # diff --git a/CameraNetwork/mdp/__init__.py b/CameraNetwork/mdp/__init__.py index eaddb76..7854c7d 100644 --- a/CameraNetwork/mdp/__init__.py +++ b/CameraNetwork/mdp/__init__.py @@ -46,7 +46,7 @@ process offering exactly one service in request/reply fashion. client - independant process using a service in request/reply fashion. + independent process using a service in request/reply fashion. broker process routing messages from a client to a worker and back. @@ -77,6 +77,7 @@ __author__ = 'Guido Goldstein' __email__ = 'gst-py@a-nugget.de' + from client import MDPClient from worker import MDPWorker from broker import MDPBroker diff --git a/CameraNetwork/mdp/broker.py b/CameraNetwork/mdp/broker.py index 0e62011..bbe918c 100644 --- a/CameraNetwork/mdp/broker.py +++ b/CameraNetwork/mdp/broker.py @@ -81,7 +81,7 @@ class MDPBroker(object): requested service. This base class defines the overall functionality and the API. Subclasses are - ment to implement additional features (like logging). + meant to implement additional features (like logging). The broker uses ZMQ ROUTER sockets to deal with clients and workers. These sockets are wrapped in pyzmq streams to fit well into IOLoop. 
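A minimal sketch (not part of the diff, and not the broker's actual code) of the pattern the MDPBroker docstring describes: a ZMQ ROUTER socket wrapped in a pyzmq ZMQStream so that incoming frames arrive as IOLoop callbacks. The endpoint and handler are placeholders.

    import zmq
    from zmq.eventloop.ioloop import IOLoop
    from zmq.eventloop.zmqstream import ZMQStream

    context = zmq.Context()
    router = context.socket(zmq.ROUTER)
    router.bind("tcp://*:5555")            # placeholder endpoint

    def on_message(frames):
        # frames[0] is the peer identity prepended by the ROUTER socket;
        # the remaining frames are the message body.
        sender, body = frames[0], frames[1:]

    stream = ZMQStream(router)
    stream.on_recv(on_message)             # invoked from within the IOLoop
    IOLoop.instance().start()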
@@ -628,7 +628,7 @@ class WorkerRep(object): """Helper class to represent a worker in the broker. Instances of this class are used to track the state of the attached worker - and carry the timers for incomming and outgoing heartbeats. + and carry the timers for incoming and outgoing heartbeats. :param proto: the worker protocol id. :type wid: str @@ -655,7 +655,7 @@ def __init__(self, proto, wid, service, stream): def send_uniqueid(self): """Called on W_READY from worker. - Sends unique id tu worker. + Sends unique id to worker. """ logging.debug('Broker to Worker {} sending unique id: {}'.format( diff --git a/CameraNetwork/mdp/worker.py b/CameraNetwork/mdp/worker.py index 70b7934..fa59cdd 100644 --- a/CameraNetwork/mdp/worker.py +++ b/CameraNetwork/mdp/worker.py @@ -158,7 +158,6 @@ def _tick(self): # logging.debug('Worker lost connection') self.shutdown() - # # try to recreate the connection # @@ -278,7 +277,7 @@ def _on_message(self, msg): self.curr_liveness = 0 elif msg_type == W_READY: # - # The message contains the unique id attahced to the worker. + # The message contains the unique id attached to the worker. # if len(msg) > 0: # diff --git a/CameraNetwork/server.py b/CameraNetwork/server.py index de2483b..0832f46 100644 --- a/CameraNetwork/server.py +++ b/CameraNetwork/server.py @@ -509,7 +509,7 @@ def loop_timer(self): # Store time here so that hopefully it will be as synchronized # as possible. # - name_time=datetime.utcnow() + name_time = datetime.utcnow() # # Select capture settings according day night. @@ -548,9 +548,7 @@ def loop_timer(self): # capture_delay = max(1, capture_settings[gs.LOOP_DELAY]) - next_capture_time = ( - int(time.time() / capture_delay) + 1 - ) * capture_delay - time.time() + next_capture_time = (int(time.time() / capture_delay) + 1) * capture_delay - time.time() nxt = gen.sleep(next_capture_time) @@ -611,7 +609,7 @@ def loop_timer(self): ########################################################### - # Message handlders + # Message handlers ########################################################### @gen.coroutine @@ -831,7 +829,7 @@ def handle_array( gain_boost (bool, optional): Analog gain boost. Default False. color_mode (int, optional): Color mode of image. Default RAW. frames_num (int, optional): Number of frames to capture. Default 1. - jpeg (bool, optional): Whether to whether to compress as jpeg. Default False. + jpeg (bool, optional): Whether to compress as jpeg. Default False. normalize (bool, optional): Whether to normalize the image. Default True. Returns: @@ -1355,4 +1353,5 @@ def handle_reboot(self): logging.info("Performing reboot") logging.shutdown() - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging diff --git a/CameraNetwork/sunphotometer.py b/CameraNetwork/sunphotometer.py index 78c75f3..5257fcc 100644 --- a/CameraNetwork/sunphotometer.py +++ b/CameraNetwork/sunphotometer.py @@ -1,38 +1,38 @@ -## -## Copyright (C) 2017, Amit Aides, all rights reserved. -## -## This file is part of Camera Network -## (see https://bitbucket.org/amitibo/cameranetwork_git). -## -## Redistribution and use in source and binary forms, with or without modification, -## are permitted provided that the following conditions are met: -## -## 1) The software is provided under the terms of this license strictly for -## academic, non-commercial, not-for-profit purposes. 
-## 2) Redistributions of source code must retain the above copyright notice, this -## list of conditions (license) and the following disclaimer. -## 3) Redistributions in binary form must reproduce the above copyright notice, -## this list of conditions (license) and the following disclaimer in the -## documentation and/or other materials provided with the distribution. -## 4) The name of the author may not be used to endorse or promote products derived -## from this software without specific prior written permission. -## 5) As this software depends on other libraries, the user must adhere to and keep -## in place any licensing terms of those libraries. -## 6) Any publications arising from the use of this software, including but not -## limited to academic journal and conference publications, technical reports and -## manuals, must cite the following works: -## Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). -## -## THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -## EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## +## +## Copyright (C) 2017, Amit Aides, all rights reserved. +## +## This file is part of Camera Network +## (see https://bitbucket.org/amitibo/cameranetwork_git). +## +## Redistribution and use in source and binary forms, with or without modification, +## are permitted provided that the following conditions are met: +## +## 1) The software is provided under the terms of this license strictly for +## academic, non-commercial, not-for-profit purposes. +## 2) Redistributions of source code must retain the above copyright notice, this +## list of conditions (license) and the following disclaimer. +## 3) Redistributions in binary form must reproduce the above copyright notice, +## this list of conditions (license) and the following disclaimer in the +## documentation and/or other materials provided with the distribution. +## 4) The name of the author may not be used to endorse or promote products derived +## from this software without specific prior written permission. +## 5) As this software depends on other libraries, the user must adhere to and keep +## in place any licensing terms of those libraries. +## 6) Any publications arising from the use of this software, including but not +## limited to academic journal and conference publications, technical reports and +## manuals, must cite the following works: +## Dmitry Veikherman, Amit Aides, Yoav Y. Schechner and Aviad Levis, "Clouds in The Cloud" Proc. ACCV, pp. 659-674 (2014). +## +## THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +## WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +## MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +## EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +## INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## from __future__ import division, print_function, absolute_import import cv2 import datetime @@ -43,7 +43,7 @@ import pandas as pd WAVELENGTHS = ['0.4405', '0.5000', '0.6744'] -SUNPHOTOMETER_WAVELENGTHS = (0.4405, 0.5000, 0.6744) +SUNPHOTOMETER_WAVELENGTHS = (0.4405, 0.5000, 0.6744) def calcAlmucantarPrinciplePlanes( @@ -223,90 +223,90 @@ def sampleImage(img, img_data, almucantar_angles=None, principleplane_angles=Non #cv2.circle(img, (int(x), int(y)), 2, (255, 255, 0)) #return angles, values, almucantar_samples, img, closest_time - - -def sampleData( - camera_client, - spm_dfs, - QEs, - ch_index, - time_index, - camera_df, - camera_id, - resolution=301, - overlay_angles=True): - """Samples almucantar values of some camera at specific time and color channel. - - Args: - camera_client (camera client object): Client to access the camera servers. - spm_dfs (list of DataFrames): Sunphotometer readings (one for each visible - in the order BGR). - QEs (list of DataFrames): Quantum Efficiency graphs of the camera in RGB order. - ch_index (int): Color channel to process (in order [R, G, B]) - time_index (int): Time index for the spm dataframes. - camera_df (DataFrame): DataFrames of images captured for the specific day. - camera_id (str): The camera to read from. - resoluiton (int): The resolution in which to sample the image. - overlay_angles (boolean): Overlay almucantar angles on the image. - - Returns: - angles, values, almucantar_samples, img, closest_time: Almacuntar angles, - sunphotometer values, image values measured at the spm angles, etc. - - Note: - This function is supposed to be used from a notebook (it uses the camera - clinet object). - """ - - # - # Read the SunPhotometer values at specific time. - # - angles_blue, values_blue = readSunPhotoMeter(spm_dfs[0], spm_dfs[0].index[time_index]) - angles_green, values_green = readSunPhotoMeter(spm_dfs[1], spm_dfs[1].index[time_index]) - angles_red, values_red = readSunPhotoMeter(spm_dfs[2], spm_dfs[2].index[time_index]) - - # - # Join all datasets. This is important as not all datasets are sampled - # at all angles. Therefore I use dropna() at the end. - # Note: - # The sun-photometer Dataframe is created in the order BGR to allow for the integration - # along the visual spectrum. - # - blue_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[0]: values_blue}, index=angles_blue) - green_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[1]: values_green}, index=angles_green) - red_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[2]: values_red}, index=angles_red) - SPM_df = pd.concat((blue_df, green_df, red_df), axis=1).dropna() - - angles, values = integrate_QE_SP(SPM_df, QEs[ch_index]) - - # - # Get the closest image time. 
- # - t = spm_dfs[ch_index].index[time_index] - closest_time = findClosestImageTime(camera_df, t, hdr='2') - img, img_data = camera_client.seek( - server_id=camera_id, - seek_time=closest_time, - hdr_index=-1, - jpeg=False, - resolution=resolution, - correct_radiometric=False - ) - img = img[0] - img_data = img_data[0] - - almucantar_samples, almucantar_angles, almucantar_coords, \ - _, _, _ = sampleImage(img, img_data, almucantar_angles=angles) - - # - # Visualize the sampling positions on the image. - # - if overlay_angles: - import cv2 - for x, y in zip(almucantar_coords[0], almucantar_coords[1]): - cv2.circle(img, (int(x), int(y)), 2, (255, 255, 0)) - - return angles, values, almucantar_samples, img, closest_time, img_data + + +def sampleData( + camera_client, + spm_dfs, + QEs, + ch_index, + time_index, + camera_df, + camera_id, + resolution=301, + overlay_angles=True): + """Samples almucantar values of some camera at specific time and color channel. + + Args: + camera_client (camera client object): Client to access the camera servers. + spm_dfs (list of DataFrames): Sunphotometer readings (one for each visible + in the order BGR). + QEs (list of DataFrames): Quantum Efficiency graphs of the camera in RGB order. + ch_index (int): Color channel to process (in order [R, G, B]) + time_index (int): Time index for the spm dataframes. + camera_df (DataFrame): DataFrames of images captured for the specific day. + camera_id (str): The camera to read from. + resoluiton (int): The resolution in which to sample the image. + overlay_angles (boolean): Overlay almucantar angles on the image. + + Returns: + angles, values, almucantar_samples, img, closest_time: Almacuntar angles, + sunphotometer values, image values measured at the spm angles, etc. + + Note: + This function is supposed to be used from a notebook (it uses the camera + clinet object). + """ + + # + # Read the SunPhotometer values at specific time. + # + angles_blue, values_blue = readSunPhotoMeter(spm_dfs[0], spm_dfs[0].index[time_index]) + angles_green, values_green = readSunPhotoMeter(spm_dfs[1], spm_dfs[1].index[time_index]) + angles_red, values_red = readSunPhotoMeter(spm_dfs[2], spm_dfs[2].index[time_index]) + + # + # Join all datasets. This is important as not all datasets are sampled + # at all angles. Therefore I use dropna() at the end. + # Note: + # The sun-photometer Dataframe is created in the order BGR to allow for the integration + # along the visual spectrum. + # + blue_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[0]: values_blue}, index=angles_blue) + green_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[1]: values_green}, index=angles_green) + red_df = pd.DataFrame(data={SUNPHOTOMETER_WAVELENGTHS[2]: values_red}, index=angles_red) + SPM_df = pd.concat((blue_df, green_df, red_df), axis=1).dropna() + + angles, values = integrate_QE_SP(SPM_df, QEs[ch_index]) + + # + # Get the closest image time. + # + t = spm_dfs[ch_index].index[time_index] + closest_time = findClosestImageTime(camera_df, t, hdr='2') + img, img_data = camera_client.seek( + server_id=camera_id, + seek_time=closest_time, + hdr_index=-1, + jpeg=False, + resolution=resolution, + correct_radiometric=False + ) + img = img[0] + img_data = img_data[0] + + almucantar_samples, almucantar_angles, almucantar_coords, \ + _, _, _ = sampleImage(img, img_data, almucantar_angles=angles) + + # + # Visualize the sampling positions on the image. 
+ # + if overlay_angles: + import cv2 + for x, y in zip(almucantar_coords[0], almucantar_coords[1]): + cv2.circle(img, (int(x), int(y)), 2, (255, 255, 0)) + + return angles, values, almucantar_samples, img, closest_time, img_data def readSunPhotoMeter(df, timestamp, sun_angles=5): @@ -348,7 +348,7 @@ def readSunPhotoMeter(df, timestamp, sun_angles=5): def calcSunphometerCoords(img_data, resolution): - """Calculate the Almucantar and PrinciplePlanes for a specifica datetime.""" + """Calculate the Almucantar and PrinciplePlanes for a specific datetime.""" Almucantar_coords, PrincipalPlane_coords, _, _ = \ calcAlmucantarPrinciplePlanes( @@ -360,81 +360,81 @@ def calcSunphometerCoords(img_data, resolution): return Almucantar_coords.T.tolist(), PrincipalPlane_coords.T.tolist() -def calcSunCoords(img_data, resolution): - """Calculate the Sun coords for a specifica datetime.""" - - # Create an Sun/observer at camera position - # - observer = ephem.Observer() - observer.lat, observer.long, observer.date = \ - str(img_data.latitude), str(img_data.longitude), img_data.capture_time - - sun = ephem.Sun(observer) - - # - # Calculate sun angles. - # - sun_az = np.array([sun.az]) - sun_alts = np.array([sun.alt]) - - # - # Convert sun angles to image coords. - # - sun_radius = (np.pi/2 - sun_alts)/(np.pi/2) - sun_x = (sun_radius * np.sin(sun_az) + 1) * resolution / 2 - sun_y = (sun_radius * np.cos(sun_az) + 1) * resolution / 2 - Sun_coords = np.array((sun_x, sun_y)) - - return Sun_coords.T.tolist() - - -def integrate_QE_SP(SPM_df, QE): - """Caclulate the argument: - \int_{\lambda} \mathrm{QE}_{\lambda} \, \lambda \, L^{\mathrm{S-P}}_{\lambda} \, d{\lambda} - - This integral is calculated for each almacuntar angle (for specfic day time). - - Args: - SPM_df (pandas dataframe): Dataframe of Sun Photometer readings, arranged - in BGR order. - QE (pandas Dataframe): Dataframe of Quantum Efficieny of a specific channel. - - Returns: - Integration of the Sun Photometer radiances (per SP almacuntar angle) - scaled by the Quantum Efficiency of the specific channel. - """ - - from scipy.interpolate import InterpolatedUnivariateSpline - - # - # Limits and density of the integraion. - # - start, end = 0.4, 0.7 - dlambda = 0.005 - xspl = np.linspace(start, end, int((end - start) / dlambda)) - - interp = [] - for angle, row, in SPM_df.iterrows(): - # - # Interpolate the sun photometer values along the wavelengths axis. - # - sp_vals = row.values - isp = InterpolatedUnivariateSpline(SUNPHOTOMETER_WAVELENGTHS, sp_vals, k=2) - sp_ipol = isp(xspl) - - # - # Interpolate the Quantum Efficiencies along the wavelenghts axis - # Note: - # The QE wavelengths are given in nm, and values are given in 100 percent. - # So I scale these by 1/1000 and 1/100 respectively. - # - QEp = InterpolatedUnivariateSpline(QE["wavelength"].values/1000, QE["QE"]/100) - QE_ipol = QEp(xspl) - - # - # Integrate the value: - # \int_{\lambda} \mathrm{QE}_{\lambda} \, \lambda \, L^{\mathrm{S-P}}_{\lambda} \, d{\lambda} - # - interp.append(np.trapz(QE_ipol * xspl * sp_ipol, xspl)) - - return SPM_df.index.values, interp +def calcSunCoords(img_data, resolution): + """Calculate the Sun coords for a specifica datetime.""" + + # Create an Sun/observer at camera position + # + observer = ephem.Observer() + observer.lat, observer.long, observer.date = \ + str(img_data.latitude), str(img_data.longitude), img_data.capture_time + + sun = ephem.Sun(observer) + + # + # Calculate sun angles. 
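A small sketch (not part of the diff) of the linear-fisheye mapping used just below in calcSunCoords, and likewise in calcSunMaskRect in image_utils.py: altitude pi/2 (zenith) maps to the image centre and altitude 0 (horizon) to the rim, with az = 0 pointing along +y. The function name and the worked value are illustrative.

    import numpy as np

    def altaz_to_pixel(alt, az, resolution):
        """Map (altitude, azimuth) in radians to (x, y) image coordinates."""
        radius = (np.pi / 2 - alt) / (np.pi / 2)   # 0 at zenith, 1 at horizon
        x = (radius * np.sin(az) + 1) * resolution / 2
        y = (radius * np.cos(az) + 1) * resolution / 2
        return x, y

    # Example: the zenith falls on the image centre.
    # altaz_to_pixel(np.pi / 2, 0.0, 301)  ->  (150.5, 150.5)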
+ # + sun_az = np.array([sun.az]) + sun_alts = np.array([sun.alt]) + + # + # Convert sun angles to image coords. + # + sun_radius = (np.pi/2 - sun_alts)/(np.pi/2) + sun_x = (sun_radius * np.sin(sun_az) + 1) * resolution / 2 + sun_y = (sun_radius * np.cos(sun_az) + 1) * resolution / 2 + Sun_coords = np.array((sun_x, sun_y)) + + return Sun_coords.T.tolist() + + +def integrate_QE_SP(SPM_df, QE): + """Caclulate the argument: + \int_{\lambda} \mathrm{QE}_{\lambda} \, \lambda \, L^{\mathrm{S-P}}_{\lambda} \, d{\lambda} + + This integral is calculated for each almacuntar angle (for specfic day time). + + Args: + SPM_df (pandas dataframe): Dataframe of Sun Photometer readings, arranged + in BGR order. + QE (pandas Dataframe): Dataframe of Quantum Efficieny of a specific channel. + + Returns: + Integration of the Sun Photometer radiances (per SP almacuntar angle) + scaled by the Quantum Efficiency of the specific channel. + """ + + from scipy.interpolate import InterpolatedUnivariateSpline + + # + # Limits and density of the integraion. + # + start, end = 0.4, 0.7 + dlambda = 0.005 + xspl = np.linspace(start, end, int((end - start) / dlambda)) + + interp = [] + for angle, row, in SPM_df.iterrows(): + # + # Interpolate the sun photometer values along the wavelengths axis. + # + sp_vals = row.values + isp = InterpolatedUnivariateSpline(SUNPHOTOMETER_WAVELENGTHS, sp_vals, k=2) + sp_ipol = isp(xspl) + + # + # Interpolate the Quantum Efficiencies along the wavelenghts axis + # Note: + # The QE wavelengths are given in nm, and values are given in 100 percent. + # So I scale these by 1/1000 and 1/100 respectively. + # + QEp = InterpolatedUnivariateSpline(QE["wavelength"].values/1000, QE["QE"]/100) + QE_ipol = QEp(xspl) + + # + # Integrate the value: + # \int_{\lambda} \mathrm{QE}_{\lambda} \, \lambda \, L^{\mathrm{S-P}}_{\lambda} \, d{\lambda} + # + interp.append(np.trapz(QE_ipol * xspl * sp_ipol, xspl)) + + return SPM_df.index.values, interp diff --git a/CameraNetwork/transformation_matrices.py b/CameraNetwork/transformation_matrices.py index 4b0bd1a..e2f2a27 100644 --- a/CameraNetwork/transformation_matrices.py +++ b/CameraNetwork/transformation_matrices.py @@ -1,1919 +1,1919 @@ -# -*- coding: utf-8 -*- -# transformations.py - -# Copyright (c) 2006-2015, Christoph Gohlke -# Copyright (c) 2006-2015, The Regents of the University of California -# Produced at the Laboratory for Fluorescence Dynamics -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of the copyright holders nor the names of any -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -"""Homogeneous Transformation Matrices and Quaternions. - -A library for calculating 4x4 matrices for translating, rotating, reflecting, -scaling, shearing, projecting, orthogonalizing, and superimposing arrays of -3D homogeneous coordinates as well as for converting between rotation matrices, -Euler angles, and quaternions. Also includes an Arcball control object and -functions to decompose transformation matrices. - -:Author: - `Christoph Gohlke `_ - -:Organization: - Laboratory for Fluorescence Dynamics, University of California, Irvine - -:Version: 2015.07.18 - -Requirements ------------- -* `CPython 2.7 or 3.4 `_ -* `Numpy 1.9 `_ -* `Transformations.c 2015.07.18 `_ - (recommended for speedup of some functions) - -Notes ------ -The API is not stable yet and is expected to change between revisions. - -This Python code is not optimized for speed. Refer to the transformations.c -module for a faster implementation of some functions. - -Documentation in HTML format can be generated with epydoc. - -Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using -numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using -numpy.dot(M, v) for shape (4, \*) column vectors, respectively -numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points"). - -This module follows the "column vectors on the right" and "row major storage" -(C contiguous) conventions. The translation components are in the right column -of the transformation matrix, i.e. M[:3, 3]. -The transpose of the transformation matrices may have to be used to interface -with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16]. - -Calculations are carried out with numpy.float64 precision. - -Vector, point, quaternion, and matrix function arguments are expected to be -"array like", i.e. tuple, list, or numpy arrays. - -Return types are numpy arrays unless specified otherwise. - -Angles are in radians unless specified otherwise. - -Quaternions w+ix+jy+kz are represented as [w, x, y, z]. - -A triple of Euler angles can be applied/interpreted in 24 ways, which can -be specified using a 4 character string or encoded 4-tuple: - - *Axes 4-string*: e.g. 'sxyz' or 'ryxy' - - - first character : rotations are applied to 's'tatic or 'r'otating frame - - remaining characters : successive rotation axis 'x', 'y', or 'z' - - *Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1) - - - inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix. - - parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed - by 'z', or 'z' is followed by 'x'. Otherwise odd (1). - - repetition : first and last axis are same (1) or different (0). - - frame : rotations are applied to static (0) or rotating (1) frame. - -Other Python packages and modules for 3D transformations and quaternions: - -* `Transforms3d `_ - includes most code of this module. -* `Blender.mathutils `_ -* `numpy-dtypes `_ - -References ----------- -(1) Matrices and transformations. Ronald Goldman. 
- In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990. -(2) More matrices and transformations: shear and pseudo-perspective. - Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. -(3) Decomposing a matrix into simple transformations. Spencer Thomas. - In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. -(4) Recovering the data from the transformation matrix. Ronald Goldman. - In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991. -(5) Euler angle conversion. Ken Shoemake. - In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994. -(6) Arcball rotation control. Ken Shoemake. - In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994. -(7) Representing attitude: Euler angles, unit quaternions, and rotation - vectors. James Diebel. 2006. -(8) A discussion of the solution for the best rotation to relate two sets - of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828. -(9) Closed-form solution of absolute orientation using unit quaternions. - BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642. -(10) Quaternions. Ken Shoemake. - http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf -(11) From quaternion to matrix and back. JMP van Waveren. 2005. - http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm -(12) Uniform random rotations. Ken Shoemake. - In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992. -(13) Quaternion in molecular modeling. CFF Karney. - J Mol Graph Mod, 25(5):595-604 -(14) New method for extracting the quaternion from a rotation matrix. - Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087. -(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann. - Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130. -(16) Column Vectors vs. Row Vectors. - http://steve.hollasch.net/cgindex/math/matrix/column-vec.html - -Examples --------- ->>> alpha, beta, gamma = 0.123, -1.234, 2.345 ->>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1] ->>> I = identity_matrix() ->>> Rx = rotation_matrix(alpha, xaxis) ->>> Ry = rotation_matrix(beta, yaxis) ->>> Rz = rotation_matrix(gamma, zaxis) ->>> R = concatenate_matrices(Rx, Ry, Rz) ->>> euler = euler_from_matrix(R, 'rxyz') ->>> numpy.allclose([alpha, beta, gamma], euler) -True ->>> Re = euler_matrix(alpha, beta, gamma, 'rxyz') ->>> is_same_transform(R, Re) -True ->>> al, be, ga = euler_from_matrix(Re, 'rxyz') ->>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz')) -True ->>> qx = quaternion_about_axis(alpha, xaxis) ->>> qy = quaternion_about_axis(beta, yaxis) ->>> qz = quaternion_about_axis(gamma, zaxis) ->>> q = quaternion_multiply(qx, qy) ->>> q = quaternion_multiply(q, qz) ->>> Rq = quaternion_matrix(q) ->>> is_same_transform(R, Rq) -True ->>> S = scale_matrix(1.23, origin) ->>> T = translation_matrix([1, 2, 3]) ->>> Z = shear_matrix(beta, xaxis, origin, zaxis) ->>> R = random_rotation_matrix(numpy.random.rand(3)) ->>> M = concatenate_matrices(T, R, Z, S) ->>> scale, shear, angles, trans, persp = decompose_matrix(M) ->>> numpy.allclose(scale, 1.23) -True ->>> numpy.allclose(trans, [1, 2, 3]) -True ->>> numpy.allclose(shear, [0, math.tan(beta), 0]) -True ->>> is_same_transform(R, euler_matrix(axes='sxyz', *angles)) -True ->>> M1 = compose_matrix(scale, shear, angles, trans, persp) ->>> is_same_transform(M, M1) -True ->>> v0, v1 = random_vector(3), random_vector(3) ->>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1)) ->>> v2 = numpy.dot(v0, M[:3,:3].T) ->>> numpy.allclose(unit_vector(v1), unit_vector(v2)) -True 
- -""" - -from __future__ import division, print_function - -import math - -import numpy - -__version__ = '2015.07.18' -__docformat__ = 'restructuredtext en' -__all__ = () - - -def identity_matrix(): - """Return 4x4 identity/unit matrix. - - >>> I = identity_matrix() - >>> numpy.allclose(I, numpy.dot(I, I)) - True - >>> numpy.sum(I), numpy.trace(I) - (4.0, 4.0) - >>> numpy.allclose(I, numpy.identity(4)) - True - - """ - return numpy.identity(4) - - -def translation_matrix(direction): - """Return matrix to translate by direction vector. - - >>> v = numpy.random.random(3) - 0.5 - >>> numpy.allclose(v, translation_matrix(v)[:3, 3]) - True - - """ - M = numpy.identity(4) - M[:3, 3] = direction[:3] - return M - - -def translation_from_matrix(matrix): - """Return translation vector from translation matrix. - - >>> v0 = numpy.random.random(3) - 0.5 - >>> v1 = translation_from_matrix(translation_matrix(v0)) - >>> numpy.allclose(v0, v1) - True - - """ - return numpy.array(matrix, copy=False)[:3, 3].copy() - - -def reflection_matrix(point, normal): - """Return matrix to mirror at plane defined by point and normal vector. - - >>> v0 = numpy.random.random(4) - 0.5 - >>> v0[3] = 1. - >>> v1 = numpy.random.random(3) - 0.5 - >>> R = reflection_matrix(v0, v1) - >>> numpy.allclose(2, numpy.trace(R)) - True - >>> numpy.allclose(v0, numpy.dot(R, v0)) - True - >>> v2 = v0.copy() - >>> v2[:3] += v1 - >>> v3 = v0.copy() - >>> v2[:3] -= v1 - >>> numpy.allclose(v2, numpy.dot(R, v3)) - True - - """ - normal = unit_vector(normal[:3]) - M = numpy.identity(4) - M[:3, :3] -= 2.0 * numpy.outer(normal, normal) - M[:3, 3] = (2.0 * numpy.dot(point[:3], normal)) * normal - return M - - -def reflection_from_matrix(matrix): - """Return mirror plane point and normal vector from reflection matrix. - - >>> v0 = numpy.random.random(3) - 0.5 - >>> v1 = numpy.random.random(3) - 0.5 - >>> M0 = reflection_matrix(v0, v1) - >>> point, normal = reflection_from_matrix(M0) - >>> M1 = reflection_matrix(point, normal) - >>> is_same_transform(M0, M1) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=False) - # normal: unit eigenvector corresponding to eigenvalue -1 - w, V = numpy.linalg.eig(M[:3, :3]) - i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no unit eigenvector corresponding to eigenvalue -1") - normal = numpy.real(V[:, i[0]]).squeeze() - # point: any unit eigenvector corresponding to eigenvalue 1 - w, V = numpy.linalg.eig(M) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no unit eigenvector corresponding to eigenvalue 1") - point = numpy.real(V[:, i[-1]]).squeeze() - point /= point[3] - return point, normal - - -def rotation_matrix(angle, direction, point=None): - """Return matrix to rotate about axis defined by point and direction. 
- - >>> R = rotation_matrix(math.pi/2, [0, 0, 1], [1, 0, 0]) - >>> numpy.allclose(numpy.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1]) - True - >>> angle = (random.random() - 0.5) * (2*math.pi) - >>> direc = numpy.random.random(3) - 0.5 - >>> point = numpy.random.random(3) - 0.5 - >>> R0 = rotation_matrix(angle, direc, point) - >>> R1 = rotation_matrix(angle-2*math.pi, direc, point) - >>> is_same_transform(R0, R1) - True - >>> R0 = rotation_matrix(angle, direc, point) - >>> R1 = rotation_matrix(-angle, -direc, point) - >>> is_same_transform(R0, R1) - True - >>> I = numpy.identity(4, numpy.float64) - >>> numpy.allclose(I, rotation_matrix(math.pi*2, direc)) - True - >>> numpy.allclose(2, numpy.trace(rotation_matrix(math.pi/2, - ... direc, point))) - True - - """ - sina = math.sin(angle) - cosa = math.cos(angle) - direction = unit_vector(direction[:3]) - # rotation matrix around unit vector - R = numpy.diag([cosa, cosa, cosa]) - R += numpy.outer(direction, direction) * (1.0 - cosa) - direction *= sina - R += numpy.array([[ 0.0, -direction[2], direction[1]], - [ direction[2], 0.0, -direction[0]], - [-direction[1], direction[0], 0.0]]) - M = numpy.identity(4) - M[:3, :3] = R - if point is not None: - # rotation not around origin - point = numpy.array(point[:3], dtype=numpy.float64, copy=False) - M[:3, 3] = point - numpy.dot(R, point) - return M - - -def rotation_from_matrix(matrix): - """Return rotation angle and axis from rotation matrix. - - >>> angle = (random.random() - 0.5) * (2*math.pi) - >>> direc = numpy.random.random(3) - 0.5 - >>> point = numpy.random.random(3) - 0.5 - >>> R0 = rotation_matrix(angle, direc, point) - >>> angle, direc, point = rotation_from_matrix(R0) - >>> R1 = rotation_matrix(angle, direc, point) - >>> is_same_transform(R0, R1) - True - - """ - R = numpy.array(matrix, dtype=numpy.float64, copy=False) - R33 = R[:3, :3] - # direction: unit eigenvector of R33 corresponding to eigenvalue of 1 - w, W = numpy.linalg.eig(R33.T) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no unit eigenvector corresponding to eigenvalue 1") - direction = numpy.real(W[:, i[-1]]).squeeze() - # point: unit eigenvector of R33 corresponding to eigenvalue of 1 - w, Q = numpy.linalg.eig(R) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no unit eigenvector corresponding to eigenvalue 1") - point = numpy.real(Q[:, i[-1]]).squeeze() - point /= point[3] - # rotation angle depending on direction - cosa = (numpy.trace(R33) - 1.0) / 2.0 - if abs(direction[2]) > 1e-8: - sina = (R[1, 0] + (cosa-1.0)*direction[0]*direction[1]) / direction[2] - elif abs(direction[1]) > 1e-8: - sina = (R[0, 2] + (cosa-1.0)*direction[0]*direction[2]) / direction[1] - else: - sina = (R[2, 1] + (cosa-1.0)*direction[1]*direction[2]) / direction[0] - angle = math.atan2(sina, cosa) - return angle, direction, point - - -def scale_matrix(factor, origin=None, direction=None): - """Return matrix to scale by factor around origin in direction. - - Use factor -1 for point symmetry. 
- - >>> v = (numpy.random.rand(4, 5) - 0.5) * 20 - >>> v[3] = 1 - >>> S = scale_matrix(-1.234) - >>> numpy.allclose(numpy.dot(S, v)[:3], -1.234*v[:3]) - True - >>> factor = random.random() * 10 - 5 - >>> origin = numpy.random.random(3) - 0.5 - >>> direct = numpy.random.random(3) - 0.5 - >>> S = scale_matrix(factor, origin) - >>> S = scale_matrix(factor, origin, direct) - - """ - if direction is None: - # uniform scaling - M = numpy.diag([factor, factor, factor, 1.0]) - if origin is not None: - M[:3, 3] = origin[:3] - M[:3, 3] *= 1.0 - factor - else: - # nonuniform scaling - direction = unit_vector(direction[:3]) - factor = 1.0 - factor - M = numpy.identity(4) - M[:3, :3] -= factor * numpy.outer(direction, direction) - if origin is not None: - M[:3, 3] = (factor * numpy.dot(origin[:3], direction)) * direction - return M - - -def scale_from_matrix(matrix): - """Return scaling factor, origin and direction from scaling matrix. - - >>> factor = random.random() * 10 - 5 - >>> origin = numpy.random.random(3) - 0.5 - >>> direct = numpy.random.random(3) - 0.5 - >>> S0 = scale_matrix(factor, origin) - >>> factor, origin, direction = scale_from_matrix(S0) - >>> S1 = scale_matrix(factor, origin, direction) - >>> is_same_transform(S0, S1) - True - >>> S0 = scale_matrix(factor, origin, direct) - >>> factor, origin, direction = scale_from_matrix(S0) - >>> S1 = scale_matrix(factor, origin, direction) - >>> is_same_transform(S0, S1) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=False) - M33 = M[:3, :3] - factor = numpy.trace(M33) - 2.0 - try: - # direction: unit eigenvector corresponding to eigenvalue factor - w, V = numpy.linalg.eig(M33) - i = numpy.where(abs(numpy.real(w) - factor) < 1e-8)[0][0] - direction = numpy.real(V[:, i]).squeeze() - direction /= vector_norm(direction) - except IndexError: - # uniform scaling - factor = (factor + 2.0) / 3.0 - direction = None - # origin: any eigenvector corresponding to eigenvalue 1 - w, V = numpy.linalg.eig(M) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no eigenvector corresponding to eigenvalue 1") - origin = numpy.real(V[:, i[-1]]).squeeze() - origin /= origin[3] - return factor, origin, direction - - -def projection_matrix(point, normal, direction=None, - perspective=None, pseudo=False): - """Return matrix to project onto plane defined by point and normal. - - Using either perspective point, projection direction, or none of both. - - If pseudo is True, perspective projections will preserve relative depth - such that Perspective = dot(Orthogonal, PseudoPerspective). 
- - >>> P = projection_matrix([0, 0, 0], [1, 0, 0]) - >>> numpy.allclose(P[1:, 1:], numpy.identity(4)[1:, 1:]) - True - >>> point = numpy.random.random(3) - 0.5 - >>> normal = numpy.random.random(3) - 0.5 - >>> direct = numpy.random.random(3) - 0.5 - >>> persp = numpy.random.random(3) - 0.5 - >>> P0 = projection_matrix(point, normal) - >>> P1 = projection_matrix(point, normal, direction=direct) - >>> P2 = projection_matrix(point, normal, perspective=persp) - >>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True) - >>> is_same_transform(P2, numpy.dot(P0, P3)) - True - >>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0]) - >>> v0 = (numpy.random.rand(4, 5) - 0.5) * 20 - >>> v0[3] = 1 - >>> v1 = numpy.dot(P, v0) - >>> numpy.allclose(v1[1], v0[1]) - True - >>> numpy.allclose(v1[0], 3-v1[1]) - True - - """ - M = numpy.identity(4) - point = numpy.array(point[:3], dtype=numpy.float64, copy=False) - normal = unit_vector(normal[:3]) - if perspective is not None: - # perspective projection - perspective = numpy.array(perspective[:3], dtype=numpy.float64, - copy=False) - M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective-point, normal) - M[:3, :3] -= numpy.outer(perspective, normal) - if pseudo: - # preserve relative depth - M[:3, :3] -= numpy.outer(normal, normal) - M[:3, 3] = numpy.dot(point, normal) * (perspective+normal) - else: - M[:3, 3] = numpy.dot(point, normal) * perspective - M[3, :3] = -normal - M[3, 3] = numpy.dot(perspective, normal) - elif direction is not None: - # parallel projection - direction = numpy.array(direction[:3], dtype=numpy.float64, copy=False) - scale = numpy.dot(direction, normal) - M[:3, :3] -= numpy.outer(direction, normal) / scale - M[:3, 3] = direction * (numpy.dot(point, normal) / scale) - else: - # orthogonal projection - M[:3, :3] -= numpy.outer(normal, normal) - M[:3, 3] = numpy.dot(point, normal) * normal - return M - - -def projection_from_matrix(matrix, pseudo=False): - """Return projection plane and perspective point from projection matrix. - - Return values are same as arguments for projection_matrix function: - point, normal, direction, perspective, and pseudo. 
- - >>> point = numpy.random.random(3) - 0.5 - >>> normal = numpy.random.random(3) - 0.5 - >>> direct = numpy.random.random(3) - 0.5 - >>> persp = numpy.random.random(3) - 0.5 - >>> P0 = projection_matrix(point, normal) - >>> result = projection_from_matrix(P0) - >>> P1 = projection_matrix(*result) - >>> is_same_transform(P0, P1) - True - >>> P0 = projection_matrix(point, normal, direct) - >>> result = projection_from_matrix(P0) - >>> P1 = projection_matrix(*result) - >>> is_same_transform(P0, P1) - True - >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False) - >>> result = projection_from_matrix(P0, pseudo=False) - >>> P1 = projection_matrix(*result) - >>> is_same_transform(P0, P1) - True - >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True) - >>> result = projection_from_matrix(P0, pseudo=True) - >>> P1 = projection_matrix(*result) - >>> is_same_transform(P0, P1) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=False) - M33 = M[:3, :3] - w, V = numpy.linalg.eig(M) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not pseudo and len(i): - # point: any eigenvector corresponding to eigenvalue 1 - point = numpy.real(V[:, i[-1]]).squeeze() - point /= point[3] - # direction: unit eigenvector corresponding to eigenvalue 0 - w, V = numpy.linalg.eig(M33) - i = numpy.where(abs(numpy.real(w)) < 1e-8)[0] - if not len(i): - raise ValueError("no eigenvector corresponding to eigenvalue 0") - direction = numpy.real(V[:, i[0]]).squeeze() - direction /= vector_norm(direction) - # normal: unit eigenvector of M33.T corresponding to eigenvalue 0 - w, V = numpy.linalg.eig(M33.T) - i = numpy.where(abs(numpy.real(w)) < 1e-8)[0] - if len(i): - # parallel projection - normal = numpy.real(V[:, i[0]]).squeeze() - normal /= vector_norm(normal) - return point, normal, direction, None, False - else: - # orthogonal projection, where normal equals direction vector - return point, direction, None, None, False - else: - # perspective projection - i = numpy.where(abs(numpy.real(w)) > 1e-8)[0] - if not len(i): - raise ValueError( - "no eigenvector not corresponding to eigenvalue 0") - point = numpy.real(V[:, i[-1]]).squeeze() - point /= point[3] - normal = - M[3, :3] - perspective = M[:3, 3] / numpy.dot(point[:3], normal) - if pseudo: - perspective -= normal - return point, normal, None, perspective, pseudo - - -def clip_matrix(left, right, bottom, top, near, far, perspective=False): - """Return matrix to obtain normalized device coordinates from frustum. - - The frustum bounds are axis-aligned along x (left, right), - y (bottom, top) and z (near, far). - - Normalized device coordinates are in range [-1, 1] if coordinates are - inside the frustum. - - If perspective is True the frustum is a truncated pyramid with the - perspective point at origin and direction along z axis, otherwise an - orthographic canonical view volume (a box). - - Homogeneous coordinates transformed by the perspective clip matrix - need to be dehomogenized (divided by w coordinate). 
- - >>> frustum = numpy.random.rand(6) - >>> frustum[1] += frustum[0] - >>> frustum[3] += frustum[2] - >>> frustum[5] += frustum[4] - >>> M = clip_matrix(perspective=False, *frustum) - >>> numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1]) - array([-1., -1., -1., 1.]) - >>> numpy.dot(M, [frustum[1], frustum[3], frustum[5], 1]) - array([ 1., 1., 1., 1.]) - >>> M = clip_matrix(perspective=True, *frustum) - >>> v = numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1]) - >>> v / v[3] - array([-1., -1., -1., 1.]) - >>> v = numpy.dot(M, [frustum[1], frustum[3], frustum[4], 1]) - >>> v / v[3] - array([ 1., 1., -1., 1.]) - - """ - if left >= right or bottom >= top or near >= far: - raise ValueError("invalid frustum") - if perspective: - if near <= _EPS: - raise ValueError("invalid frustum: near <= 0") - t = 2.0 * near - M = [[t/(left-right), 0.0, (right+left)/(right-left), 0.0], - [0.0, t/(bottom-top), (top+bottom)/(top-bottom), 0.0], - [0.0, 0.0, (far+near)/(near-far), t*far/(far-near)], - [0.0, 0.0, -1.0, 0.0]] - else: - M = [[2.0/(right-left), 0.0, 0.0, (right+left)/(left-right)], - [0.0, 2.0/(top-bottom), 0.0, (top+bottom)/(bottom-top)], - [0.0, 0.0, 2.0/(far-near), (far+near)/(near-far)], - [0.0, 0.0, 0.0, 1.0]] - return numpy.array(M) - - -def shear_matrix(angle, direction, point, normal): - """Return matrix to shear by angle along direction vector on shear plane. - - The shear plane is defined by a point and normal vector. The direction - vector must be orthogonal to the plane's normal vector. - - A point P is transformed by the shear matrix into P" such that - the vector P-P" is parallel to the direction vector and its extent is - given by the angle of P-P'-P", where P' is the orthogonal projection - of P onto the shear plane. - - >>> angle = (random.random() - 0.5) * 4*math.pi - >>> direct = numpy.random.random(3) - 0.5 - >>> point = numpy.random.random(3) - 0.5 - >>> normal = numpy.cross(direct, numpy.random.random(3)) - >>> S = shear_matrix(angle, direct, point, normal) - >>> numpy.allclose(1, numpy.linalg.det(S)) - True - - """ - normal = unit_vector(normal[:3]) - direction = unit_vector(direction[:3]) - if abs(numpy.dot(normal, direction)) > 1e-6: - raise ValueError("direction and normal vectors are not orthogonal") - angle = math.tan(angle) - M = numpy.identity(4) - M[:3, :3] += angle * numpy.outer(direction, normal) - M[:3, 3] = -angle * numpy.dot(point[:3], normal) * direction - return M - - -def shear_from_matrix(matrix): - """Return shear angle, direction and plane from shear matrix. 
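# Usage sketch for clip_matrix above: a symmetric perspective frustum built from a
# vertical field of view. The fovy/aspect/near/far values are illustrative only, and
# the import name `transformations` is an assumption.
import math
import numpy
import transformations as tf

fovy = math.radians(60.0)
aspect, near, far = 16.0 / 9.0, 0.1, 100.0
top = near * math.tan(fovy / 2.0)
right = top * aspect
M = tf.clip_matrix(-right, right, -top, top, near, far, perspective=True)

# The top-right corner of the near plane maps to the NDC corner (1, 1, -1).
v = numpy.dot(M, [right, top, near, 1.0])
assert numpy.allclose(v / v[3], [1.0, 1.0, -1.0, 1.0])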
- - >>> angle = (random.random() - 0.5) * 4*math.pi - >>> direct = numpy.random.random(3) - 0.5 - >>> point = numpy.random.random(3) - 0.5 - >>> normal = numpy.cross(direct, numpy.random.random(3)) - >>> S0 = shear_matrix(angle, direct, point, normal) - >>> angle, direct, point, normal = shear_from_matrix(S0) - >>> S1 = shear_matrix(angle, direct, point, normal) - >>> is_same_transform(S0, S1) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=False) - M33 = M[:3, :3] - # normal: cross independent eigenvectors corresponding to the eigenvalue 1 - w, V = numpy.linalg.eig(M33) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-4)[0] - if len(i) < 2: - raise ValueError("no two linear independent eigenvectors found %s" % w) - V = numpy.real(V[:, i]).squeeze().T - lenorm = -1.0 - for i0, i1 in ((0, 1), (0, 2), (1, 2)): - n = numpy.cross(V[i0], V[i1]) - w = vector_norm(n) - if w > lenorm: - lenorm = w - normal = n - normal /= lenorm - # direction and angle - direction = numpy.dot(M33 - numpy.identity(3), normal) - angle = vector_norm(direction) - direction /= angle - angle = math.atan(angle) - # point: eigenvector corresponding to eigenvalue 1 - w, V = numpy.linalg.eig(M) - i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] - if not len(i): - raise ValueError("no eigenvector corresponding to eigenvalue 1") - point = numpy.real(V[:, i[-1]]).squeeze() - point /= point[3] - return angle, direction, point, normal - - -def decompose_matrix(matrix): - """Return sequence of transformations from transformation matrix. - - matrix : array_like - Non-degenerative homogeneous transformation matrix - - Return tuple of: - scale : vector of 3 scaling factors - shear : list of shear factors for x-y, x-z, y-z axes - angles : list of Euler angles about static x, y, z axes - translate : translation vector along x, y, z axes - perspective : perspective partition of matrix - - Raise ValueError if matrix is of wrong type or degenerative. 
- - >>> T0 = translation_matrix([1, 2, 3]) - >>> scale, shear, angles, trans, persp = decompose_matrix(T0) - >>> T1 = translation_matrix(trans) - >>> numpy.allclose(T0, T1) - True - >>> S = scale_matrix(0.123) - >>> scale, shear, angles, trans, persp = decompose_matrix(S) - >>> scale[0] - 0.123 - >>> R0 = euler_matrix(1, 2, 3) - >>> scale, shear, angles, trans, persp = decompose_matrix(R0) - >>> R1 = euler_matrix(*angles) - >>> numpy.allclose(R0, R1) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=True).T - if abs(M[3, 3]) < _EPS: - raise ValueError("M[3, 3] is zero") - M /= M[3, 3] - P = M.copy() - P[:, 3] = 0.0, 0.0, 0.0, 1.0 - if not numpy.linalg.det(P): - raise ValueError("matrix is singular") - - scale = numpy.zeros((3, )) - shear = [0.0, 0.0, 0.0] - angles = [0.0, 0.0, 0.0] - - if any(abs(M[:3, 3]) > _EPS): - perspective = numpy.dot(M[:, 3], numpy.linalg.inv(P.T)) - M[:, 3] = 0.0, 0.0, 0.0, 1.0 - else: - perspective = numpy.array([0.0, 0.0, 0.0, 1.0]) - - translate = M[3, :3].copy() - M[3, :3] = 0.0 - - row = M[:3, :3].copy() - scale[0] = vector_norm(row[0]) - row[0] /= scale[0] - shear[0] = numpy.dot(row[0], row[1]) - row[1] -= row[0] * shear[0] - scale[1] = vector_norm(row[1]) - row[1] /= scale[1] - shear[0] /= scale[1] - shear[1] = numpy.dot(row[0], row[2]) - row[2] -= row[0] * shear[1] - shear[2] = numpy.dot(row[1], row[2]) - row[2] -= row[1] * shear[2] - scale[2] = vector_norm(row[2]) - row[2] /= scale[2] - shear[1:] /= scale[2] - - if numpy.dot(row[0], numpy.cross(row[1], row[2])) < 0: - numpy.negative(scale, scale) - numpy.negative(row, row) - - angles[1] = math.asin(-row[0, 2]) - if math.cos(angles[1]): - angles[0] = math.atan2(row[1, 2], row[2, 2]) - angles[2] = math.atan2(row[0, 1], row[0, 0]) - else: - #angles[0] = math.atan2(row[1, 0], row[1, 1]) - angles[0] = math.atan2(-row[2, 1], row[1, 1]) - angles[2] = 0.0 - - return scale, shear, angles, translate, perspective - - -def compose_matrix(scale=None, shear=None, angles=None, translate=None, - perspective=None): - """Return transformation matrix from sequence of transformations. - - This is the inverse of the decompose_matrix function. 
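# Usage sketch for decompose_matrix/compose_matrix above: build a matrix from known
# parts, recover the parts, and rebuild an equivalent matrix. The import name
# `transformations` is an assumption.
import numpy
import transformations as tf

T = tf.translation_matrix([1.0, 2.0, 3.0])
R = tf.euler_matrix(0.1, -0.2, 0.3, 'sxyz')
S = tf.scale_matrix(0.5)
M = tf.concatenate_matrices(T, R, S)

scale, shear, angles, translate, perspective = tf.decompose_matrix(M)
assert numpy.allclose(scale, 0.5)
assert numpy.allclose(translate, [1.0, 2.0, 3.0])
assert tf.is_same_transform(R, tf.euler_matrix(*angles))

# compose_matrix is the inverse operation on the recovered sequence.
M2 = tf.compose_matrix(scale, shear, angles, translate, perspective)
assert tf.is_same_transform(M, M2)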
- - Sequence of transformations: - scale : vector of 3 scaling factors - shear : list of shear factors for x-y, x-z, y-z axes - angles : list of Euler angles about static x, y, z axes - translate : translation vector along x, y, z axes - perspective : perspective partition of matrix - - >>> scale = numpy.random.random(3) - 0.5 - >>> shear = numpy.random.random(3) - 0.5 - >>> angles = (numpy.random.random(3) - 0.5) * (2*math.pi) - >>> trans = numpy.random.random(3) - 0.5 - >>> persp = numpy.random.random(4) - 0.5 - >>> M0 = compose_matrix(scale, shear, angles, trans, persp) - >>> result = decompose_matrix(M0) - >>> M1 = compose_matrix(*result) - >>> is_same_transform(M0, M1) - True - - """ - M = numpy.identity(4) - if perspective is not None: - P = numpy.identity(4) - P[3, :] = perspective[:4] - M = numpy.dot(M, P) - if translate is not None: - T = numpy.identity(4) - T[:3, 3] = translate[:3] - M = numpy.dot(M, T) - if angles is not None: - R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz') - M = numpy.dot(M, R) - if shear is not None: - Z = numpy.identity(4) - Z[1, 2] = shear[2] - Z[0, 2] = shear[1] - Z[0, 1] = shear[0] - M = numpy.dot(M, Z) - if scale is not None: - S = numpy.identity(4) - S[0, 0] = scale[0] - S[1, 1] = scale[1] - S[2, 2] = scale[2] - M = numpy.dot(M, S) - M /= M[3, 3] - return M - - -def orthogonalization_matrix(lengths, angles): - """Return orthogonalization matrix for crystallographic cell coordinates. - - Angles are expected in degrees. - - The de-orthogonalization matrix is the inverse. - - >>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90]) - >>> numpy.allclose(O[:3, :3], numpy.identity(3, float) * 10) - True - >>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7]) - >>> numpy.allclose(numpy.sum(O), 43.063229) - True - - """ - a, b, c = lengths - angles = numpy.radians(angles) - sina, sinb, _ = numpy.sin(angles) - cosa, cosb, cosg = numpy.cos(angles) - co = (cosa * cosb - cosg) / (sina * sinb) - return numpy.array([ - [ a*sinb*math.sqrt(1.0-co*co), 0.0, 0.0, 0.0], - [-a*sinb*co, b*sina, 0.0, 0.0], - [ a*cosb, b*cosa, c, 0.0], - [ 0.0, 0.0, 0.0, 1.0]]) - - -def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True): - """Return affine transform matrix to register two point sets. - - v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous - coordinates, where ndims is the dimensionality of the coordinate space. - - If shear is False, a similarity transformation matrix is returned. - If also scale is False, a rigid/Euclidean transformation matrix - is returned. - - By default the algorithm by Hartley and Zissermann [15] is used. - If usesvd is True, similarity and Euclidean transformation matrices - are calculated by minimizing the weighted sum of squared deviations - (RMSD) according to the algorithm by Kabsch [8]. - Otherwise, and if ndims is 3, the quaternion based algorithm by Horn [9] - is used, which is slower when using this Python implementation. - - The returned matrix performs rotation, translation and uniform scaling - (if specified). - - >>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]] - >>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]] - >>> affine_matrix_from_points(v0, v1) - array([[ 0.14549, 0.00062, 675.50008], - [ 0.00048, 0.14094, 53.24971], - [ 0. , 0. , 1. 
]]) - >>> T = translation_matrix(numpy.random.random(3)-0.5) - >>> R = random_rotation_matrix(numpy.random.random(3)) - >>> S = scale_matrix(random.random()) - >>> M = concatenate_matrices(T, R, S) - >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20 - >>> v0[3] = 1 - >>> v1 = numpy.dot(M, v0) - >>> v0[:3] += numpy.random.normal(0, 1e-8, 300).reshape(3, -1) - >>> M = affine_matrix_from_points(v0[:3], v1[:3]) - >>> numpy.allclose(v1, numpy.dot(M, v0)) - True - - More examples in superimposition_matrix() - - """ - v0 = numpy.array(v0, dtype=numpy.float64, copy=True) - v1 = numpy.array(v1, dtype=numpy.float64, copy=True) - - ndims = v0.shape[0] - if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape: - raise ValueError("input arrays are of wrong shape or type") - - # move centroids to origin - t0 = -numpy.mean(v0, axis=1) - M0 = numpy.identity(ndims+1) - M0[:ndims, ndims] = t0 - v0 += t0.reshape(ndims, 1) - t1 = -numpy.mean(v1, axis=1) - M1 = numpy.identity(ndims+1) - M1[:ndims, ndims] = t1 - v1 += t1.reshape(ndims, 1) - - if shear: - # Affine transformation - A = numpy.concatenate((v0, v1), axis=0) - u, s, vh = numpy.linalg.svd(A.T) - vh = vh[:ndims].T - B = vh[:ndims] - C = vh[ndims:2*ndims] - t = numpy.dot(C, numpy.linalg.pinv(B)) - t = numpy.concatenate((t, numpy.zeros((ndims, 1))), axis=1) - M = numpy.vstack((t, ((0.0,)*ndims) + (1.0,))) - elif usesvd or ndims != 3: - # Rigid transformation via SVD of covariance matrix - u, s, vh = numpy.linalg.svd(numpy.dot(v1, v0.T)) - # rotation matrix from SVD orthonormal bases - R = numpy.dot(u, vh) - if numpy.linalg.det(R) < 0.0: - # R does not constitute right handed system - R -= numpy.outer(u[:, ndims-1], vh[ndims-1, :]*2.0) - s[-1] *= -1.0 - # homogeneous transformation matrix - M = numpy.identity(ndims+1) - M[:ndims, :ndims] = R - else: - # Rigid transformation matrix via quaternion - # compute symmetric matrix N - xx, yy, zz = numpy.sum(v0 * v1, axis=1) - xy, yz, zx = numpy.sum(v0 * numpy.roll(v1, -1, axis=0), axis=1) - xz, yx, zy = numpy.sum(v0 * numpy.roll(v1, -2, axis=0), axis=1) - N = [[xx+yy+zz, 0.0, 0.0, 0.0], - [yz-zy, xx-yy-zz, 0.0, 0.0], - [zx-xz, xy+yx, yy-xx-zz, 0.0], - [xy-yx, zx+xz, yz+zy, zz-xx-yy]] - # quaternion: eigenvector corresponding to most positive eigenvalue - w, V = numpy.linalg.eigh(N) - q = V[:, numpy.argmax(w)] - q /= vector_norm(q) # unit quaternion - # homogeneous transformation matrix - M = quaternion_matrix(q) - - if scale and not shear: - # Affine transformation; scale is ratio of RMS deviations from centroid - v0 *= v0 - v1 *= v1 - M[:ndims, :ndims] *= math.sqrt(numpy.sum(v1) / numpy.sum(v0)) - - # move centroids back - M = numpy.dot(numpy.linalg.inv(M1), numpy.dot(M, M0)) - M /= M[ndims, ndims] - return M - - -def superimposition_matrix(v0, v1, scale=False, usesvd=True): - """Return matrix to transform given 3D point set into second point set. - - v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points. - - The parameters scale and usesvd are explained in the more general - affine_matrix_from_points function. - - The returned matrix is a similarity or Euclidean transformation matrix. - This function has a fast C implementation in transformations.c. 
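# Usage sketch for the point-set registration functions: recover a known rigid
# transform from corresponding 3D points. The import name `transformations` is an
# assumption; the seed and point count are arbitrary.
import numpy
import transformations as tf

numpy.random.seed(0)
src = numpy.random.rand(3, 20)                      # 20 source points, shape (3, n)
M_true = tf.concatenate_matrices(tf.translation_matrix([1.0, -2.0, 0.5]),
                                 tf.rotation_matrix(0.4, [0, 0, 1]))
dst = numpy.dot(M_true, numpy.vstack((src, numpy.ones(20))))[:3]

# superimposition_matrix returns the transform that maps src onto dst.
M_est = tf.superimposition_matrix(src, dst)
assert tf.is_same_transform(M_true, M_est)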
- - >>> v0 = numpy.random.rand(3, 10) - >>> M = superimposition_matrix(v0, v0) - >>> numpy.allclose(M, numpy.identity(4)) - True - >>> R = random_rotation_matrix(numpy.random.random(3)) - >>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]] - >>> v1 = numpy.dot(R, v0) - >>> M = superimposition_matrix(v0, v1) - >>> numpy.allclose(v1, numpy.dot(M, v0)) - True - >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20 - >>> v0[3] = 1 - >>> v1 = numpy.dot(R, v0) - >>> M = superimposition_matrix(v0, v1) - >>> numpy.allclose(v1, numpy.dot(M, v0)) - True - >>> S = scale_matrix(random.random()) - >>> T = translation_matrix(numpy.random.random(3)-0.5) - >>> M = concatenate_matrices(T, R, S) - >>> v1 = numpy.dot(M, v0) - >>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1) - >>> M = superimposition_matrix(v0, v1, scale=True) - >>> numpy.allclose(v1, numpy.dot(M, v0)) - True - >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) - >>> numpy.allclose(v1, numpy.dot(M, v0)) - True - >>> v = numpy.empty((4, 100, 3)) - >>> v[:, :, 0] = v0 - >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) - >>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0])) - True - - """ - v0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3] - v1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3] - return affine_matrix_from_points(v0, v1, shear=False, - scale=scale, usesvd=usesvd) - - -def euler_matrix(ai, aj, ak, axes='sxyz'): - """Return homogeneous rotation matrix from Euler angles and axis sequence. - - ai, aj, ak : Euler's roll, pitch and yaw angles - axes : One of 24 axis sequences as string or encoded tuple - - >>> R = euler_matrix(1, 2, 3, 'syxz') - >>> numpy.allclose(numpy.sum(R[0]), -1.34786452) - True - >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1)) - >>> numpy.allclose(numpy.sum(R[0]), -0.383436184) - True - >>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5) - >>> for axes in _AXES2TUPLE.keys(): - ... R = euler_matrix(ai, aj, ak, axes) - >>> for axes in _TUPLE2AXES.keys(): - ... R = euler_matrix(ai, aj, ak, axes) - - """ - try: - firstaxis, parity, repetition, frame = _AXES2TUPLE[axes] - except (AttributeError, KeyError): - _TUPLE2AXES[axes] # validation - firstaxis, parity, repetition, frame = axes - - i = firstaxis - j = _NEXT_AXIS[i+parity] - k = _NEXT_AXIS[i-parity+1] - - if frame: - ai, ak = ak, ai - if parity: - ai, aj, ak = -ai, -aj, -ak - - si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak) - ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak) - cc, cs = ci*ck, ci*sk - sc, ss = si*ck, si*sk - - M = numpy.identity(4) - if repetition: - M[i, i] = cj - M[i, j] = sj*si - M[i, k] = sj*ci - M[j, i] = sj*sk - M[j, j] = -cj*ss+cc - M[j, k] = -cj*cs-sc - M[k, i] = -sj*ck - M[k, j] = cj*sc+cs - M[k, k] = cj*cc-ss - else: - M[i, i] = cj*ck - M[i, j] = sj*sc-cs - M[i, k] = sj*cc+ss - M[j, i] = cj*sk - M[j, j] = sj*ss+cc - M[j, k] = sj*cs-sc - M[k, i] = -sj - M[k, j] = cj*si - M[k, k] = cj*ci - return M - - -def euler_from_matrix(matrix, axes='sxyz'): - """Return Euler angles from rotation matrix for specified axis sequence. - - axes : One of 24 axis sequences as string or encoded tuple - - Note that many Euler angle triplets can describe one matrix. - - >>> R0 = euler_matrix(1, 2, 3, 'syxz') - >>> al, be, ga = euler_from_matrix(R0, 'syxz') - >>> R1 = euler_matrix(al, be, ga, 'syxz') - >>> numpy.allclose(R0, R1) - True - >>> angles = (4*math.pi) * (numpy.random.random(3) - 0.5) - >>> for axes in _AXES2TUPLE.keys(): - ... R0 = euler_matrix(axes=axes, *angles) - ... 
R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes)) - ... if not numpy.allclose(R0, R1): print(axes, "failed") - - """ - try: - firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] - except (AttributeError, KeyError): - _TUPLE2AXES[axes] # validation - firstaxis, parity, repetition, frame = axes - - i = firstaxis - j = _NEXT_AXIS[i+parity] - k = _NEXT_AXIS[i-parity+1] - - M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3] - if repetition: - sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k]) - if sy > _EPS: - ax = math.atan2( M[i, j], M[i, k]) - ay = math.atan2( sy, M[i, i]) - az = math.atan2( M[j, i], -M[k, i]) - else: - ax = math.atan2(-M[j, k], M[j, j]) - ay = math.atan2( sy, M[i, i]) - az = 0.0 - else: - cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i]) - if cy > _EPS: - ax = math.atan2( M[k, j], M[k, k]) - ay = math.atan2(-M[k, i], cy) - az = math.atan2( M[j, i], M[i, i]) - else: - ax = math.atan2(-M[j, k], M[j, j]) - ay = math.atan2(-M[k, i], cy) - az = 0.0 - - if parity: - ax, ay, az = -ax, -ay, -az - if frame: - ax, az = az, ax - return ax, ay, az - - -def euler_from_quaternion(quaternion, axes='sxyz'): - """Return Euler angles from quaternion for specified axis sequence. - - >>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0]) - >>> numpy.allclose(angles, [0.123, 0, 0]) - True - - """ - return euler_from_matrix(quaternion_matrix(quaternion), axes) - - -def quaternion_from_euler(ai, aj, ak, axes='sxyz'): - """Return quaternion from Euler angles and axis sequence. - - ai, aj, ak : Euler's roll, pitch and yaw angles - axes : One of 24 axis sequences as string or encoded tuple - - >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') - >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) - True - - """ - try: - firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] - except (AttributeError, KeyError): - _TUPLE2AXES[axes] # validation - firstaxis, parity, repetition, frame = axes - - i = firstaxis + 1 - j = _NEXT_AXIS[i+parity-1] + 1 - k = _NEXT_AXIS[i-parity] + 1 - - if frame: - ai, ak = ak, ai - if parity: - aj = -aj - - ai /= 2.0 - aj /= 2.0 - ak /= 2.0 - ci = math.cos(ai) - si = math.sin(ai) - cj = math.cos(aj) - sj = math.sin(aj) - ck = math.cos(ak) - sk = math.sin(ak) - cc = ci*ck - cs = ci*sk - sc = si*ck - ss = si*sk - - q = numpy.empty((4, )) - if repetition: - q[0] = cj*(cc - ss) - q[i] = cj*(cs + sc) - q[j] = sj*(cc + ss) - q[k] = sj*(cs - sc) - else: - q[0] = cj*cc + sj*ss - q[i] = cj*sc - sj*cs - q[j] = cj*ss + sj*cc - q[k] = cj*cs - sj*sc - if parity: - q[j] *= -1.0 - - return q - - -def quaternion_about_axis(angle, axis): - """Return quaternion for rotation about axis. - - >>> q = quaternion_about_axis(0.123, [1, 0, 0]) - >>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0]) - True - - """ - q = numpy.array([0.0, axis[0], axis[1], axis[2]]) - qlen = vector_norm(q) - if qlen > _EPS: - q *= math.sin(angle/2.0) / qlen - q[0] = math.cos(angle/2.0) - return q - - -def quaternion_matrix(quaternion): - """Return homogeneous rotation matrix from quaternion. 
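# Usage sketch for the Euler/quaternion conversions above: a round trip through a
# quaternion and a rotation matrix with the same 'sxyz' axis convention. The import
# name `transformations` is an assumption.
import numpy
import transformations as tf

angles = (0.3, -0.7, 1.2)            # roll, pitch, yaw about static x, y, z axes
q = tf.quaternion_from_euler(angles[0], angles[1], angles[2], 'sxyz')
R = tf.quaternion_matrix(q)
assert numpy.allclose(tf.euler_from_matrix(R, 'sxyz'), angles)
assert numpy.allclose(tf.euler_from_quaternion(q, 'sxyz'), angles)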
- - >>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0]) - >>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0])) - True - >>> M = quaternion_matrix([1, 0, 0, 0]) - >>> numpy.allclose(M, numpy.identity(4)) - True - >>> M = quaternion_matrix([0, 1, 0, 0]) - >>> numpy.allclose(M, numpy.diag([1, -1, -1, 1])) - True - - """ - q = numpy.array(quaternion, dtype=numpy.float64, copy=True) - n = numpy.dot(q, q) - if n < _EPS: - return numpy.identity(4) - q *= math.sqrt(2.0 / n) - q = numpy.outer(q, q) - return numpy.array([ - [1.0-q[2, 2]-q[3, 3], q[1, 2]-q[3, 0], q[1, 3]+q[2, 0], 0.0], - [ q[1, 2]+q[3, 0], 1.0-q[1, 1]-q[3, 3], q[2, 3]-q[1, 0], 0.0], - [ q[1, 3]-q[2, 0], q[2, 3]+q[1, 0], 1.0-q[1, 1]-q[2, 2], 0.0], - [ 0.0, 0.0, 0.0, 1.0]]) - - -def quaternion_from_matrix(matrix, isprecise=False): - """Return quaternion from rotation matrix. - - If isprecise is True, the input matrix is assumed to be a precise rotation - matrix and a faster algorithm is used. - - >>> q = quaternion_from_matrix(numpy.identity(4), True) - >>> numpy.allclose(q, [1, 0, 0, 0]) - True - >>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1])) - >>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0]) - True - >>> R = rotation_matrix(0.123, (1, 2, 3)) - >>> q = quaternion_from_matrix(R, True) - >>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786]) - True - >>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0], - ... [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]] - >>> q = quaternion_from_matrix(R) - >>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611]) - True - >>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0], - ... [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]] - >>> q = quaternion_from_matrix(R) - >>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603]) - True - >>> R = random_rotation_matrix() - >>> q = quaternion_from_matrix(R) - >>> is_same_transform(R, quaternion_matrix(q)) - True - >>> R = euler_matrix(0.0, 0.0, numpy.pi/2.0) - >>> numpy.allclose(quaternion_from_matrix(R, isprecise=False), - ... quaternion_from_matrix(R, isprecise=True)) - True - - """ - M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4] - if isprecise: - q = numpy.empty((4, )) - t = numpy.trace(M) - if t > M[3, 3]: - q[0] = t - q[3] = M[1, 0] - M[0, 1] - q[2] = M[0, 2] - M[2, 0] - q[1] = M[2, 1] - M[1, 2] - else: - i, j, k = 1, 2, 3 - if M[1, 1] > M[0, 0]: - i, j, k = 2, 3, 1 - if M[2, 2] > M[i, i]: - i, j, k = 3, 1, 2 - t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3] - q[i] = t - q[j] = M[i, j] + M[j, i] - q[k] = M[k, i] + M[i, k] - q[3] = M[k, j] - M[j, k] - q *= 0.5 / math.sqrt(t * M[3, 3]) - else: - m00 = M[0, 0] - m01 = M[0, 1] - m02 = M[0, 2] - m10 = M[1, 0] - m11 = M[1, 1] - m12 = M[1, 2] - m20 = M[2, 0] - m21 = M[2, 1] - m22 = M[2, 2] - # symmetric matrix K - K = numpy.array([[m00-m11-m22, 0.0, 0.0, 0.0], - [m01+m10, m11-m00-m22, 0.0, 0.0], - [m02+m20, m12+m21, m22-m00-m11, 0.0], - [m21-m12, m02-m20, m10-m01, m00+m11+m22]]) - K /= 3.0 - # quaternion is eigenvector of K that corresponds to largest eigenvalue - w, V = numpy.linalg.eigh(K) - q = V[[3, 0, 1, 2], numpy.argmax(w)] - if q[0] < 0.0: - numpy.negative(q, q) - return q - - -def quaternion_multiply(quaternion1, quaternion0): - """Return multiplication of two quaternions. 
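# Usage sketch: composing rotations with quaternion_multiply matches composing the
# corresponding rotation matrices. The import name `transformations` is an assumption.
import numpy
import transformations as tf

q1 = tf.quaternion_about_axis(0.5, [1, 0, 0])
q2 = tf.quaternion_about_axis(-0.3, [0, 1, 0])
q = tf.quaternion_multiply(q1, q2)
R = numpy.dot(tf.quaternion_matrix(q1), tf.quaternion_matrix(q2))
assert tf.is_same_transform(R, tf.quaternion_matrix(q))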
- - >>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7]) - >>> numpy.allclose(q, [28, -44, -14, 48]) - True - - """ - w0, x0, y0, z0 = quaternion0 - w1, x1, y1, z1 = quaternion1 - return numpy.array([-x1*x0 - y1*y0 - z1*z0 + w1*w0, - x1*w0 + y1*z0 - z1*y0 + w1*x0, - -x1*z0 + y1*w0 + z1*x0 + w1*y0, - x1*y0 - y1*x0 + z1*w0 + w1*z0], dtype=numpy.float64) - - -def quaternion_conjugate(quaternion): - """Return conjugate of quaternion. - - >>> q0 = random_quaternion() - >>> q1 = quaternion_conjugate(q0) - >>> q1[0] == q0[0] and all(q1[1:] == -q0[1:]) - True - - """ - q = numpy.array(quaternion, dtype=numpy.float64, copy=True) - numpy.negative(q[1:], q[1:]) - return q - - -def quaternion_inverse(quaternion): - """Return inverse of quaternion. - - >>> q0 = random_quaternion() - >>> q1 = quaternion_inverse(q0) - >>> numpy.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) - True - - """ - q = numpy.array(quaternion, dtype=numpy.float64, copy=True) - numpy.negative(q[1:], q[1:]) - return q / numpy.dot(q, q) - - -def quaternion_real(quaternion): - """Return real part of quaternion. - - >>> quaternion_real([3, 0, 1, 2]) - 3.0 - - """ - return float(quaternion[0]) - - -def quaternion_imag(quaternion): - """Return imaginary part of quaternion. - - >>> quaternion_imag([3, 0, 1, 2]) - array([ 0., 1., 2.]) - - """ - return numpy.array(quaternion[1:4], dtype=numpy.float64, copy=True) - - -def quaternion_slerp(quat0, quat1, fraction, spin=0, shortestpath=True): - """Return spherical linear interpolation between two quaternions. - - >>> q0 = random_quaternion() - >>> q1 = random_quaternion() - >>> q = quaternion_slerp(q0, q1, 0) - >>> numpy.allclose(q, q0) - True - >>> q = quaternion_slerp(q0, q1, 1, 1) - >>> numpy.allclose(q, q1) - True - >>> q = quaternion_slerp(q0, q1, 0.5) - >>> angle = math.acos(numpy.dot(q0, q)) - >>> numpy.allclose(2, math.acos(numpy.dot(q0, q1)) / angle) or \ - numpy.allclose(2, math.acos(-numpy.dot(q0, q1)) / angle) - True - - """ - q0 = unit_vector(quat0[:4]) - q1 = unit_vector(quat1[:4]) - if fraction == 0.0: - return q0 - elif fraction == 1.0: - return q1 - d = numpy.dot(q0, q1) - if abs(abs(d) - 1.0) < _EPS: - return q0 - if shortestpath and d < 0.0: - # invert rotation - d = -d - numpy.negative(q1, q1) - angle = math.acos(d) + spin * math.pi - if abs(angle) < _EPS: - return q0 - isin = 1.0 / math.sin(angle) - q0 *= math.sin((1.0 - fraction) * angle) * isin - q1 *= math.sin(fraction * angle) * isin - q0 += q1 - return q0 - - -def random_quaternion(rand=None): - """Return uniform random unit quaternion. - - rand: array like or None - Three independent random variables that are uniformly distributed - between 0 and 1. - - >>> q = random_quaternion() - >>> numpy.allclose(1, vector_norm(q)) - True - >>> q = random_quaternion(numpy.random.random(3)) - >>> len(q.shape), q.shape[0]==4 - (1, True) - - """ - if rand is None: - rand = numpy.random.rand(3) - else: - assert len(rand) == 3 - r1 = numpy.sqrt(1.0 - rand[0]) - r2 = numpy.sqrt(rand[0]) - pi2 = math.pi * 2.0 - t1 = pi2 * rand[1] - t2 = pi2 * rand[2] - return numpy.array([numpy.cos(t2)*r2, numpy.sin(t1)*r1, - numpy.cos(t1)*r1, numpy.sin(t2)*r2]) - - -def random_rotation_matrix(rand=None): - """Return uniform random rotation matrix. - - rand: array like - Three independent random variables that are uniformly distributed - between 0 and 1 for each returned quaternion. 
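# Usage sketch for quaternion_slerp above: interpolating halfway between two
# rotations about the same axis yields the rotation by the mean angle. The import
# name `transformations` is an assumption.
import numpy
import transformations as tf

q0 = tf.quaternion_about_axis(0.0, [0, 0, 1])
q1 = tf.quaternion_about_axis(1.0, [0, 0, 1])
q = tf.quaternion_slerp(q0, q1, 0.5)
assert numpy.allclose(q, tf.quaternion_about_axis(0.5, [0, 0, 1]))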
- - >>> R = random_rotation_matrix() - >>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4)) - True - - """ - return quaternion_matrix(random_quaternion(rand)) - - -class Arcball(object): - """Virtual Trackball Control. - - >>> ball = Arcball() - >>> ball = Arcball(initial=numpy.identity(4)) - >>> ball.place([320, 320], 320) - >>> ball.down([500, 250]) - >>> ball.drag([475, 275]) - >>> R = ball.matrix() - >>> numpy.allclose(numpy.sum(R), 3.90583455) - True - >>> ball = Arcball(initial=[1, 0, 0, 0]) - >>> ball.place([320, 320], 320) - >>> ball.setaxes([1, 1, 0], [-1, 1, 0]) - >>> ball.constrain = True - >>> ball.down([400, 200]) - >>> ball.drag([200, 400]) - >>> R = ball.matrix() - >>> numpy.allclose(numpy.sum(R), 0.2055924) - True - >>> ball.next() - - """ - def __init__(self, initial=None): - """Initialize virtual trackball control. - - initial : quaternion or rotation matrix - - """ - self._axis = None - self._axes = None - self._radius = 1.0 - self._center = [0.0, 0.0] - self._vdown = numpy.array([0.0, 0.0, 1.0]) - self._constrain = False - if initial is None: - self._qdown = numpy.array([1.0, 0.0, 0.0, 0.0]) - else: - initial = numpy.array(initial, dtype=numpy.float64) - if initial.shape == (4, 4): - self._qdown = quaternion_from_matrix(initial) - elif initial.shape == (4, ): - initial /= vector_norm(initial) - self._qdown = initial - else: - raise ValueError("initial not a quaternion or matrix") - self._qnow = self._qpre = self._qdown - - def place(self, center, radius): - """Place Arcball, e.g. when window size changes. - - center : sequence[2] - Window coordinates of trackball center. - radius : float - Radius of trackball in window coordinates. - - """ - self._radius = float(radius) - self._center[0] = center[0] - self._center[1] = center[1] - - def setaxes(self, *axes): - """Set axes to constrain rotations.""" - if axes is None: - self._axes = None - else: - self._axes = [unit_vector(axis) for axis in axes] - - @property - def constrain(self): - """Return state of constrain to axis mode.""" - return self._constrain - - @constrain.setter - def constrain(self, value): - """Set state of constrain to axis mode.""" - self._constrain = bool(value) - - def down(self, point): - """Set initial cursor window coordinates and pick constrain-axis.""" - self._vdown = arcball_map_to_sphere(point, self._center, self._radius) - self._qdown = self._qpre = self._qnow - if self._constrain and self._axes is not None: - self._axis = arcball_nearest_axis(self._vdown, self._axes) - self._vdown = arcball_constrain_to_axis(self._vdown, self._axis) - else: - self._axis = None - - def drag(self, point): - """Update current cursor window coordinates.""" - vnow = arcball_map_to_sphere(point, self._center, self._radius) - if self._axis is not None: - vnow = arcball_constrain_to_axis(vnow, self._axis) - self._qpre = self._qnow - t = numpy.cross(self._vdown, vnow) - if numpy.dot(t, t) < _EPS: - self._qnow = self._qdown - else: - q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]] - self._qnow = quaternion_multiply(q, self._qdown) - - def next(self, acceleration=0.0): - """Continue rotation in direction of last drag.""" - q = quaternion_slerp(self._qpre, self._qnow, 2.0+acceleration, False) - self._qpre, self._qnow = self._qnow, q - - def matrix(self): - """Return homogeneous rotation matrix.""" - return quaternion_matrix(self._qnow) - - -def arcball_map_to_sphere(point, center, radius): - """Return unit sphere coordinates from window coordinates.""" - v0 = (point[0] - center[0]) / radius - v1 = (center[1] - 
point[1]) / radius - n = v0*v0 + v1*v1 - if n > 1.0: - # position outside of sphere - n = math.sqrt(n) - return numpy.array([v0/n, v1/n, 0.0]) - else: - return numpy.array([v0, v1, math.sqrt(1.0 - n)]) - - -def arcball_constrain_to_axis(point, axis): - """Return sphere point perpendicular to axis.""" - v = numpy.array(point, dtype=numpy.float64, copy=True) - a = numpy.array(axis, dtype=numpy.float64, copy=True) - v -= a * numpy.dot(a, v) # on plane - n = vector_norm(v) - if n > _EPS: - if v[2] < 0.0: - numpy.negative(v, v) - v /= n - return v - if a[2] == 1.0: - return numpy.array([1.0, 0.0, 0.0]) - return unit_vector([-a[1], a[0], 0.0]) - - -def arcball_nearest_axis(point, axes): - """Return axis, which arc is nearest to point.""" - point = numpy.array(point, dtype=numpy.float64, copy=False) - nearest = None - mx = -1.0 - for axis in axes: - t = numpy.dot(arcball_constrain_to_axis(point, axis), point) - if t > mx: - nearest = axis - mx = t - return nearest - - -# epsilon for testing whether a number is close to zero -_EPS = numpy.finfo(float).eps * 4.0 - -# axis sequences for Euler angles -_NEXT_AXIS = [1, 2, 0, 1] - -# map axes strings to/from tuples of inner axis, parity, repetition, frame -_AXES2TUPLE = { - 'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0), - 'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0), - 'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0), - 'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0), - 'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1), - 'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1), - 'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1), - 'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)} - -_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items()) - - -def vector_norm(data, axis=None, out=None): - """Return length, i.e. Euclidean norm, of ndarray along axis. - - >>> v = numpy.random.random(3) - >>> n = vector_norm(v) - >>> numpy.allclose(n, numpy.linalg.norm(v)) - True - >>> v = numpy.random.rand(6, 5, 3) - >>> n = vector_norm(v, axis=-1) - >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2))) - True - >>> n = vector_norm(v, axis=1) - >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1))) - True - >>> v = numpy.random.rand(5, 4, 3) - >>> n = numpy.empty((5, 3)) - >>> vector_norm(v, axis=1, out=n) - >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1))) - True - >>> vector_norm([]) - 0.0 - >>> vector_norm([1]) - 1.0 - - """ - data = numpy.array(data, dtype=numpy.float64, copy=True) - if out is None: - if data.ndim == 1: - return math.sqrt(numpy.dot(data, data)) - data *= data - out = numpy.atleast_1d(numpy.sum(data, axis=axis)) - numpy.sqrt(out, out) - return out - else: - data *= data - numpy.sum(data, axis=axis, out=out) - numpy.sqrt(out, out) - - -def unit_vector(data, axis=None, out=None): - """Return ndarray normalized by length, i.e. Euclidean norm, along axis. 
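# Usage sketch for the Arcball control above, as it would be wired into GUI mouse
# events. The window geometry and pixel coordinates are illustrative only, and the
# import name `transformations` is an assumption.
import transformations as tf

ball = tf.Arcball()
ball.place([320, 240], 240)      # trackball center and radius in window coordinates
ball.down([400, 200])            # mouse button pressed at this pixel
ball.drag([420, 260])            # mouse moved while the button is held
model = ball.matrix()            # 4x4 rotation matrix to hand to the renderer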
- - >>> v0 = numpy.random.random(3) - >>> v1 = unit_vector(v0) - >>> numpy.allclose(v1, v0 / numpy.linalg.norm(v0)) - True - >>> v0 = numpy.random.rand(5, 4, 3) - >>> v1 = unit_vector(v0, axis=-1) - >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=2)), 2) - >>> numpy.allclose(v1, v2) - True - >>> v1 = unit_vector(v0, axis=1) - >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=1)), 1) - >>> numpy.allclose(v1, v2) - True - >>> v1 = numpy.empty((5, 4, 3)) - >>> unit_vector(v0, axis=1, out=v1) - >>> numpy.allclose(v1, v2) - True - >>> list(unit_vector([])) - [] - >>> list(unit_vector([1])) - [1.0] - - """ - if out is None: - data = numpy.array(data, dtype=numpy.float64, copy=True) - if data.ndim == 1: - data /= math.sqrt(numpy.dot(data, data)) - return data - else: - if out is not data: - out[:] = numpy.array(data, copy=False) - data = out - length = numpy.atleast_1d(numpy.sum(data*data, axis)) - numpy.sqrt(length, length) - if axis is not None: - length = numpy.expand_dims(length, axis) - data /= length - if out is None: - return data - - -def random_vector(size): - """Return array of random doubles in the half-open interval [0.0, 1.0). - - >>> v = random_vector(10000) - >>> numpy.all(v >= 0) and numpy.all(v < 1) - True - >>> v0 = random_vector(10) - >>> v1 = random_vector(10) - >>> numpy.any(v0 == v1) - False - - """ - return numpy.random.random(size) - - -def vector_product(v0, v1, axis=0): - """Return vector perpendicular to vectors. - - >>> v = vector_product([2, 0, 0], [0, 3, 0]) - >>> numpy.allclose(v, [0, 0, 6]) - True - >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] - >>> v1 = [[3], [0], [0]] - >>> v = vector_product(v0, v1) - >>> numpy.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]]) - True - >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] - >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] - >>> v = vector_product(v0, v1, axis=1) - >>> numpy.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]]) - True - - """ - return numpy.cross(v0, v1, axis=axis) - - -def angle_between_vectors(v0, v1, directed=True, axis=0): - """Return angle between vectors. - - If directed is False, the input vectors are interpreted as undirected axes, - i.e. the maximum angle is pi/2. - - >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3]) - >>> numpy.allclose(a, math.pi) - True - >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False) - >>> numpy.allclose(a, 0) - True - >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] - >>> v1 = [[3], [0], [0]] - >>> a = angle_between_vectors(v0, v1) - >>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532]) - True - >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] - >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] - >>> a = angle_between_vectors(v0, v1, axis=1) - >>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532]) - True - - """ - v0 = numpy.array(v0, dtype=numpy.float64, copy=False) - v1 = numpy.array(v1, dtype=numpy.float64, copy=False) - dot = numpy.sum(v0 * v1, axis=axis) - dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis) - return numpy.arccos(dot if directed else numpy.fabs(dot)) - - -def inverse_matrix(matrix): - """Return inverse of square transformation matrix. - - >>> M0 = random_rotation_matrix() - >>> M1 = inverse_matrix(M0.T) - >>> numpy.allclose(M1, numpy.linalg.inv(M0.T)) - True - >>> for size in range(1, 7): - ... M0 = numpy.random.rand(size, size) - ... M1 = inverse_matrix(M0) - ... 
if not numpy.allclose(M1, numpy.linalg.inv(M0)): print(size) - - """ - return numpy.linalg.inv(matrix) - - -def concatenate_matrices(*matrices): - """Return concatenation of series of transformation matrices. - - >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5 - >>> numpy.allclose(M, concatenate_matrices(M)) - True - >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T)) - True - - """ - M = numpy.identity(4) - for i in matrices: - M = numpy.dot(M, i) - return M - - -def is_same_transform(matrix0, matrix1): - """Return True if two matrices perform same transformation. - - >>> is_same_transform(numpy.identity(4), numpy.identity(4)) - True - >>> is_same_transform(numpy.identity(4), random_rotation_matrix()) - False - - """ - matrix0 = numpy.array(matrix0, dtype=numpy.float64, copy=True) - matrix0 /= matrix0[3, 3] - matrix1 = numpy.array(matrix1, dtype=numpy.float64, copy=True) - matrix1 /= matrix1[3, 3] - return numpy.allclose(matrix0, matrix1) - - -def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'): - """Try import all public attributes from module into global namespace. - - Existing attributes with name clashes are renamed with prefix. - Attributes starting with underscore are ignored by default. - - Return True on successful import. - - """ - import warnings - from importlib import import_module - try: - if not package: - module = import_module(name) - else: - module = import_module('.' + name, package=package) - except ImportError: - if warn: - warnings.warn("failed to import module %s" % name) - else: - for attr in dir(module): - if ignore and attr.startswith(ignore): - continue - if prefix: - if attr in globals(): - globals()[prefix + attr] = globals()[attr] - elif warn: - warnings.warn("no Python implementation of " + attr) - globals()[attr] = getattr(module, attr) - return True - - -_import_module('_transformations') - -if __name__ == "__main__": - import doctest - import random # used in doctests - numpy.set_printoptions(suppress=True, precision=5) - doctest.testmod() \ No newline at end of file +# -*- coding: utf-8 -*- +# transformations.py + +# Copyright (c) 2006-2015, Christoph Gohlke +# Copyright (c) 2006-2015, The Regents of the University of California +# Produced at the Laboratory for Fluorescence Dynamics +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holders nor the names of any +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +"""Homogeneous Transformation Matrices and Quaternions. + +A library for calculating 4x4 matrices for translating, rotating, reflecting, +scaling, shearing, projecting, orthogonalizing, and superimposing arrays of +3D homogeneous coordinates as well as for converting between rotation matrices, +Euler angles, and quaternions. Also includes an Arcball control object and +functions to decompose transformation matrices. + +:Author: + `Christoph Gohlke `_ + +:Organization: + Laboratory for Fluorescence Dynamics, University of California, Irvine + +:Version: 2015.07.18 + +Requirements +------------ +* `CPython 2.7 or 3.4 `_ +* `Numpy 1.9 `_ +* `Transformations.c 2015.07.18 `_ + (recommended for speedup of some functions) + +Notes +----- +The API is not stable yet and is expected to change between revisions. + +This Python code is not optimized for speed. Refer to the transformations.c +module for a faster implementation of some functions. + +Documentation in HTML format can be generated with epydoc. + +Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using +numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using +numpy.dot(M, v) for shape (4, \*) column vectors, respectively +numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points"). + +This module follows the "column vectors on the right" and "row major storage" +(C contiguous) conventions. The translation components are in the right column +of the transformation matrix, i.e. M[:3, 3]. +The transpose of the transformation matrices may have to be used to interface +with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16]. + +Calculations are carried out with numpy.float64 precision. + +Vector, point, quaternion, and matrix function arguments are expected to be +"array like", i.e. tuple, list, or numpy arrays. + +Return types are numpy arrays unless specified otherwise. + +Angles are in radians unless specified otherwise. + +Quaternions w+ix+jy+kz are represented as [w, x, y, z]. + +A triple of Euler angles can be applied/interpreted in 24 ways, which can +be specified using a 4 character string or encoded 4-tuple: + + *Axes 4-string*: e.g. 'sxyz' or 'ryxy' + + - first character : rotations are applied to 's'tatic or 'r'otating frame + - remaining characters : successive rotation axis 'x', 'y', or 'z' + + *Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1) + + - inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix. + - parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed + by 'z', or 'z' is followed by 'x'. Otherwise odd (1). + - repetition : first and last axis are same (1) or different (0). + - frame : rotations are applied to static (0) or rotating (1) frame. + +Other Python packages and modules for 3D transformations and quaternions: + +* `Transforms3d `_ + includes most code of this module. +* `Blender.mathutils `_ +* `numpy-dtypes `_ + +References +---------- +(1) Matrices and transformations. Ronald Goldman. 
+ In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990. +(2) More matrices and transformations: shear and pseudo-perspective. + Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. +(3) Decomposing a matrix into simple transformations. Spencer Thomas. + In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991. +(4) Recovering the data from the transformation matrix. Ronald Goldman. + In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991. +(5) Euler angle conversion. Ken Shoemake. + In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994. +(6) Arcball rotation control. Ken Shoemake. + In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994. +(7) Representing attitude: Euler angles, unit quaternions, and rotation + vectors. James Diebel. 2006. +(8) A discussion of the solution for the best rotation to relate two sets + of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828. +(9) Closed-form solution of absolute orientation using unit quaternions. + BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642. +(10) Quaternions. Ken Shoemake. + http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf +(11) From quaternion to matrix and back. JMP van Waveren. 2005. + http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm +(12) Uniform random rotations. Ken Shoemake. + In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992. +(13) Quaternion in molecular modeling. CFF Karney. + J Mol Graph Mod, 25(5):595-604 +(14) New method for extracting the quaternion from a rotation matrix. + Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087. +(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann. + Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130. +(16) Column Vectors vs. Row Vectors. + http://steve.hollasch.net/cgindex/math/matrix/column-vec.html + +Examples +-------- +>>> alpha, beta, gamma = 0.123, -1.234, 2.345 +>>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1] +>>> I = identity_matrix() +>>> Rx = rotation_matrix(alpha, xaxis) +>>> Ry = rotation_matrix(beta, yaxis) +>>> Rz = rotation_matrix(gamma, zaxis) +>>> R = concatenate_matrices(Rx, Ry, Rz) +>>> euler = euler_from_matrix(R, 'rxyz') +>>> numpy.allclose([alpha, beta, gamma], euler) +True +>>> Re = euler_matrix(alpha, beta, gamma, 'rxyz') +>>> is_same_transform(R, Re) +True +>>> al, be, ga = euler_from_matrix(Re, 'rxyz') +>>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz')) +True +>>> qx = quaternion_about_axis(alpha, xaxis) +>>> qy = quaternion_about_axis(beta, yaxis) +>>> qz = quaternion_about_axis(gamma, zaxis) +>>> q = quaternion_multiply(qx, qy) +>>> q = quaternion_multiply(q, qz) +>>> Rq = quaternion_matrix(q) +>>> is_same_transform(R, Rq) +True +>>> S = scale_matrix(1.23, origin) +>>> T = translation_matrix([1, 2, 3]) +>>> Z = shear_matrix(beta, xaxis, origin, zaxis) +>>> R = random_rotation_matrix(numpy.random.rand(3)) +>>> M = concatenate_matrices(T, R, Z, S) +>>> scale, shear, angles, trans, persp = decompose_matrix(M) +>>> numpy.allclose(scale, 1.23) +True +>>> numpy.allclose(trans, [1, 2, 3]) +True +>>> numpy.allclose(shear, [0, math.tan(beta), 0]) +True +>>> is_same_transform(R, euler_matrix(axes='sxyz', *angles)) +True +>>> M1 = compose_matrix(scale, shear, angles, trans, persp) +>>> is_same_transform(M, M1) +True +>>> v0, v1 = random_vector(3), random_vector(3) +>>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1)) +>>> v2 = numpy.dot(v0, M[:3,:3].T) +>>> numpy.allclose(unit_vector(v1), unit_vector(v2)) +True 
+ +""" + +from __future__ import division, print_function + +import math + +import numpy + +__version__ = '2015.07.18' +__docformat__ = 'restructuredtext en' +__all__ = () + + +def identity_matrix(): + """Return 4x4 identity/unit matrix. + + >>> I = identity_matrix() + >>> numpy.allclose(I, numpy.dot(I, I)) + True + >>> numpy.sum(I), numpy.trace(I) + (4.0, 4.0) + >>> numpy.allclose(I, numpy.identity(4)) + True + + """ + return numpy.identity(4) + + +def translation_matrix(direction): + """Return matrix to translate by direction vector. + + >>> v = numpy.random.random(3) - 0.5 + >>> numpy.allclose(v, translation_matrix(v)[:3, 3]) + True + + """ + M = numpy.identity(4) + M[:3, 3] = direction[:3] + return M + + +def translation_from_matrix(matrix): + """Return translation vector from translation matrix. + + >>> v0 = numpy.random.random(3) - 0.5 + >>> v1 = translation_from_matrix(translation_matrix(v0)) + >>> numpy.allclose(v0, v1) + True + + """ + return numpy.array(matrix, copy=False)[:3, 3].copy() + + +def reflection_matrix(point, normal): + """Return matrix to mirror at plane defined by point and normal vector. + + >>> v0 = numpy.random.random(4) - 0.5 + >>> v0[3] = 1. + >>> v1 = numpy.random.random(3) - 0.5 + >>> R = reflection_matrix(v0, v1) + >>> numpy.allclose(2, numpy.trace(R)) + True + >>> numpy.allclose(v0, numpy.dot(R, v0)) + True + >>> v2 = v0.copy() + >>> v2[:3] += v1 + >>> v3 = v0.copy() + >>> v2[:3] -= v1 + >>> numpy.allclose(v2, numpy.dot(R, v3)) + True + + """ + normal = unit_vector(normal[:3]) + M = numpy.identity(4) + M[:3, :3] -= 2.0 * numpy.outer(normal, normal) + M[:3, 3] = (2.0 * numpy.dot(point[:3], normal)) * normal + return M + + +def reflection_from_matrix(matrix): + """Return mirror plane point and normal vector from reflection matrix. + + >>> v0 = numpy.random.random(3) - 0.5 + >>> v1 = numpy.random.random(3) - 0.5 + >>> M0 = reflection_matrix(v0, v1) + >>> point, normal = reflection_from_matrix(M0) + >>> M1 = reflection_matrix(point, normal) + >>> is_same_transform(M0, M1) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=False) + # normal: unit eigenvector corresponding to eigenvalue -1 + w, V = numpy.linalg.eig(M[:3, :3]) + i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no unit eigenvector corresponding to eigenvalue -1") + normal = numpy.real(V[:, i[0]]).squeeze() + # point: any unit eigenvector corresponding to eigenvalue 1 + w, V = numpy.linalg.eig(M) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no unit eigenvector corresponding to eigenvalue 1") + point = numpy.real(V[:, i[-1]]).squeeze() + point /= point[3] + return point, normal + + +def rotation_matrix(angle, direction, point=None): + """Return matrix to rotate about axis defined by point and direction. 
+ + >>> R = rotation_matrix(math.pi/2, [0, 0, 1], [1, 0, 0]) + >>> numpy.allclose(numpy.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1]) + True + >>> angle = (random.random() - 0.5) * (2*math.pi) + >>> direc = numpy.random.random(3) - 0.5 + >>> point = numpy.random.random(3) - 0.5 + >>> R0 = rotation_matrix(angle, direc, point) + >>> R1 = rotation_matrix(angle-2*math.pi, direc, point) + >>> is_same_transform(R0, R1) + True + >>> R0 = rotation_matrix(angle, direc, point) + >>> R1 = rotation_matrix(-angle, -direc, point) + >>> is_same_transform(R0, R1) + True + >>> I = numpy.identity(4, numpy.float64) + >>> numpy.allclose(I, rotation_matrix(math.pi*2, direc)) + True + >>> numpy.allclose(2, numpy.trace(rotation_matrix(math.pi/2, + ... direc, point))) + True + + """ + sina = math.sin(angle) + cosa = math.cos(angle) + direction = unit_vector(direction[:3]) + # rotation matrix around unit vector + R = numpy.diag([cosa, cosa, cosa]) + R += numpy.outer(direction, direction) * (1.0 - cosa) + direction *= sina + R += numpy.array([[ 0.0, -direction[2], direction[1]], + [ direction[2], 0.0, -direction[0]], + [-direction[1], direction[0], 0.0]]) + M = numpy.identity(4) + M[:3, :3] = R + if point is not None: + # rotation not around origin + point = numpy.array(point[:3], dtype=numpy.float64, copy=False) + M[:3, 3] = point - numpy.dot(R, point) + return M + + +def rotation_from_matrix(matrix): + """Return rotation angle and axis from rotation matrix. + + >>> angle = (random.random() - 0.5) * (2*math.pi) + >>> direc = numpy.random.random(3) - 0.5 + >>> point = numpy.random.random(3) - 0.5 + >>> R0 = rotation_matrix(angle, direc, point) + >>> angle, direc, point = rotation_from_matrix(R0) + >>> R1 = rotation_matrix(angle, direc, point) + >>> is_same_transform(R0, R1) + True + + """ + R = numpy.array(matrix, dtype=numpy.float64, copy=False) + R33 = R[:3, :3] + # direction: unit eigenvector of R33 corresponding to eigenvalue of 1 + w, W = numpy.linalg.eig(R33.T) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no unit eigenvector corresponding to eigenvalue 1") + direction = numpy.real(W[:, i[-1]]).squeeze() + # point: unit eigenvector of R33 corresponding to eigenvalue of 1 + w, Q = numpy.linalg.eig(R) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no unit eigenvector corresponding to eigenvalue 1") + point = numpy.real(Q[:, i[-1]]).squeeze() + point /= point[3] + # rotation angle depending on direction + cosa = (numpy.trace(R33) - 1.0) / 2.0 + if abs(direction[2]) > 1e-8: + sina = (R[1, 0] + (cosa-1.0)*direction[0]*direction[1]) / direction[2] + elif abs(direction[1]) > 1e-8: + sina = (R[0, 2] + (cosa-1.0)*direction[0]*direction[2]) / direction[1] + else: + sina = (R[2, 1] + (cosa-1.0)*direction[1]*direction[2]) / direction[0] + angle = math.atan2(sina, cosa) + return angle, direction, point + + +def scale_matrix(factor, origin=None, direction=None): + """Return matrix to scale by factor around origin in direction. + + Use factor -1 for point symmetry. 
+ + >>> v = (numpy.random.rand(4, 5) - 0.5) * 20 + >>> v[3] = 1 + >>> S = scale_matrix(-1.234) + >>> numpy.allclose(numpy.dot(S, v)[:3], -1.234*v[:3]) + True + >>> factor = random.random() * 10 - 5 + >>> origin = numpy.random.random(3) - 0.5 + >>> direct = numpy.random.random(3) - 0.5 + >>> S = scale_matrix(factor, origin) + >>> S = scale_matrix(factor, origin, direct) + + """ + if direction is None: + # uniform scaling + M = numpy.diag([factor, factor, factor, 1.0]) + if origin is not None: + M[:3, 3] = origin[:3] + M[:3, 3] *= 1.0 - factor + else: + # nonuniform scaling + direction = unit_vector(direction[:3]) + factor = 1.0 - factor + M = numpy.identity(4) + M[:3, :3] -= factor * numpy.outer(direction, direction) + if origin is not None: + M[:3, 3] = (factor * numpy.dot(origin[:3], direction)) * direction + return M + + +def scale_from_matrix(matrix): + """Return scaling factor, origin and direction from scaling matrix. + + >>> factor = random.random() * 10 - 5 + >>> origin = numpy.random.random(3) - 0.5 + >>> direct = numpy.random.random(3) - 0.5 + >>> S0 = scale_matrix(factor, origin) + >>> factor, origin, direction = scale_from_matrix(S0) + >>> S1 = scale_matrix(factor, origin, direction) + >>> is_same_transform(S0, S1) + True + >>> S0 = scale_matrix(factor, origin, direct) + >>> factor, origin, direction = scale_from_matrix(S0) + >>> S1 = scale_matrix(factor, origin, direction) + >>> is_same_transform(S0, S1) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=False) + M33 = M[:3, :3] + factor = numpy.trace(M33) - 2.0 + try: + # direction: unit eigenvector corresponding to eigenvalue factor + w, V = numpy.linalg.eig(M33) + i = numpy.where(abs(numpy.real(w) - factor) < 1e-8)[0][0] + direction = numpy.real(V[:, i]).squeeze() + direction /= vector_norm(direction) + except IndexError: + # uniform scaling + factor = (factor + 2.0) / 3.0 + direction = None + # origin: any eigenvector corresponding to eigenvalue 1 + w, V = numpy.linalg.eig(M) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no eigenvector corresponding to eigenvalue 1") + origin = numpy.real(V[:, i[-1]]).squeeze() + origin /= origin[3] + return factor, origin, direction + + +def projection_matrix(point, normal, direction=None, + perspective=None, pseudo=False): + """Return matrix to project onto plane defined by point and normal. + + Using either perspective point, projection direction, or none of both. + + If pseudo is True, perspective projections will preserve relative depth + such that Perspective = dot(Orthogonal, PseudoPerspective). 
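# Usage sketch for scale_matrix with a direction, as defined above: scale by 3 along
# x only, leaving the plane through the origin normal to x fixed. The import name
# `transformations` is an assumption.
import numpy
import transformations as tf

S = tf.scale_matrix(3.0, origin=[0, 0, 0], direction=[1, 0, 0])
assert numpy.allclose(numpy.dot(S, [1.0, 2.0, 3.0, 1.0]), [3.0, 2.0, 3.0, 1.0])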
+ + >>> P = projection_matrix([0, 0, 0], [1, 0, 0]) + >>> numpy.allclose(P[1:, 1:], numpy.identity(4)[1:, 1:]) + True + >>> point = numpy.random.random(3) - 0.5 + >>> normal = numpy.random.random(3) - 0.5 + >>> direct = numpy.random.random(3) - 0.5 + >>> persp = numpy.random.random(3) - 0.5 + >>> P0 = projection_matrix(point, normal) + >>> P1 = projection_matrix(point, normal, direction=direct) + >>> P2 = projection_matrix(point, normal, perspective=persp) + >>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True) + >>> is_same_transform(P2, numpy.dot(P0, P3)) + True + >>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0]) + >>> v0 = (numpy.random.rand(4, 5) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = numpy.dot(P, v0) + >>> numpy.allclose(v1[1], v0[1]) + True + >>> numpy.allclose(v1[0], 3-v1[1]) + True + + """ + M = numpy.identity(4) + point = numpy.array(point[:3], dtype=numpy.float64, copy=False) + normal = unit_vector(normal[:3]) + if perspective is not None: + # perspective projection + perspective = numpy.array(perspective[:3], dtype=numpy.float64, + copy=False) + M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective-point, normal) + M[:3, :3] -= numpy.outer(perspective, normal) + if pseudo: + # preserve relative depth + M[:3, :3] -= numpy.outer(normal, normal) + M[:3, 3] = numpy.dot(point, normal) * (perspective+normal) + else: + M[:3, 3] = numpy.dot(point, normal) * perspective + M[3, :3] = -normal + M[3, 3] = numpy.dot(perspective, normal) + elif direction is not None: + # parallel projection + direction = numpy.array(direction[:3], dtype=numpy.float64, copy=False) + scale = numpy.dot(direction, normal) + M[:3, :3] -= numpy.outer(direction, normal) / scale + M[:3, 3] = direction * (numpy.dot(point, normal) / scale) + else: + # orthogonal projection + M[:3, :3] -= numpy.outer(normal, normal) + M[:3, 3] = numpy.dot(point, normal) * normal + return M + + +def projection_from_matrix(matrix, pseudo=False): + """Return projection plane and perspective point from projection matrix. + + Return values are same as arguments for projection_matrix function: + point, normal, direction, perspective, and pseudo. 
+ + >>> point = numpy.random.random(3) - 0.5 + >>> normal = numpy.random.random(3) - 0.5 + >>> direct = numpy.random.random(3) - 0.5 + >>> persp = numpy.random.random(3) - 0.5 + >>> P0 = projection_matrix(point, normal) + >>> result = projection_from_matrix(P0) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, direct) + >>> result = projection_from_matrix(P0) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False) + >>> result = projection_from_matrix(P0, pseudo=False) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True) + >>> result = projection_from_matrix(P0, pseudo=True) + >>> P1 = projection_matrix(*result) + >>> is_same_transform(P0, P1) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=False) + M33 = M[:3, :3] + w, V = numpy.linalg.eig(M) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not pseudo and len(i): + # point: any eigenvector corresponding to eigenvalue 1 + point = numpy.real(V[:, i[-1]]).squeeze() + point /= point[3] + # direction: unit eigenvector corresponding to eigenvalue 0 + w, V = numpy.linalg.eig(M33) + i = numpy.where(abs(numpy.real(w)) < 1e-8)[0] + if not len(i): + raise ValueError("no eigenvector corresponding to eigenvalue 0") + direction = numpy.real(V[:, i[0]]).squeeze() + direction /= vector_norm(direction) + # normal: unit eigenvector of M33.T corresponding to eigenvalue 0 + w, V = numpy.linalg.eig(M33.T) + i = numpy.where(abs(numpy.real(w)) < 1e-8)[0] + if len(i): + # parallel projection + normal = numpy.real(V[:, i[0]]).squeeze() + normal /= vector_norm(normal) + return point, normal, direction, None, False + else: + # orthogonal projection, where normal equals direction vector + return point, direction, None, None, False + else: + # perspective projection + i = numpy.where(abs(numpy.real(w)) > 1e-8)[0] + if not len(i): + raise ValueError( + "no eigenvector not corresponding to eigenvalue 0") + point = numpy.real(V[:, i[-1]]).squeeze() + point /= point[3] + normal = - M[3, :3] + perspective = M[:3, 3] / numpy.dot(point[:3], normal) + if pseudo: + perspective -= normal + return point, normal, None, perspective, pseudo + + +def clip_matrix(left, right, bottom, top, near, far, perspective=False): + """Return matrix to obtain normalized device coordinates from frustum. + + The frustum bounds are axis-aligned along x (left, right), + y (bottom, top) and z (near, far). + + Normalized device coordinates are in range [-1, 1] if coordinates are + inside the frustum. + + If perspective is True the frustum is a truncated pyramid with the + perspective point at origin and direction along z axis, otherwise an + orthographic canonical view volume (a box). + + Homogeneous coordinates transformed by the perspective clip matrix + need to be dehomogenized (divided by w coordinate). 
+ + >>> frustum = numpy.random.rand(6) + >>> frustum[1] += frustum[0] + >>> frustum[3] += frustum[2] + >>> frustum[5] += frustum[4] + >>> M = clip_matrix(perspective=False, *frustum) + >>> numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1]) + array([-1., -1., -1., 1.]) + >>> numpy.dot(M, [frustum[1], frustum[3], frustum[5], 1]) + array([ 1., 1., 1., 1.]) + >>> M = clip_matrix(perspective=True, *frustum) + >>> v = numpy.dot(M, [frustum[0], frustum[2], frustum[4], 1]) + >>> v / v[3] + array([-1., -1., -1., 1.]) + >>> v = numpy.dot(M, [frustum[1], frustum[3], frustum[4], 1]) + >>> v / v[3] + array([ 1., 1., -1., 1.]) + + """ + if left >= right or bottom >= top or near >= far: + raise ValueError("invalid frustum") + if perspective: + if near <= _EPS: + raise ValueError("invalid frustum: near <= 0") + t = 2.0 * near + M = [[t/(left-right), 0.0, (right+left)/(right-left), 0.0], + [0.0, t/(bottom-top), (top+bottom)/(top-bottom), 0.0], + [0.0, 0.0, (far+near)/(near-far), t*far/(far-near)], + [0.0, 0.0, -1.0, 0.0]] + else: + M = [[2.0/(right-left), 0.0, 0.0, (right+left)/(left-right)], + [0.0, 2.0/(top-bottom), 0.0, (top+bottom)/(bottom-top)], + [0.0, 0.0, 2.0/(far-near), (far+near)/(near-far)], + [0.0, 0.0, 0.0, 1.0]] + return numpy.array(M) + + +def shear_matrix(angle, direction, point, normal): + """Return matrix to shear by angle along direction vector on shear plane. + + The shear plane is defined by a point and normal vector. The direction + vector must be orthogonal to the plane's normal vector. + + A point P is transformed by the shear matrix into P" such that + the vector P-P" is parallel to the direction vector and its extent is + given by the angle of P-P'-P", where P' is the orthogonal projection + of P onto the shear plane. + + >>> angle = (random.random() - 0.5) * 4*math.pi + >>> direct = numpy.random.random(3) - 0.5 + >>> point = numpy.random.random(3) - 0.5 + >>> normal = numpy.cross(direct, numpy.random.random(3)) + >>> S = shear_matrix(angle, direct, point, normal) + >>> numpy.allclose(1, numpy.linalg.det(S)) + True + + """ + normal = unit_vector(normal[:3]) + direction = unit_vector(direction[:3]) + if abs(numpy.dot(normal, direction)) > 1e-6: + raise ValueError("direction and normal vectors are not orthogonal") + angle = math.tan(angle) + M = numpy.identity(4) + M[:3, :3] += angle * numpy.outer(direction, normal) + M[:3, 3] = -angle * numpy.dot(point[:3], normal) * direction + return M + + +def shear_from_matrix(matrix): + """Return shear angle, direction and plane from shear matrix. 
+ + >>> angle = (random.random() - 0.5) * 4*math.pi + >>> direct = numpy.random.random(3) - 0.5 + >>> point = numpy.random.random(3) - 0.5 + >>> normal = numpy.cross(direct, numpy.random.random(3)) + >>> S0 = shear_matrix(angle, direct, point, normal) + >>> angle, direct, point, normal = shear_from_matrix(S0) + >>> S1 = shear_matrix(angle, direct, point, normal) + >>> is_same_transform(S0, S1) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=False) + M33 = M[:3, :3] + # normal: cross independent eigenvectors corresponding to the eigenvalue 1 + w, V = numpy.linalg.eig(M33) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-4)[0] + if len(i) < 2: + raise ValueError("no two linear independent eigenvectors found %s" % w) + V = numpy.real(V[:, i]).squeeze().T + lenorm = -1.0 + for i0, i1 in ((0, 1), (0, 2), (1, 2)): + n = numpy.cross(V[i0], V[i1]) + w = vector_norm(n) + if w > lenorm: + lenorm = w + normal = n + normal /= lenorm + # direction and angle + direction = numpy.dot(M33 - numpy.identity(3), normal) + angle = vector_norm(direction) + direction /= angle + angle = math.atan(angle) + # point: eigenvector corresponding to eigenvalue 1 + w, V = numpy.linalg.eig(M) + i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0] + if not len(i): + raise ValueError("no eigenvector corresponding to eigenvalue 1") + point = numpy.real(V[:, i[-1]]).squeeze() + point /= point[3] + return angle, direction, point, normal + + +def decompose_matrix(matrix): + """Return sequence of transformations from transformation matrix. + + matrix : array_like + Non-degenerative homogeneous transformation matrix + + Return tuple of: + scale : vector of 3 scaling factors + shear : list of shear factors for x-y, x-z, y-z axes + angles : list of Euler angles about static x, y, z axes + translate : translation vector along x, y, z axes + perspective : perspective partition of matrix + + Raise ValueError if matrix is of wrong type or degenerative. 
+ + >>> T0 = translation_matrix([1, 2, 3]) + >>> scale, shear, angles, trans, persp = decompose_matrix(T0) + >>> T1 = translation_matrix(trans) + >>> numpy.allclose(T0, T1) + True + >>> S = scale_matrix(0.123) + >>> scale, shear, angles, trans, persp = decompose_matrix(S) + >>> scale[0] + 0.123 + >>> R0 = euler_matrix(1, 2, 3) + >>> scale, shear, angles, trans, persp = decompose_matrix(R0) + >>> R1 = euler_matrix(*angles) + >>> numpy.allclose(R0, R1) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=True).T + if abs(M[3, 3]) < _EPS: + raise ValueError("M[3, 3] is zero") + M /= M[3, 3] + P = M.copy() + P[:, 3] = 0.0, 0.0, 0.0, 1.0 + if not numpy.linalg.det(P): + raise ValueError("matrix is singular") + + scale = numpy.zeros((3, )) + shear = [0.0, 0.0, 0.0] + angles = [0.0, 0.0, 0.0] + + if any(abs(M[:3, 3]) > _EPS): + perspective = numpy.dot(M[:, 3], numpy.linalg.inv(P.T)) + M[:, 3] = 0.0, 0.0, 0.0, 1.0 + else: + perspective = numpy.array([0.0, 0.0, 0.0, 1.0]) + + translate = M[3, :3].copy() + M[3, :3] = 0.0 + + row = M[:3, :3].copy() + scale[0] = vector_norm(row[0]) + row[0] /= scale[0] + shear[0] = numpy.dot(row[0], row[1]) + row[1] -= row[0] * shear[0] + scale[1] = vector_norm(row[1]) + row[1] /= scale[1] + shear[0] /= scale[1] + shear[1] = numpy.dot(row[0], row[2]) + row[2] -= row[0] * shear[1] + shear[2] = numpy.dot(row[1], row[2]) + row[2] -= row[1] * shear[2] + scale[2] = vector_norm(row[2]) + row[2] /= scale[2] + shear[1:] /= scale[2] + + if numpy.dot(row[0], numpy.cross(row[1], row[2])) < 0: + numpy.negative(scale, scale) + numpy.negative(row, row) + + angles[1] = math.asin(-row[0, 2]) + if math.cos(angles[1]): + angles[0] = math.atan2(row[1, 2], row[2, 2]) + angles[2] = math.atan2(row[0, 1], row[0, 0]) + else: + #angles[0] = math.atan2(row[1, 0], row[1, 1]) + angles[0] = math.atan2(-row[2, 1], row[1, 1]) + angles[2] = 0.0 + + return scale, shear, angles, translate, perspective + + +def compose_matrix(scale=None, shear=None, angles=None, translate=None, + perspective=None): + """Return transformation matrix from sequence of transformations. + + This is the inverse of the decompose_matrix function. 
+ + Sequence of transformations: + scale : vector of 3 scaling factors + shear : list of shear factors for x-y, x-z, y-z axes + angles : list of Euler angles about static x, y, z axes + translate : translation vector along x, y, z axes + perspective : perspective partition of matrix + + >>> scale = numpy.random.random(3) - 0.5 + >>> shear = numpy.random.random(3) - 0.5 + >>> angles = (numpy.random.random(3) - 0.5) * (2*math.pi) + >>> trans = numpy.random.random(3) - 0.5 + >>> persp = numpy.random.random(4) - 0.5 + >>> M0 = compose_matrix(scale, shear, angles, trans, persp) + >>> result = decompose_matrix(M0) + >>> M1 = compose_matrix(*result) + >>> is_same_transform(M0, M1) + True + + """ + M = numpy.identity(4) + if perspective is not None: + P = numpy.identity(4) + P[3, :] = perspective[:4] + M = numpy.dot(M, P) + if translate is not None: + T = numpy.identity(4) + T[:3, 3] = translate[:3] + M = numpy.dot(M, T) + if angles is not None: + R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz') + M = numpy.dot(M, R) + if shear is not None: + Z = numpy.identity(4) + Z[1, 2] = shear[2] + Z[0, 2] = shear[1] + Z[0, 1] = shear[0] + M = numpy.dot(M, Z) + if scale is not None: + S = numpy.identity(4) + S[0, 0] = scale[0] + S[1, 1] = scale[1] + S[2, 2] = scale[2] + M = numpy.dot(M, S) + M /= M[3, 3] + return M + + +def orthogonalization_matrix(lengths, angles): + """Return orthogonalization matrix for crystallographic cell coordinates. + + Angles are expected in degrees. + + The de-orthogonalization matrix is the inverse. + + >>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90]) + >>> numpy.allclose(O[:3, :3], numpy.identity(3, float) * 10) + True + >>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7]) + >>> numpy.allclose(numpy.sum(O), 43.063229) + True + + """ + a, b, c = lengths + angles = numpy.radians(angles) + sina, sinb, _ = numpy.sin(angles) + cosa, cosb, cosg = numpy.cos(angles) + co = (cosa * cosb - cosg) / (sina * sinb) + return numpy.array([ + [ a*sinb*math.sqrt(1.0-co*co), 0.0, 0.0, 0.0], + [-a*sinb*co, b*sina, 0.0, 0.0], + [ a*cosb, b*cosa, c, 0.0], + [ 0.0, 0.0, 0.0, 1.0]]) + + +def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True): + """Return affine transform matrix to register two point sets. + + v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous + coordinates, where ndims is the dimensionality of the coordinate space. + + If shear is False, a similarity transformation matrix is returned. + If also scale is False, a rigid/Euclidean transformation matrix + is returned. + + By default the algorithm by Hartley and Zissermann [15] is used. + If usesvd is True, similarity and Euclidean transformation matrices + are calculated by minimizing the weighted sum of squared deviations + (RMSD) according to the algorithm by Kabsch [8]. + Otherwise, and if ndims is 3, the quaternion based algorithm by Horn [9] + is used, which is slower when using this Python implementation. + + The returned matrix performs rotation, translation and uniform scaling + (if specified). + + >>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]] + >>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]] + >>> affine_matrix_from_points(v0, v1) + array([[ 0.14549, 0.00062, 675.50008], + [ 0.00048, 0.14094, 53.24971], + [ 0. , 0. , 1. 
]]) + >>> T = translation_matrix(numpy.random.random(3)-0.5) + >>> R = random_rotation_matrix(numpy.random.random(3)) + >>> S = scale_matrix(random.random()) + >>> M = concatenate_matrices(T, R, S) + >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = numpy.dot(M, v0) + >>> v0[:3] += numpy.random.normal(0, 1e-8, 300).reshape(3, -1) + >>> M = affine_matrix_from_points(v0[:3], v1[:3]) + >>> numpy.allclose(v1, numpy.dot(M, v0)) + True + + More examples in superimposition_matrix() + + """ + v0 = numpy.array(v0, dtype=numpy.float64, copy=True) + v1 = numpy.array(v1, dtype=numpy.float64, copy=True) + + ndims = v0.shape[0] + if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape: + raise ValueError("input arrays are of wrong shape or type") + + # move centroids to origin + t0 = -numpy.mean(v0, axis=1) + M0 = numpy.identity(ndims+1) + M0[:ndims, ndims] = t0 + v0 += t0.reshape(ndims, 1) + t1 = -numpy.mean(v1, axis=1) + M1 = numpy.identity(ndims+1) + M1[:ndims, ndims] = t1 + v1 += t1.reshape(ndims, 1) + + if shear: + # Affine transformation + A = numpy.concatenate((v0, v1), axis=0) + u, s, vh = numpy.linalg.svd(A.T) + vh = vh[:ndims].T + B = vh[:ndims] + C = vh[ndims:2*ndims] + t = numpy.dot(C, numpy.linalg.pinv(B)) + t = numpy.concatenate((t, numpy.zeros((ndims, 1))), axis=1) + M = numpy.vstack((t, ((0.0,)*ndims) + (1.0,))) + elif usesvd or ndims != 3: + # Rigid transformation via SVD of covariance matrix + u, s, vh = numpy.linalg.svd(numpy.dot(v1, v0.T)) + # rotation matrix from SVD orthonormal bases + R = numpy.dot(u, vh) + if numpy.linalg.det(R) < 0.0: + # R does not constitute right handed system + R -= numpy.outer(u[:, ndims-1], vh[ndims-1, :]*2.0) + s[-1] *= -1.0 + # homogeneous transformation matrix + M = numpy.identity(ndims+1) + M[:ndims, :ndims] = R + else: + # Rigid transformation matrix via quaternion + # compute symmetric matrix N + xx, yy, zz = numpy.sum(v0 * v1, axis=1) + xy, yz, zx = numpy.sum(v0 * numpy.roll(v1, -1, axis=0), axis=1) + xz, yx, zy = numpy.sum(v0 * numpy.roll(v1, -2, axis=0), axis=1) + N = [[xx+yy+zz, 0.0, 0.0, 0.0], + [yz-zy, xx-yy-zz, 0.0, 0.0], + [zx-xz, xy+yx, yy-xx-zz, 0.0], + [xy-yx, zx+xz, yz+zy, zz-xx-yy]] + # quaternion: eigenvector corresponding to most positive eigenvalue + w, V = numpy.linalg.eigh(N) + q = V[:, numpy.argmax(w)] + q /= vector_norm(q) # unit quaternion + # homogeneous transformation matrix + M = quaternion_matrix(q) + + if scale and not shear: + # Affine transformation; scale is ratio of RMS deviations from centroid + v0 *= v0 + v1 *= v1 + M[:ndims, :ndims] *= math.sqrt(numpy.sum(v1) / numpy.sum(v0)) + + # move centroids back + M = numpy.dot(numpy.linalg.inv(M1), numpy.dot(M, M0)) + M /= M[ndims, ndims] + return M + + +def superimposition_matrix(v0, v1, scale=False, usesvd=True): + """Return matrix to transform given 3D point set into second point set. + + v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points. + + The parameters scale and usesvd are explained in the more general + affine_matrix_from_points function. + + The returned matrix is a similarity or Euclidean transformation matrix. + This function has a fast C implementation in transformations.c. 
+ + >>> v0 = numpy.random.rand(3, 10) + >>> M = superimposition_matrix(v0, v0) + >>> numpy.allclose(M, numpy.identity(4)) + True + >>> R = random_rotation_matrix(numpy.random.random(3)) + >>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]] + >>> v1 = numpy.dot(R, v0) + >>> M = superimposition_matrix(v0, v1) + >>> numpy.allclose(v1, numpy.dot(M, v0)) + True + >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20 + >>> v0[3] = 1 + >>> v1 = numpy.dot(R, v0) + >>> M = superimposition_matrix(v0, v1) + >>> numpy.allclose(v1, numpy.dot(M, v0)) + True + >>> S = scale_matrix(random.random()) + >>> T = translation_matrix(numpy.random.random(3)-0.5) + >>> M = concatenate_matrices(T, R, S) + >>> v1 = numpy.dot(M, v0) + >>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1) + >>> M = superimposition_matrix(v0, v1, scale=True) + >>> numpy.allclose(v1, numpy.dot(M, v0)) + True + >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) + >>> numpy.allclose(v1, numpy.dot(M, v0)) + True + >>> v = numpy.empty((4, 100, 3)) + >>> v[:, :, 0] = v0 + >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False) + >>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0])) + True + + """ + v0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3] + v1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3] + return affine_matrix_from_points(v0, v1, shear=False, + scale=scale, usesvd=usesvd) + + +def euler_matrix(ai, aj, ak, axes='sxyz'): + """Return homogeneous rotation matrix from Euler angles and axis sequence. + + ai, aj, ak : Euler's roll, pitch and yaw angles + axes : One of 24 axis sequences as string or encoded tuple + + >>> R = euler_matrix(1, 2, 3, 'syxz') + >>> numpy.allclose(numpy.sum(R[0]), -1.34786452) + True + >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1)) + >>> numpy.allclose(numpy.sum(R[0]), -0.383436184) + True + >>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5) + >>> for axes in _AXES2TUPLE.keys(): + ... R = euler_matrix(ai, aj, ak, axes) + >>> for axes in _TUPLE2AXES.keys(): + ... R = euler_matrix(ai, aj, ak, axes) + + """ + try: + firstaxis, parity, repetition, frame = _AXES2TUPLE[axes] + except (AttributeError, KeyError): + _TUPLE2AXES[axes] # validation + firstaxis, parity, repetition, frame = axes + + i = firstaxis + j = _NEXT_AXIS[i+parity] + k = _NEXT_AXIS[i-parity+1] + + if frame: + ai, ak = ak, ai + if parity: + ai, aj, ak = -ai, -aj, -ak + + si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak) + ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak) + cc, cs = ci*ck, ci*sk + sc, ss = si*ck, si*sk + + M = numpy.identity(4) + if repetition: + M[i, i] = cj + M[i, j] = sj*si + M[i, k] = sj*ci + M[j, i] = sj*sk + M[j, j] = -cj*ss+cc + M[j, k] = -cj*cs-sc + M[k, i] = -sj*ck + M[k, j] = cj*sc+cs + M[k, k] = cj*cc-ss + else: + M[i, i] = cj*ck + M[i, j] = sj*sc-cs + M[i, k] = sj*cc+ss + M[j, i] = cj*sk + M[j, j] = sj*ss+cc + M[j, k] = sj*cs-sc + M[k, i] = -sj + M[k, j] = cj*si + M[k, k] = cj*ci + return M + + +def euler_from_matrix(matrix, axes='sxyz'): + """Return Euler angles from rotation matrix for specified axis sequence. + + axes : One of 24 axis sequences as string or encoded tuple + + Note that many Euler angle triplets can describe one matrix. + + >>> R0 = euler_matrix(1, 2, 3, 'syxz') + >>> al, be, ga = euler_from_matrix(R0, 'syxz') + >>> R1 = euler_matrix(al, be, ga, 'syxz') + >>> numpy.allclose(R0, R1) + True + >>> angles = (4*math.pi) * (numpy.random.random(3) - 0.5) + >>> for axes in _AXES2TUPLE.keys(): + ... R0 = euler_matrix(axes=axes, *angles) + ... 
R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes)) + ... if not numpy.allclose(R0, R1): print(axes, "failed") + + """ + try: + firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] + except (AttributeError, KeyError): + _TUPLE2AXES[axes] # validation + firstaxis, parity, repetition, frame = axes + + i = firstaxis + j = _NEXT_AXIS[i+parity] + k = _NEXT_AXIS[i-parity+1] + + M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3] + if repetition: + sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k]) + if sy > _EPS: + ax = math.atan2( M[i, j], M[i, k]) + ay = math.atan2( sy, M[i, i]) + az = math.atan2( M[j, i], -M[k, i]) + else: + ax = math.atan2(-M[j, k], M[j, j]) + ay = math.atan2( sy, M[i, i]) + az = 0.0 + else: + cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i]) + if cy > _EPS: + ax = math.atan2( M[k, j], M[k, k]) + ay = math.atan2(-M[k, i], cy) + az = math.atan2( M[j, i], M[i, i]) + else: + ax = math.atan2(-M[j, k], M[j, j]) + ay = math.atan2(-M[k, i], cy) + az = 0.0 + + if parity: + ax, ay, az = -ax, -ay, -az + if frame: + ax, az = az, ax + return ax, ay, az + + +def euler_from_quaternion(quaternion, axes='sxyz'): + """Return Euler angles from quaternion for specified axis sequence. + + >>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0]) + >>> numpy.allclose(angles, [0.123, 0, 0]) + True + + """ + return euler_from_matrix(quaternion_matrix(quaternion), axes) + + +def quaternion_from_euler(ai, aj, ak, axes='sxyz'): + """Return quaternion from Euler angles and axis sequence. + + ai, aj, ak : Euler's roll, pitch and yaw angles + axes : One of 24 axis sequences as string or encoded tuple + + >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') + >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) + True + + """ + try: + firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] + except (AttributeError, KeyError): + _TUPLE2AXES[axes] # validation + firstaxis, parity, repetition, frame = axes + + i = firstaxis + 1 + j = _NEXT_AXIS[i+parity-1] + 1 + k = _NEXT_AXIS[i-parity] + 1 + + if frame: + ai, ak = ak, ai + if parity: + aj = -aj + + ai /= 2.0 + aj /= 2.0 + ak /= 2.0 + ci = math.cos(ai) + si = math.sin(ai) + cj = math.cos(aj) + sj = math.sin(aj) + ck = math.cos(ak) + sk = math.sin(ak) + cc = ci*ck + cs = ci*sk + sc = si*ck + ss = si*sk + + q = numpy.empty((4, )) + if repetition: + q[0] = cj*(cc - ss) + q[i] = cj*(cs + sc) + q[j] = sj*(cc + ss) + q[k] = sj*(cs - sc) + else: + q[0] = cj*cc + sj*ss + q[i] = cj*sc - sj*cs + q[j] = cj*ss + sj*cc + q[k] = cj*cs - sj*sc + if parity: + q[j] *= -1.0 + + return q + + +def quaternion_about_axis(angle, axis): + """Return quaternion for rotation about axis. + + >>> q = quaternion_about_axis(0.123, [1, 0, 0]) + >>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0]) + True + + """ + q = numpy.array([0.0, axis[0], axis[1], axis[2]]) + qlen = vector_norm(q) + if qlen > _EPS: + q *= math.sin(angle/2.0) / qlen + q[0] = math.cos(angle/2.0) + return q + + +def quaternion_matrix(quaternion): + """Return homogeneous rotation matrix from quaternion. 
+ + >>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0]) + >>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0])) + True + >>> M = quaternion_matrix([1, 0, 0, 0]) + >>> numpy.allclose(M, numpy.identity(4)) + True + >>> M = quaternion_matrix([0, 1, 0, 0]) + >>> numpy.allclose(M, numpy.diag([1, -1, -1, 1])) + True + + """ + q = numpy.array(quaternion, dtype=numpy.float64, copy=True) + n = numpy.dot(q, q) + if n < _EPS: + return numpy.identity(4) + q *= math.sqrt(2.0 / n) + q = numpy.outer(q, q) + return numpy.array([ + [1.0-q[2, 2]-q[3, 3], q[1, 2]-q[3, 0], q[1, 3]+q[2, 0], 0.0], + [ q[1, 2]+q[3, 0], 1.0-q[1, 1]-q[3, 3], q[2, 3]-q[1, 0], 0.0], + [ q[1, 3]-q[2, 0], q[2, 3]+q[1, 0], 1.0-q[1, 1]-q[2, 2], 0.0], + [ 0.0, 0.0, 0.0, 1.0]]) + + +def quaternion_from_matrix(matrix, isprecise=False): + """Return quaternion from rotation matrix. + + If isprecise is True, the input matrix is assumed to be a precise rotation + matrix and a faster algorithm is used. + + >>> q = quaternion_from_matrix(numpy.identity(4), True) + >>> numpy.allclose(q, [1, 0, 0, 0]) + True + >>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1])) + >>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0]) + True + >>> R = rotation_matrix(0.123, (1, 2, 3)) + >>> q = quaternion_from_matrix(R, True) + >>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786]) + True + >>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0], + ... [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]] + >>> q = quaternion_from_matrix(R) + >>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611]) + True + >>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0], + ... [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]] + >>> q = quaternion_from_matrix(R) + >>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603]) + True + >>> R = random_rotation_matrix() + >>> q = quaternion_from_matrix(R) + >>> is_same_transform(R, quaternion_matrix(q)) + True + >>> R = euler_matrix(0.0, 0.0, numpy.pi/2.0) + >>> numpy.allclose(quaternion_from_matrix(R, isprecise=False), + ... quaternion_from_matrix(R, isprecise=True)) + True + + """ + M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4] + if isprecise: + q = numpy.empty((4, )) + t = numpy.trace(M) + if t > M[3, 3]: + q[0] = t + q[3] = M[1, 0] - M[0, 1] + q[2] = M[0, 2] - M[2, 0] + q[1] = M[2, 1] - M[1, 2] + else: + i, j, k = 1, 2, 3 + if M[1, 1] > M[0, 0]: + i, j, k = 2, 3, 1 + if M[2, 2] > M[i, i]: + i, j, k = 3, 1, 2 + t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3] + q[i] = t + q[j] = M[i, j] + M[j, i] + q[k] = M[k, i] + M[i, k] + q[3] = M[k, j] - M[j, k] + q *= 0.5 / math.sqrt(t * M[3, 3]) + else: + m00 = M[0, 0] + m01 = M[0, 1] + m02 = M[0, 2] + m10 = M[1, 0] + m11 = M[1, 1] + m12 = M[1, 2] + m20 = M[2, 0] + m21 = M[2, 1] + m22 = M[2, 2] + # symmetric matrix K + K = numpy.array([[m00-m11-m22, 0.0, 0.0, 0.0], + [m01+m10, m11-m00-m22, 0.0, 0.0], + [m02+m20, m12+m21, m22-m00-m11, 0.0], + [m21-m12, m02-m20, m10-m01, m00+m11+m22]]) + K /= 3.0 + # quaternion is eigenvector of K that corresponds to largest eigenvalue + w, V = numpy.linalg.eigh(K) + q = V[[3, 0, 1, 2], numpy.argmax(w)] + if q[0] < 0.0: + numpy.negative(q, q) + return q + + +def quaternion_multiply(quaternion1, quaternion0): + """Return multiplication of two quaternions. 
+ + >>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7]) + >>> numpy.allclose(q, [28, -44, -14, 48]) + True + + """ + w0, x0, y0, z0 = quaternion0 + w1, x1, y1, z1 = quaternion1 + return numpy.array([-x1*x0 - y1*y0 - z1*z0 + w1*w0, + x1*w0 + y1*z0 - z1*y0 + w1*x0, + -x1*z0 + y1*w0 + z1*x0 + w1*y0, + x1*y0 - y1*x0 + z1*w0 + w1*z0], dtype=numpy.float64) + + +def quaternion_conjugate(quaternion): + """Return conjugate of quaternion. + + >>> q0 = random_quaternion() + >>> q1 = quaternion_conjugate(q0) + >>> q1[0] == q0[0] and all(q1[1:] == -q0[1:]) + True + + """ + q = numpy.array(quaternion, dtype=numpy.float64, copy=True) + numpy.negative(q[1:], q[1:]) + return q + + +def quaternion_inverse(quaternion): + """Return inverse of quaternion. + + >>> q0 = random_quaternion() + >>> q1 = quaternion_inverse(q0) + >>> numpy.allclose(quaternion_multiply(q0, q1), [1, 0, 0, 0]) + True + + """ + q = numpy.array(quaternion, dtype=numpy.float64, copy=True) + numpy.negative(q[1:], q[1:]) + return q / numpy.dot(q, q) + + +def quaternion_real(quaternion): + """Return real part of quaternion. + + >>> quaternion_real([3, 0, 1, 2]) + 3.0 + + """ + return float(quaternion[0]) + + +def quaternion_imag(quaternion): + """Return imaginary part of quaternion. + + >>> quaternion_imag([3, 0, 1, 2]) + array([ 0., 1., 2.]) + + """ + return numpy.array(quaternion[1:4], dtype=numpy.float64, copy=True) + + +def quaternion_slerp(quat0, quat1, fraction, spin=0, shortestpath=True): + """Return spherical linear interpolation between two quaternions. + + >>> q0 = random_quaternion() + >>> q1 = random_quaternion() + >>> q = quaternion_slerp(q0, q1, 0) + >>> numpy.allclose(q, q0) + True + >>> q = quaternion_slerp(q0, q1, 1, 1) + >>> numpy.allclose(q, q1) + True + >>> q = quaternion_slerp(q0, q1, 0.5) + >>> angle = math.acos(numpy.dot(q0, q)) + >>> numpy.allclose(2, math.acos(numpy.dot(q0, q1)) / angle) or \ + numpy.allclose(2, math.acos(-numpy.dot(q0, q1)) / angle) + True + + """ + q0 = unit_vector(quat0[:4]) + q1 = unit_vector(quat1[:4]) + if fraction == 0.0: + return q0 + elif fraction == 1.0: + return q1 + d = numpy.dot(q0, q1) + if abs(abs(d) - 1.0) < _EPS: + return q0 + if shortestpath and d < 0.0: + # invert rotation + d = -d + numpy.negative(q1, q1) + angle = math.acos(d) + spin * math.pi + if abs(angle) < _EPS: + return q0 + isin = 1.0 / math.sin(angle) + q0 *= math.sin((1.0 - fraction) * angle) * isin + q1 *= math.sin(fraction * angle) * isin + q0 += q1 + return q0 + + +def random_quaternion(rand=None): + """Return uniform random unit quaternion. + + rand: array like or None + Three independent random variables that are uniformly distributed + between 0 and 1. + + >>> q = random_quaternion() + >>> numpy.allclose(1, vector_norm(q)) + True + >>> q = random_quaternion(numpy.random.random(3)) + >>> len(q.shape), q.shape[0]==4 + (1, True) + + """ + if rand is None: + rand = numpy.random.rand(3) + else: + assert len(rand) == 3 + r1 = numpy.sqrt(1.0 - rand[0]) + r2 = numpy.sqrt(rand[0]) + pi2 = math.pi * 2.0 + t1 = pi2 * rand[1] + t2 = pi2 * rand[2] + return numpy.array([numpy.cos(t2)*r2, numpy.sin(t1)*r1, + numpy.cos(t1)*r1, numpy.sin(t2)*r2]) + + +def random_rotation_matrix(rand=None): + """Return uniform random rotation matrix. + + rand: array like + Three independent random variables that are uniformly distributed + between 0 and 1 for each returned quaternion. 
+ + >>> R = random_rotation_matrix() + >>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4)) + True + + """ + return quaternion_matrix(random_quaternion(rand)) + + +class Arcball(object): + """Virtual Trackball Control. + + >>> ball = Arcball() + >>> ball = Arcball(initial=numpy.identity(4)) + >>> ball.place([320, 320], 320) + >>> ball.down([500, 250]) + >>> ball.drag([475, 275]) + >>> R = ball.matrix() + >>> numpy.allclose(numpy.sum(R), 3.90583455) + True + >>> ball = Arcball(initial=[1, 0, 0, 0]) + >>> ball.place([320, 320], 320) + >>> ball.setaxes([1, 1, 0], [-1, 1, 0]) + >>> ball.constrain = True + >>> ball.down([400, 200]) + >>> ball.drag([200, 400]) + >>> R = ball.matrix() + >>> numpy.allclose(numpy.sum(R), 0.2055924) + True + >>> ball.next() + + """ + def __init__(self, initial=None): + """Initialize virtual trackball control. + + initial : quaternion or rotation matrix + + """ + self._axis = None + self._axes = None + self._radius = 1.0 + self._center = [0.0, 0.0] + self._vdown = numpy.array([0.0, 0.0, 1.0]) + self._constrain = False + if initial is None: + self._qdown = numpy.array([1.0, 0.0, 0.0, 0.0]) + else: + initial = numpy.array(initial, dtype=numpy.float64) + if initial.shape == (4, 4): + self._qdown = quaternion_from_matrix(initial) + elif initial.shape == (4, ): + initial /= vector_norm(initial) + self._qdown = initial + else: + raise ValueError("initial not a quaternion or matrix") + self._qnow = self._qpre = self._qdown + + def place(self, center, radius): + """Place Arcball, e.g. when window size changes. + + center : sequence[2] + Window coordinates of trackball center. + radius : float + Radius of trackball in window coordinates. + + """ + self._radius = float(radius) + self._center[0] = center[0] + self._center[1] = center[1] + + def setaxes(self, *axes): + """Set axes to constrain rotations.""" + if axes is None: + self._axes = None + else: + self._axes = [unit_vector(axis) for axis in axes] + + @property + def constrain(self): + """Return state of constrain to axis mode.""" + return self._constrain + + @constrain.setter + def constrain(self, value): + """Set state of constrain to axis mode.""" + self._constrain = bool(value) + + def down(self, point): + """Set initial cursor window coordinates and pick constrain-axis.""" + self._vdown = arcball_map_to_sphere(point, self._center, self._radius) + self._qdown = self._qpre = self._qnow + if self._constrain and self._axes is not None: + self._axis = arcball_nearest_axis(self._vdown, self._axes) + self._vdown = arcball_constrain_to_axis(self._vdown, self._axis) + else: + self._axis = None + + def drag(self, point): + """Update current cursor window coordinates.""" + vnow = arcball_map_to_sphere(point, self._center, self._radius) + if self._axis is not None: + vnow = arcball_constrain_to_axis(vnow, self._axis) + self._qpre = self._qnow + t = numpy.cross(self._vdown, vnow) + if numpy.dot(t, t) < _EPS: + self._qnow = self._qdown + else: + q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]] + self._qnow = quaternion_multiply(q, self._qdown) + + def next(self, acceleration=0.0): + """Continue rotation in direction of last drag.""" + q = quaternion_slerp(self._qpre, self._qnow, 2.0+acceleration, False) + self._qpre, self._qnow = self._qnow, q + + def matrix(self): + """Return homogeneous rotation matrix.""" + return quaternion_matrix(self._qnow) + + +def arcball_map_to_sphere(point, center, radius): + """Return unit sphere coordinates from window coordinates.""" + v0 = (point[0] - center[0]) / radius + v1 = (center[1] - 
point[1]) / radius + n = v0*v0 + v1*v1 + if n > 1.0: + # position outside of sphere + n = math.sqrt(n) + return numpy.array([v0/n, v1/n, 0.0]) + else: + return numpy.array([v0, v1, math.sqrt(1.0 - n)]) + + +def arcball_constrain_to_axis(point, axis): + """Return sphere point perpendicular to axis.""" + v = numpy.array(point, dtype=numpy.float64, copy=True) + a = numpy.array(axis, dtype=numpy.float64, copy=True) + v -= a * numpy.dot(a, v) # on plane + n = vector_norm(v) + if n > _EPS: + if v[2] < 0.0: + numpy.negative(v, v) + v /= n + return v + if a[2] == 1.0: + return numpy.array([1.0, 0.0, 0.0]) + return unit_vector([-a[1], a[0], 0.0]) + + +def arcball_nearest_axis(point, axes): + """Return axis, which arc is nearest to point.""" + point = numpy.array(point, dtype=numpy.float64, copy=False) + nearest = None + mx = -1.0 + for axis in axes: + t = numpy.dot(arcball_constrain_to_axis(point, axis), point) + if t > mx: + nearest = axis + mx = t + return nearest + + +# epsilon for testing whether a number is close to zero +_EPS = numpy.finfo(float).eps * 4.0 + +# axis sequences for Euler angles +_NEXT_AXIS = [1, 2, 0, 1] + +# map axes strings to/from tuples of inner axis, parity, repetition, frame +_AXES2TUPLE = { + 'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0), + 'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0), + 'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0), + 'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0), + 'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1), + 'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1), + 'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1), + 'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)} + +_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items()) + + +def vector_norm(data, axis=None, out=None): + """Return length, i.e. Euclidean norm, of ndarray along axis. + + >>> v = numpy.random.random(3) + >>> n = vector_norm(v) + >>> numpy.allclose(n, numpy.linalg.norm(v)) + True + >>> v = numpy.random.rand(6, 5, 3) + >>> n = vector_norm(v, axis=-1) + >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2))) + True + >>> n = vector_norm(v, axis=1) + >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1))) + True + >>> v = numpy.random.rand(5, 4, 3) + >>> n = numpy.empty((5, 3)) + >>> vector_norm(v, axis=1, out=n) + >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1))) + True + >>> vector_norm([]) + 0.0 + >>> vector_norm([1]) + 1.0 + + """ + data = numpy.array(data, dtype=numpy.float64, copy=True) + if out is None: + if data.ndim == 1: + return math.sqrt(numpy.dot(data, data)) + data *= data + out = numpy.atleast_1d(numpy.sum(data, axis=axis)) + numpy.sqrt(out, out) + return out + else: + data *= data + numpy.sum(data, axis=axis, out=out) + numpy.sqrt(out, out) + + +def unit_vector(data, axis=None, out=None): + """Return ndarray normalized by length, i.e. Euclidean norm, along axis. 
+ + >>> v0 = numpy.random.random(3) + >>> v1 = unit_vector(v0) + >>> numpy.allclose(v1, v0 / numpy.linalg.norm(v0)) + True + >>> v0 = numpy.random.rand(5, 4, 3) + >>> v1 = unit_vector(v0, axis=-1) + >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=2)), 2) + >>> numpy.allclose(v1, v2) + True + >>> v1 = unit_vector(v0, axis=1) + >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=1)), 1) + >>> numpy.allclose(v1, v2) + True + >>> v1 = numpy.empty((5, 4, 3)) + >>> unit_vector(v0, axis=1, out=v1) + >>> numpy.allclose(v1, v2) + True + >>> list(unit_vector([])) + [] + >>> list(unit_vector([1])) + [1.0] + + """ + if out is None: + data = numpy.array(data, dtype=numpy.float64, copy=True) + if data.ndim == 1: + data /= math.sqrt(numpy.dot(data, data)) + return data + else: + if out is not data: + out[:] = numpy.array(data, copy=False) + data = out + length = numpy.atleast_1d(numpy.sum(data*data, axis)) + numpy.sqrt(length, length) + if axis is not None: + length = numpy.expand_dims(length, axis) + data /= length + if out is None: + return data + + +def random_vector(size): + """Return array of random doubles in the half-open interval [0.0, 1.0). + + >>> v = random_vector(10000) + >>> numpy.all(v >= 0) and numpy.all(v < 1) + True + >>> v0 = random_vector(10) + >>> v1 = random_vector(10) + >>> numpy.any(v0 == v1) + False + + """ + return numpy.random.random(size) + + +def vector_product(v0, v1, axis=0): + """Return vector perpendicular to vectors. + + >>> v = vector_product([2, 0, 0], [0, 3, 0]) + >>> numpy.allclose(v, [0, 0, 6]) + True + >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] + >>> v1 = [[3], [0], [0]] + >>> v = vector_product(v0, v1) + >>> numpy.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]]) + True + >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] + >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] + >>> v = vector_product(v0, v1, axis=1) + >>> numpy.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]]) + True + + """ + return numpy.cross(v0, v1, axis=axis) + + +def angle_between_vectors(v0, v1, directed=True, axis=0): + """Return angle between vectors. + + If directed is False, the input vectors are interpreted as undirected axes, + i.e. the maximum angle is pi/2. + + >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3]) + >>> numpy.allclose(a, math.pi) + True + >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False) + >>> numpy.allclose(a, 0) + True + >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] + >>> v1 = [[3], [0], [0]] + >>> a = angle_between_vectors(v0, v1) + >>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532]) + True + >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] + >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] + >>> a = angle_between_vectors(v0, v1, axis=1) + >>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532]) + True + + """ + v0 = numpy.array(v0, dtype=numpy.float64, copy=False) + v1 = numpy.array(v1, dtype=numpy.float64, copy=False) + dot = numpy.sum(v0 * v1, axis=axis) + dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis) + return numpy.arccos(dot if directed else numpy.fabs(dot)) + + +def inverse_matrix(matrix): + """Return inverse of square transformation matrix. + + >>> M0 = random_rotation_matrix() + >>> M1 = inverse_matrix(M0.T) + >>> numpy.allclose(M1, numpy.linalg.inv(M0.T)) + True + >>> for size in range(1, 7): + ... M0 = numpy.random.rand(size, size) + ... M1 = inverse_matrix(M0) + ... 
if not numpy.allclose(M1, numpy.linalg.inv(M0)): print(size) + + """ + return numpy.linalg.inv(matrix) + + +def concatenate_matrices(*matrices): + """Return concatenation of series of transformation matrices. + + >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5 + >>> numpy.allclose(M, concatenate_matrices(M)) + True + >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T)) + True + + """ + M = numpy.identity(4) + for i in matrices: + M = numpy.dot(M, i) + return M + + +def is_same_transform(matrix0, matrix1): + """Return True if two matrices perform same transformation. + + >>> is_same_transform(numpy.identity(4), numpy.identity(4)) + True + >>> is_same_transform(numpy.identity(4), random_rotation_matrix()) + False + + """ + matrix0 = numpy.array(matrix0, dtype=numpy.float64, copy=True) + matrix0 /= matrix0[3, 3] + matrix1 = numpy.array(matrix1, dtype=numpy.float64, copy=True) + matrix1 /= matrix1[3, 3] + return numpy.allclose(matrix0, matrix1) + + +def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'): + """Try import all public attributes from module into global namespace. + + Existing attributes with name clashes are renamed with prefix. + Attributes starting with underscore are ignored by default. + + Return True on successful import. + + """ + import warnings + from importlib import import_module + try: + if not package: + module = import_module(name) + else: + module = import_module('.' + name, package=package) + except ImportError: + if warn: + warnings.warn("failed to import module %s" % name) + else: + for attr in dir(module): + if ignore and attr.startswith(ignore): + continue + if prefix: + if attr in globals(): + globals()[prefix + attr] = globals()[attr] + elif warn: + warnings.warn("no Python implementation of " + attr) + globals()[attr] = getattr(module, attr) + return True + + +# _import_module('_transformations') + +if __name__ == "__main__": + import doctest + import random # used in doctests + numpy.set_printoptions(suppress=True, precision=5) + doctest.testmod() diff --git a/CameraNetwork/utils.py b/CameraNetwork/utils.py index 447a89f..7a41e29 100755 --- a/CameraNetwork/utils.py +++ b/CameraNetwork/utils.py @@ -240,15 +240,13 @@ def upload_file_to_proxy( return scp_proc -def save_camera_data( - general_settings_path, - capture_settings_path, - camera_settings=None, - capture_settings=None): +def save_camera_data(general_settings_path, capture_settings_path, + camera_settings=None, capture_settings=None): if camera_settings is not None: with open(general_settings_path, 'wb') as f: json.dump(camera_settings, f, sort_keys=True, indent=4, separators=(',', ': ')) + if capture_settings is not None: with open(capture_settings_path, 'wb') as f: json.dump(capture_settings, f, sort_keys=True, indent=4, separators=(',', ': ')) @@ -608,15 +606,19 @@ def f(angles, calculated_directions, measured_directions): def find_camera_orientation(calculated_directions, measured_directions): """ - Find the rotation of the camera based on the coordinates of a celestail object - The input is two sets. The first is x,y image coordinates of the object (taken - from several images). The second set is the azimuth/altitude coordinates of the - celestial object (in Horizontal coordinate system). - The output is the rotation matrix of the camera. The rotation matrix is converts - between world coordinates to camera coordinates, where the world coordinates - are centered in camera, z is in the zenith and the x-axis points to the North. 
- The inner calibration of the camera is given as a function that converts - between the image coordinates and the camera coordinates. + Find the rotation of the camera based on the coordinates of a celestial object. + + The input includes two sets: + 1. `calculated_directions` - is x,y image coordinates of the object (taken from several images). + 2. `measured_directions` - is the azimuth/altitude coordinates of the celestial object (in Horizontal coordinate system). + + The output is the rotation matrix of the camera. + The rotation matrix, converts between world coordinates to camera coordinates, + where the world coordinates are centered in camera, z is in the zenith and the x-axis + points to the North. + + The inner calibration of the camera is given as a function that + converts between the image coordinates and the camera coordinates. Args: calculated_directions (array like): The reference direction of the celestial diff --git a/CameraNetwork/visualization.py b/CameraNetwork/visualization.py index eab1e53..d4a22a6 100644 --- a/CameraNetwork/visualization.py +++ b/CameraNetwork/visualization.py @@ -33,25 +33,28 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -from __future__ import division -import cv2 -import matplotlib.mlab as ml -import numpy as np -import pymap3d - - -def loadMapData(): - """Load height data for map visualization.""" - - path1 = r"..\data\reconstructions\N32E034.hgt" - path2 = r"..\data\reconstructions\N32E035.hgt" - path3 = r"..\data\reconstructions\haifa_map.jpg" - - with open(path1) as hgt_data: - hgt1 = np.fromfile(hgt_data, np.dtype('>i2')).reshape((1201, 1201))[:1200, :1200] - with open(path2) as hgt_data: - hgt2 = np.fromfile(hgt_data, np.dtype('>i2')).reshape((1201, 1201))[:1200, :1200] - hgt = np.hstack((hgt1, hgt2)).astype(np.float32) +from __future__ import division +import cv2 +import matplotlib.mlab as ml +import numpy as np +import pymap3d +import os +import platform + +def loadMapData(): + """Load height data for map visualization.""" + + relpath = os.path.dirname(os.path.realpath(__file__)) + path1 = os.path.abspath(os.path.join(relpath, r'..', r'data', r'reconstructions', r'N32E034.hgt')) + path2 = os.path.abspath(os.path.join(relpath, r'..', r'data', r'reconstructions', r'N32E035.hgt')) + path3 = os.path.abspath(os.path.join(relpath, r'..', r'data', r'reconstructions', r'haifa_map.jpg')) + + + with open(path1) as hgt_data: + hgt1 = np.fromfile(hgt_data, np.dtype('>i2')).reshape((1201, 1201))[:1200, :1200] + with open(path2) as hgt_data: + hgt2 = np.fromfile(hgt_data, np.dtype('>i2')).reshape((1201, 1201))[:1200, :1200] + hgt = np.hstack((hgt1, hgt2)).astype(np.float32) lon, lat = np.meshgrid(np.linspace(34, 36, 2400, endpoint=False), np.linspace(32, 33, 1200, endpoint=False)[::-1]) map_texture = cv2.cvtColor(cv2.imread(path3), cv2.COLOR_BGR2RGB) @@ -60,40 +63,40 @@ def loadMapData(): lat[100:400, 1100:1400], lon[100:400, 1100:1400], \ hgt[100:400, 1100:1400], map_texture[100:400, 1100:1400, ...] - -def calcSeaMask(hgt_array): - """Calc a masking to the sea. - - Note: - This code is uses empirical magic number, and should be adjusted if - grid sizes change. 
- """ - - hgt_u8 = (255 * (hgt_array - hgt_array.min()) / (hgt_array.max() - hgt_array.min())).astype(np.uint8) - - mask = (hgt_u8 > 7).astype(np.uint8)*255 - kernel_open = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) - mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel_open) - kernel_close = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (16, 16)) - mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel_close) - mask[250:, 250:] = 255 - - return mask < 255 - - -def convertMapData(lat, lon, hgt, map_texture, lat0=32.775776, lon0=35.024963, alt0=229): - """Convert lat/lon/height data to grid data.""" - - n, e, d = pymap3d.geodetic2ned( - lat, lon, hgt, - lat0=lat0, lon0=lon0, h0=alt0) - - x, y, z = e, n, -d - - xi = np.linspace(-10000, 10000, 300) - yi = np.linspace(-10000, 10000, 300) - X, Y = np.meshgrid(xi, yi) - + +def calcSeaMask(hgt_array): + """Calc a masking to the sea. + + Note: + This code is uses empirical magic number, and should be adjusted if + grid sizes change. + """ + + hgt_u8 = (255 * (hgt_array - hgt_array.min()) / (hgt_array.max() - hgt_array.min())).astype(np.uint8) + + mask = (hgt_u8 > 7).astype(np.uint8)*255 + kernel_open = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3)) + mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel_open) + kernel_close = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (16, 16)) + mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel_close) + mask[250:, 250:] = 255 + + return mask < 255 + + +def convertMapData(lat, lon, hgt, map_texture, lat0=32.775776, lon0=35.024963, alt0=229): + """Convert lat/lon/height data to grid data.""" + + n, e, d = pymap3d.geodetic2ned( + lat, lon, hgt, + lat0=lat0, lon0=lon0, h0=alt0) + + x, y, z = e, n, -d + + xi = np.linspace(-10000, 10000, 300) + yi = np.linspace(-10000, 10000, 300) + X, Y = np.meshgrid(xi, yi) + Z = ml.griddata(y.flatten(), x.flatten(), z.flatten(), yi, xi, interp='linear') R = ml.griddata(y.flatten(), x.flatten(), map_texture[..., 0].flatten(), yi, xi, interp='linear') G = ml.griddata(y.flatten(), x.flatten(), map_texture[..., 1].flatten(), yi, xi, interp='linear') diff --git a/Image_pipeline.md b/Image_pipeline.md new file mode 100644 index 0000000..1f6c741 --- /dev/null +++ b/Image_pipeline.md @@ -0,0 +1,214 @@ +# Image pipeline of *cameranetwork* +This document describes the operations that are being done on images, +and prepares the inputs required to run 3D reconstructions. + +## Prepossessing on board: +1. [Image capturing]('CameraNetwork/controller.py#L1578-1649'): multi shot averaging (for SNR enhancement), +and for several exposure times (for HDR). +2. [Camera prepossessing]('CameraNetwork/controller.py#L1388-1527'): dark image subtraction, normalization (according to fisheye model), +HDR calculation, and vignetting correction. + >Note: This function is called from [seekImageArray()]('CameraNetwork/controller.py#1272), and [handle_array()]('CameraNetwork/controller.py#1221), +that means that the saved images are before prepossessing, and one needs to call one of these in order to apply the prepossessing. + +## Prepossessing using GUI: +### 1. Masking & Space carving: +1. 2D *grabcut*: + + - Choose under `Arrays` tab: `view settings` -> `space carving`. + + - It creates a cloud mask based on dilation operation (Applied by a maximum filter). + + - This operation is done in [ArrayModel._update_cloud_weights()](https://github.com/Addalin/cameranetwork/blob/f26cdd785dabfc6f8d217a0e9b16fa1870d26fa9/CameraNetwork/gui/main.py#L954) +2. 
2D *sunshader*: + + - Choose under `Arrays` tab: `view settings` -> `sun shader`. + + - It creates a sunshader mask based on an *erosion* operation (since the mask is an inversion of the sunshader); see the sketch at the end of this section. + + - This is done in [calcSunshaderMask()](https://github.com/Addalin/cameranetwork/blob/945e9e8519216d2bd8a75afa6e650367d8f7ee88/CameraNetwork/image_utils.py#L544). + +3. 2D *ROI*: + + - Choose under `Arrays` tab: `view settings` -> `Widgets` -> `show ROI`. + + - This is a rectangular mask that determines the relevant area of the image that observes the volume of interest. +Currently, it is set manually. + + - Choosing the option `Show Grid` presents the inspected volume (shown on the map as a blue cube) and its voxels as a grid projection +(red scatter plot on each of the images), which helps to set the ROI. + + - The ROI can be loaded from earlier sessions or saved (as a .pkl file). + +4. 2D and 3D space carving: + - This operation is done in [Map3dModel.do_space_carving()](https://github.com/Addalin/cameranetwork/blob/19efb5bbf0350d6cbd3b6d01efaaa08347b15327/CameraNetwork/gui/main.py#L317) + +Finally, [exportData()](https://github.com/Addalin/cameranetwork/blob/02f1e7f8c0f7d88b9e603daf7ddb0b6c55a8f237/CameraNetwork/gui/main.py#L1807-L1895) +saves space_carve.pkl and calls the export to shdom. + +[exportToShdom()](https://github.com/Addalin/cameranetwork/blob/c85e88bd0cf35bbd095744e2b2dc92600eb6e0c5/CameraNetwork/export.py#L51-L168): +includes the final masking of ROI and sunshader, and saves the sun mask separately. +##### Questions regarding space carving: +1. What is the difference between this mask and the mask that is saved to space_carve.pkl? + + +#### Notes and questions regarding ROI: +1. In the class [image_analysis](https://github.com/Addalin/cameranetwork/blob/994af1ad6f7d465ec5bff38d3ca22e338225e9fe/CameraNetwork/gui/image_analysis.py#L129-L228), +the following objects exist: +the *"ROI"* object is based on [a generic region-of-interest widget](http://www.pyqtgraph.org/documentation/graphicsItems/roi.html#pyqtgraph.ROI), +the projected grid *"grid_scatter"* is a [ScatterPlotItem](http://www.pyqtgraph.org/documentation/graphicsItems/scatterplotitem.html#pyqtgraph.ScatterPlotItem), +and *"mask_ROI"* is a [pg.PolyLineROI](http://www.pyqtgraph.org/documentation/graphicsItems/roi.html#pyqtgraph.PolyLineROI). + What is the relation between ROI and ROI_mask? +Which of these objects is used in the final mask calculation of the image? + +2. When [drawing the camera](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L266-L315) there is an option to also draw + ["roi_mesh"](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L301-L310), the 3D projection of the cameras' ROIs. + Currently, it is not visually clear, and it seems that these objects are not perfectly aligned with the 2D ROIs. + This requires further investigation, e.g. how and when is the 3D mesh calculated? + Also, we may need to update the mesh visualization of the ROI in 3D. + +3. ***TODO*** Find a way to calculate the ROI automatically based on the grid projection. 
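+The following is a minimal sketch of the dilation/erosion style masking described above. It is not the project's actual code (the real logic lives in `ArrayModel._update_cloud_weights()` and `calcSunshaderMask()`); the function name, threshold and kernel size are illustrative assumptions only.
+```python
+import cv2
+import numpy as np
+
+def sketch_cloud_and_sunshader_masks(image_gray, sunshader_mask_raw,
+                                     cloud_thresh=0.5, kernel_size=15):
+    """Illustrative only: dilation-based cloud mask, erosion-based sunshader mask."""
+    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (kernel_size, kernel_size))
+
+    # Cloud mask: threshold (assuming a normalized grayscale image), then
+    # dilate (a maximum filter) to be permissive around cloud edges.
+    clouds = (image_gray > cloud_thresh).astype(np.uint8)
+    cloud_mask = cv2.dilate(clouds, kernel)
+
+    # Sunshader mask: the raw mask marks the shader itself, so erode its
+    # inverse to stay clear of the shader arm.
+    sun_ok = (~sunshader_mask_raw.astype(bool)).astype(np.uint8)
+    sunshader_mask = cv2.erode(sun_ok, kernel)
+
+    return cloud_mask.astype(bool), sunshader_mask.astype(bool)
+```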
### 2. Extrinsic calibration +This process is based on the sun position and the sunshader. +It should be applied on a sunny day with clear sky, and requires two steps: +1. [handle_sunshader_scan()](https://github.com/Addalin/cameranetwork/blob/4f6a0b01111725799e6796dbf206f624a99c231b/CameraNetwork/server.py#L1066-L1088) + + - calculates the sun position on the image plane (`measured_positions`), the sunshader angle, and the color value, and then saves them all to `sun_positions.csv` (under the sun_positions folder). + + - This process is done every ~6 min. +2. [handle_extrinsic()](https://github.com/Addalin/cameranetwork/blob/3552f2453f3d42942ae6f90c2245b9ccb7c3dbce/CameraNetwork/controller.py#L965-L1070) - + + - loads `measured_positions` from `sun_positions.csv` +and calculates the `measured_directions` according to the fisheye model (undistortion) +on a unit sphere. + + - The fisheye model is pre-determined during the [intrinsic calibration process](https://github.com/Addalin/cameranetwork/blob/3552f2453f3d42942ae6f90c2245b9ccb7c3dbce/CameraNetwork/controller.py#L888-L901). + + - Using the measurement times in `sun_positions.csv` and *ephem*, the function calculates the sun directions `calculated_directions`, +and then estimates the camera orientation by fitting `measured_directions` to `calculated_directions`. + + - This process also yields the rotation matrix *R* (camera-to-world transform). + +To apply the extrinsic calibration from the GUI: + + - `Servers` -> + `choose camera` -> `Extrinsic` tab -> + `extrinsic calibrate` + - This also saves extrinsic_data.npy in the camera folder. + + +To save the extrinsic calibration of all cameras: + + - `Arrays` --> `Save Extrinsic` + + - Saves into the specific day's folder under captured_images. + +Graphical Result: + +![extrinsic](docs/source/images/snapshot_extrinsic.png) + +- For a mathematical description, see pp. 49-50 in Amit's thesis, section: "Self-Calibration in a Camera Network". + +### 3. Radiometric calibration: +To perform radiometric calibration with a sunphotometer, +the camera should be close to the sunphotometer, and the measurements should be done on a clear-sky day. + +To get the sunphotometer measurements: + +1. Download files from NASA's [AERONET site](https://aeronet.gsfc.nasa.gov/cgi-bin/webtool_inv_v3?stage=3&region=Middle_East&state=Israel&site=Technion_Haifa_IL&place_code=10&if_polarized=0). + + - All the current files can be found under `.../data/aeronet`. + + - Some manipulation, such as deleting the first rows, might be needed for new data. + + - The meaning of the numbers and measurements can be found [here](https://aeronet.gsfc.nasa.gov/new_web/units.html). + + - Specifically: the sunphotometer irradiance units are ![formula](https://render.githubusercontent.com/render/math?math=uW/cm^2/sr/nm). + +2. The function [handle_radiometric()](https://github.com/Addalin/cameranetwork/blob/3552f2453f3d42942ae6f90c2245b9ccb7c3dbce/CameraNetwork/controller.py#L1095-L1178): + + - reads the sunphotometer measurements of the 3 channels at the requested day and hour. + + - Then it estimates the locations of the pixels on the image plane corresponding to the Almucantar measurement angles. + + - Then the radiometric fit between the sunphotometer measurements and the camera samples is estimated (see the sketch below). + +3. The radiometric results are saved to radiometric.pkl under the camera's folder. + +For example: + +![Radiometric](docs/source/images/radiometric_calibration.png) + +##### Questions regarding radiometric calibration: +1. What are the final conversion units? +2. What inputs/changes are required for a new experiment? 
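+A minimal sketch of such a per-channel fit is given below. It is illustrative only and is not the actual handle_radiometric() implementation; the function and variable names are assumptions, and it presumes the camera samples and sunphotometer values have already been matched along the Almucantar angles.
+```python
+import numpy as np
+
+def fit_radiometric_scale(camera_samples, sunphotometer_values):
+    """Illustrative least-squares fit sunphotometer = a * camera + b, per channel.
+
+    Both inputs are arrays of shape (n_angles, 3), already matched along the
+    Almucantar scan angles.
+    """
+    fits = []
+    for ch in range(camera_samples.shape[1]):
+        # Degree-1 polynomial fit: slope a converts camera units, b is an offset.
+        a, b = np.polyfit(camera_samples[:, ch], sunphotometer_values[:, ch], 1)
+        fits.append((a, b))
+    return fits
+```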
### 4. 3D grid and space carving: +The [geographic coordinate systems](https://en.wikipedia.org/wiki/Geographic_coordinate_system) that are used here are: +1. The ECEF (earth-centered, earth-fixed) frame, the common 3D grid that is used for conveniently moving the point-of-view (the observing camera) around the grid according to the cameras' locations (latitude \phi, longitude \lambda, X_ecef, Y_ecef, Z_ecef). + +![ECEF](docs/source/images/ECEF.png) + +2. Local tangent plane coordinates (LTP). The NEU (North East Up) grid uses (X, Y, Z) as follows: + ![ECEF2ENU](docs/source/images/ECEF2ENU.png) + +See their definitions in the project [here](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L1393-L1420). +An image of the relation between both coordinate systems: + +![ECEF_ENU](docs/source/images/ECEF_ENU_Longitude_Latitude_relationships.png) + +In *cameranetwork*, the NED (North East Down) grid convention (X, Y, -Z) is used for the visualization and reconstruction grid. + +There are several conversion processes that take place: + +1. [ProjectGrid()](https://github.com/Addalin/cameranetwork/blob/fa7d2b2f29d5217cdc2b216ae55d147393e9db0d/CameraNetwork/image_utils.py#L615-L645) - projects the 3D grid of the volume of interest onto the image plane, using ecef2ned in [projectECEF()](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L881-L933) (a sketch of this step is given below). +The 3D NED grid is of size 12 x 12 x 10 [km^3], having 81 x 81 x 121 voxels; each voxel is of size 150 x 150 x 100 [m^3]. +The 3D grid is projected to a 2D grid on the image plane (shown as red dots on the image plane). +This is done when choosing: `View settings`-->`Widgets`--> `show grid`. + + +![roi_grid_los_and_settings](docs/source/images/roi_grid_los_and_settings.png) + + +This method is also used when computing the [space carve](https://github.com/Addalin/cameranetwork/blob/fa7d2b2f29d5217cdc2b216ae55d147393e9db0d/CameraNetwork/image_utils.py#L738-L810) score for each view. +This is done when choosing in the map view `Space carving`--> `Update` --> `Show space carving`. + +![space_carving](docs/source/images/space_carving.png) + +Another usage of this method is when applying [Update LOS](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L620-L667). This function converts the +LOS of a single image to the epipolar lines on all other images. + + +![camera_array-los1](docs/source/images/camera_array.png) + +To change the line of sight, simply press inside any one of the images. The others should update automatically. + +![los2](docs/source/images/los2.png) + +The change should also be visible in the map if `show LOS` has been enabled. + +The 3D grid and ROI can also be seen in the map. + +![map_los_roi_grid](docs/source/images/map_los_roi_grid.png) + + +2. The main process, [do_space_carving()](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L317-L337), of the 3D grid calls the visual hull calculation using a process pool. +[calc_visual_hull()](https://github.com/Addalin/cameranetwork/blob/2583c47e52d937ba70c5d7f9293d970c0fcba428/CameraNetwork/image_utils.py#L655-L739) runs space carving for each view separately and collects the results. + +(// TODO: add snapshot and explanation here.) 
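+Below is a minimal sketch of the grid re-anchoring step described above (grid voxels expressed in the NED frame of one origin, re-expressed in a camera's local NED frame and then handed to a projection function). It is not the actual ProjectGrid()/projectECEF() code; `cam_project` stands in for the fisheye model and all names are illustrative.
+```python
+import numpy as np
+import pymap3d
+
+def sketch_project_grid_to_camera(grid_ned, grid_origin, cam_origin, cam_project):
+    """Illustrative only: re-anchor a NED grid to a camera and project it.
+
+    grid_ned    : (N, 3) array of (north, east, down) grid points relative to
+                  grid_origin = (lat0, lon0, alt0).
+    cam_origin  : (lat, lon, alt) of the camera.
+    cam_project : assumed callable mapping unit direction vectors in the
+                  camera frame to pixel coordinates (stand-in for the fisheye model).
+    """
+    lat0, lon0, alt0 = grid_origin
+    cam_lat, cam_lon, cam_alt = cam_origin
+
+    # Grid points -> geodetic -> NED relative to the camera.
+    lat, lon, alt = pymap3d.ned2geodetic(
+        grid_ned[:, 0], grid_ned[:, 1], grid_ned[:, 2], lat0, lon0, alt0)
+    n, e, d = pymap3d.geodetic2ned(lat, lon, alt, cam_lat, cam_lon, cam_alt)
+
+    # Unit view directions from the camera to each voxel, then to pixels.
+    dirs = np.stack([n, e, d], axis=1)
+    dirs /= np.linalg.norm(dirs, axis=1, keepdims=True)
+    return cam_project(dirs)
+```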
+### 4. 3D grid and space carving:
+The [geographic coordinate systems](https://en.wikipedia.org/wiki/Geographic_coordinate_system) that are used here are:
+1. The ECEF (earth-centered, earth-fixed) frame. This is the common 3D grid, used to conveniently move the point-of-view (the observing camera) around the grid according to each camera's location (latitude \phi, longitude \lambda, X_ecef, Y_ecef, Z_ecef).
+
+![ECEF](docs/source/images/ECEF.png)
+
+2. Local tangent plane (LTP) coordinates. The NEU (North East Up) grid uses (X, Y, Z) as follows:
+ ![ECEF2ENU](docs/source/images/ECEF2ENU.png)
+
+See their definitions in the project [here](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L1393-L1420).
+An image of the relation between both coordinate systems:
+
+![ECEF_ENU](docs/source/images/ECEF_ENU_Longitude_Latitude_relationships.png)
+
+In *cameranetwork*, the NED (North East Down) convention, i.e. the grid (X, Y, -Z), is used for the visualization and reconstruction grid.
+
+There are several conversion processes being done:
+
+1. [ProjectGrid()](https://github.com/Addalin/cameranetwork/blob/fa7d2b2f29d5217cdc2b216ae55d147393e9db0d/CameraNetwork/image_utils.py#L615-L645) - projects the 3D grid of the volume of interest onto the image plane, using ecef2ned in [projectECEF()](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L881-L933).
+The 3D NED grid is of size 12 X 12 X 10 [km^3], with 81 X 81 X 121 voxels; each voxel is 150 X 150 X 100 [m^3].
+The 3D grid is projected to a 2D grid on the image plane (shown as red dots on the image plane).
+This is done when choosing: `View settings`-->`Widgets`--> `show grid`.
+
+
+![roi_grid_los_and_settings](docs/source/images/roi_grid_los_and_settings.png)
+
+
+This method is also used when computing the [space carve](https://github.com/Addalin/cameranetwork/blob/fa7d2b2f29d5217cdc2b216ae55d147393e9db0d/CameraNetwork/image_utils.py#L738-L810) score per view.
+This is done when choosing, in the map view, `Space carving`--> `Update` --> `Show space carving`.
+
+![space_carving](docs/source/images/space_carving.png)
+
+Another usage of this method is when applying [Update LOS](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L620-L667). This function converts the LOS of a single image to the epipolar lines on all the other images.
+
+
+![camera_array-los1](docs/source/images/camera_array.png)
+
+To change the line of sight, simply click inside any one of the images. The others should update automatically.
+
+![los2](docs/source/images/los2.png)
+
+The change should also be visible in the map if `show LOS` has been enabled.
+
+The 3D grid and ROI can also be seen in the map.
+
+![map_los_roi_grid](docs/source/images/map_los_roi_grid.png)
+
+
+2. The main process, [do_space_carving()](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L317-L337), calls the visual hull calculation on the 3D grid using a process pool.
+[calc_visual_hull()](https://github.com/Addalin/cameranetwork/blob/2583c47e52d937ba70c5d7f9293d970c0fcba428/CameraNetwork/image_utils.py#L655-L739) runs space carving for each view separately and collects the results.
+
+(// TODO: add snapshot and explanation here.)
+
+[space_carve_view()](https://github.com/Addalin/cameranetwork/blob/fa7d2b2f29d5217cdc2b216ae55d147393e9db0d/CameraNetwork/image_utils.py#L739-L810) projects the 3D grid onto the corresponding image plane.
+This is repeated according to the number of perturbations chosen by the user; the final `grid_score` is the mean value over the perturbations, for the voxels that are seen from this view (a sketch of this scoring idea is given below).
+
+(// TODO: add snapshot and explanation here.)
+
+The `grid_score` is based on the `cloud_weights`, which are computed on the 2D image plane at [_update_cloud_weights()](https://github.com/Addalin/cameranetwork/blob/c69dda2adc041dc2dc98660b34e57769213f23a9/CameraNetwork/gui/main.py#L936-L985) (see the section *Computation of cloud scores maps*, p. 23, in Amit Aides' thesis).
+
+
+Following that, [calc_visual_hull()](https://github.com/Addalin/cameranetwork/blob/2583c47e52d937ba70c5d7f9293d970c0fcba428/CameraNetwork/image_utils.py#L655-L739) collects the scores from all servers/subprocesses.
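+
+Conceptually, each view projects the NED grid voxels through its calibrated camera model, samples its own 2D cloud-weight map at the projected pixels, and the per-voxel scores are then combined across the views to approximate the visual hull. The following is a minimal sketch of that idea only (not the project's implementation; `project_to_image` is a hypothetical helper that returns pixel coordinates and an in-image visibility mask):
+
+```python
+import numpy as np
+
+def carve_one_view(grid_ned, cloud_weights, project_to_image):
+    """Score each voxel of an (N, 3) NED grid using one view's 2D cloud-weight map."""
+    cols, rows, visible = project_to_image(grid_ned)        # pixel coords + in-image mask
+    scores = np.zeros(len(grid_ned))
+    r = np.clip(np.round(rows[visible]).astype(int), 0, cloud_weights.shape[0] - 1)
+    c = np.clip(np.round(cols[visible]).astype(int), 0, cloud_weights.shape[1] - 1)
+    scores[visible] = cloud_weights[r, c]
+    return scores, visible
+
+def combine_views(per_view_scores, per_view_masks):
+    """Average the per-view scores over the views that actually see each voxel."""
+    total = np.sum(per_view_scores, axis=0)
+    counts = np.maximum(np.sum(per_view_masks, axis=0), 1)
+    return total / counts
+```
+
+Voxels that score highly in all the views that see them are kept as cloud candidates, which is the space-carving intuition behind the `grid_score`.
+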
+### TODO: Other issues to cover regarding the image pipeline:
+1. Calculate the pixels' phase function.
+2. Intrinsic calibration.
diff --git a/README.md b/README.md
index 8702455..695eeb6 100644
--- a/README.md
+++ b/README.md
@@ -3,27 +3,53 @@ CameraNetwork
 Code for running and analyzing the Camera Network
-Latest version can be downloaded from [bitbucket](http://bitbucket.org/amitibo/CameraNetwork_git).
+Latest version can be downloaded from [github](https://github.com/Addalin/cameranetwork.git).
+
+[Installation Instructions](docs/source/install.rst)
+
+[Usage Instructions](docs/source/usage.rst)
 
 Documentation
 -------------
+[![Documentation Status](https://readthedocs.org/projects/camera-network/badge/?version=latest)](https://camera-network.readthedocs.io/en/latest/?badge=latest)
 
-Documentation is provided using [sphinx](http://www.sphinx-doc.org/).
-To compile the documentation:
+Documentation is provided using [sphinx](http://www.sphinx-doc.org/).
+To compile the documentation:
+Make sure Sphinx is installed: `pip install -U Sphinx`
+
+Navigate to the docs folder: `cd docs`,
+then generate the source files:
+Windows:
+ ```sh
+ sphinx-apidoc -f -o source ..\CameraNetwork
+ ```
+ Linux:
+ ```sh
+ sphinx-apidoc -f -o source/ ../CameraNetwork
+ ```
+ Finally, create the HTML documentation: `make html`
+ or, for PDF: + ```sh + sudo apt-get install texlive-latex-recommended texlive-fonts-recommended texlive-latex-extra latexmk texlive-luatex texlive-xetex + ``` + then ```sh -> cd docs -> sphinx-apidoc -f -o source ..\CameraNetwork -> make html -> make latexpdf + make latexpdf ``` +To view the docs navigate to `/docs/build/latex/CameraNetwork.pdf` or run `/docs/build/html/index.html` Author ------ Amit Aides +Contributors +------ +Adi Vainiger, +Omer Shubi + License ------- -Please see the LICENSE file for details on copying and usage. +Please see the [LICENSE](LICENSE.md) file for details on copying and usage. diff --git a/cn_client_amit.yml b/cn_client_amit.yml new file mode 100644 index 0000000..bccc793 --- /dev/null +++ b/cn_client_amit.yml @@ -0,0 +1,348 @@ +name: cn_client_amit +channels: + - defaults + - free + - conda-forge +dependencies: + - beautifulsoup4=4.5.1 + - cython=0.24.1 +# - enaml=0.10 + - ephem=3.7.6.0 + - futures=3.0.5 + - ipython=5.4.1 + - joblib=0.10.3 + - matplotlib=2.0.0 + - mayavi=4.5.0 + - numpy=1.12.1 + - opencv=3.2.0 + - pandas=0.19.2 + - paramiko=2.2.1 + - pillow=4.1.1 + - pip=19.2 +# - pyfirmata=1.1 + - pyopengl=3.1.0 + - pyqtgraph=0.10 + - python=2.7 + - scikit-image=0.12.3 + - scikit-learn=0.19 + - scipy=0.19.1 + - tornado=4.4.1 + - traits=4.6.0 + - vtk=6.3.0 + - pyzmq=15.4.0 + # - _libgcc_mutex=0.1=main + # - apptools=4.4.0=py27_1002 + # - asn1crypto=0.24.0=py27_1003 + # - atom=0.4.3=py27h6bb024c_0 + # - backports=1.0=py_2 + # - backports.functools_lru_cache=1.5=py_1 + # - backports.shutil_get_terminal_size=1.0.0=py_3 + # - backports_abc=0.5=py_1 + # - bcrypt=3.1.6=py27h516909a_1 + # - blas=1.1=openblas + # - bzip2=1.0.8=h516909a_1 + # - ca-certificates=2019.9.11=hecc5488_0 + # - cairo=1.16.0=h18b612c_1001 + # - certifi=2019.9.11=py27_0 + # - cffi=1.12.3=py27h8022711_0 + # - cloudpickle=1.2.2=py_0 + # - configobj=5.0.6=py_0 + # - cryptography=2.7=py27h72c5cf5_0 + # - curl=7.65.3=hf8cf82a_0 + # - cycler=0.10.0=py_1 + # - cytoolz=0.10.0=py27h516909a_0 + # - dask-core=0.15.2=py27_0 + # - dbus=1.13.6=he372182_0 + # - decorator=4.4.0=py_0 + # - enum34=1.1.6=py27_1001 + # - envisage=4.8.0=py_0 + # - expat=2.2.5=he1b5a44_1003 + # - ffmpeg=4.1.3=h167e202_0 + # - fontconfig=2.13.1=he4413a7_1000 + # - freetype=2.10.0=he983fc9_1 + # - functools32=3.2.3.2=py_3 + # - future=0.17.1=py27_1000 + # - gettext=0.19.8.1=hc5be6a0_1002 + # - giflib=5.1.9=h516909a_0 + # - glib=2.58.3=h6f030ca_1002 + # - gmp=6.1.2=hf484d3e_1000 + # - gnutls=3.6.5=hd3a4fd2_1002 + # - graphite2=1.3.13=hf484d3e_1000 + # - gst-plugins-base=1.14.5=h0935bb2_0 + # - gstreamer=1.14.5=h36ae1b5_0 + # - harfbuzz=2.4.0=h37c48d4_1 + # - hdf4=4.2.13=h9a582f1_1002 + # - hdf5=1.10.5=nompi_h3c11f04_1103 + # - icu=58.2=hf484d3e_1000 + # - idna=2.8=py27_1000 + # - imageio=2.5.0=py27_0 + # - ipaddress=1.0.22=py_1 + # - ipython_genutils=0.2.0=py_1 + # - jasper=1.900.1=h07fcdf6_1006 + # - jpeg=9c=h14c3975_1001 + # - jsoncpp=1.8.4=hc9558a2_1001 + # - kiwisolver=1.1.0=py27hc9558a2_0 + # - krb5=1.16.3=h05b26f9_1001 + # - lame=3.100=h14c3975_1001 + # - libblas=3.8.0=11_openblas + # - libcblas=3.8.0=11_openblas + # - libcurl=7.65.3=hda55be3_0 + # - libedit=3.1.20170329=hf8c457e_1001 + # - libffi=3.2.1=he1b5a44_1006 + # - libgcc-ng=9.1.0=hdf63c60_0 + # - libgfortran-ng=7.3.0=hdf63c60_0 + # - libiconv=1.15=h516909a_1005 + # - liblapack=3.8.0=11_openblas + # - liblapacke=3.8.0=11_openblas + # - libnetcdf=4.6.2=h303dfb8_1003 + # - libopenblas=0.3.6=h5a2b251_1 + # - libpng=1.6.37=hed695b0_0 + # - libssh2=1.8.2=h22169c7_2 + # - 
libstdcxx-ng=9.1.0=hdf63c60_0 + # - libtiff=4.0.10=h57b8799_1003 + # - libuuid=2.32.1=h14c3975_1000 + # - libwebp=1.0.2=h576950b_1 + # - libxcb=1.13=h14c3975_1002 + # - libxml2=2.9.9=h13577e0_2 + # - lz4-c=1.8.3=he1b5a44_1001 + # - ncurses=6.1=hf484d3e_1002 + # - nettle=3.4.1=h1bed415_1002 + # - networkx=2.1=py27_0 + # - olefile=0.46=py_0 + # - openblas=0.3.3=h9ac9557_1001 + # - openh264=1.8.0=hdbcaa40_1000 + # - openssl=1.1.1c=h516909a_0 + # - pathlib2=2.3.4=py27_0 + # - pcre=8.41=hf484d3e_1003 + # - pexpect=4.7.0=py27_0 + # - pickleshare=0.7.5=py27_1000 + # - pixman=0.38.0=h516909a_1003 + # - ply=3.11=py_1 + # - prompt_toolkit=1.0.15=py_1 + # - pthread-stubs=0.4=h14c3975_1001 + # - ptyprocess=0.6.0=py_1001 + # - pycparser=2.19=py27_1 + # - pyface=6.1.2=py_0 + # - pygments=2.4.2=py_0 + # - pynacl=1.3.0=py27h14c3975_1000 + # - pyparsing=2.4.2=py_0 + # - pyqt=5.9.2=py27hcca6a23_4 + # - pyserial=3.4=py_2 + # - python-dateutil=2.8.0=py_0 + # - pytz=2019.2=py_0 + # - pywavelets=1.0.3=py27hd352d35_1 + # - qt=5.9.7=h52cfd70_2 + # - qtpy=1.9.0=py_0 + # - readline=8.0=hf8c457e_0 + # - scandir=1.10.0=py27h14c3975_0 + # - setuptools=41.2.0=py27_0 + # - simplegeneric=0.8.1=py_1 + # - singledispatch=3.4.0.3=py27_1000 + # - sip=4.19.8=py27hf484d3e_1000 + # - six=1.12.0=py27_1000 + # - soupsieve=1.9.3=py27_0 + # - sqlite=3.29.0=hcee41ef_1 + # - subprocess32=3.5.4=py27h516909a_0 + # - tbb=2019.8=hc9558a2_0 + # - tk=8.6.9=hed695b0_1003 + # - toolz=0.10.0=py_0 + # - traitlets=4.3.2=py27_1000 + # - traitsui=6.1.3=py_0 + # - wcwidth=0.1.7=py_1 + # - wheel=0.33.6=py27_0 + # - x264=1!152.20180806=h14c3975_0 + # - xorg-kbproto=1.0.7=h14c3975_1002 + # - xorg-libice=1.0.10=h516909a_0 + # - xorg-libsm=1.2.3=h84519dc_1000 + # - xorg-libx11=1.6.8=h516909a_0 + # - xorg-libxau=1.0.9=h14c3975_0 + # - xorg-libxdmcp=1.1.3=h516909a_0 + # - xorg-libxext=1.3.4=h516909a_0 + # - xorg-libxrender=0.9.10=h516909a_1002 + # - xorg-libxt=1.2.0=h516909a_0 + # - xorg-renderproto=0.11.1=h14c3975_1002 + # - xorg-xextproto=7.3.0=h14c3975_1002 + # - xorg-xproto=7.0.31=h14c3975_1007 + # - xz=5.2.4=h14c3975_1001 + # - zlib=1.2.11=h516909a_1006 + # - zstd=1.4.0=h3b9ef0a_0 + - pip: + - pymap3d==1.2.4 + - git+git://github.com/enthought/traits-enaml.git@update-data-frame-table + + + + +# +#name: cn_client_amit +#channels: +# - defaults +# - free +# - conda-forge +#dependencies: +# - beautifulsoup4=4.5.1 +# - cython=0.24.1 +## - enaml=0.10 +# - ephem=3.7.6.0 +# - futures=3.0.5 +# - ipython=5.4.1 +# - joblib=0.10.3 +# - matplotlib=2.0.0 +# - mayavi=4.5.0 +# - numpy=1.12.1 +# - opencv=3.2.0 +# - pandas=0.19.2 +# - paramiko=2.2.1 +# - pillow=4.1.1 +# - pip=19.2 +## - pyfirmata=1.1 +# - pyopengl=3.1.0 +# - pyqtgraph=0.10 +# - python=2.7 +# - scikit-image=0.12.3 +# - scikit-learn=0.19 +# - scipy=0.19.1 +# - tornado=4.4.1 +# - traits=4.6.0 +# - vtk=6.3.0 +# - pyzmq=15.4.0 +# # - _libgcc_mutex=0.1=main +# # - apptools=4.4.0=py27_1002 +# # - asn1crypto=0.24.0=py27_1003 +# # - atom=0.4.3=py27h6bb024c_0 +# # - backports=1.0=py_2 +# # - backports.functools_lru_cache=1.5=py_1 +# # - backports.shutil_get_terminal_size=1.0.0=py_3 +# # - backports_abc=0.5=py_1 +# # - bcrypt=3.1.6=py27h516909a_1 +# # - blas=1.1=openblas +# # - bzip2=1.0.8=h516909a_1 +# # - ca-certificates=2019.9.11=hecc5488_0 +# # - cairo=1.16.0=h18b612c_1001 +# # - certifi=2019.9.11=py27_0 +# # - cffi=1.12.3=py27h8022711_0 +# # - cloudpickle=1.2.2=py_0 +# # - configobj=5.0.6=py_0 +# # - cryptography=2.7=py27h72c5cf5_0 +# # - curl=7.65.3=hf8cf82a_0 +# # - cycler=0.10.0=py_1 +# # - 
cytoolz=0.10.0=py27h516909a_0 +# # - dask-core=0.15.2=py27_0 +# # - dbus=1.13.6=he372182_0 +# # - decorator=4.4.0=py_0 +# # - enum34=1.1.6=py27_1001 +# # - envisage=4.8.0=py_0 +# # - expat=2.2.5=he1b5a44_1003 +# # - ffmpeg=4.1.3=h167e202_0 +# # - fontconfig=2.13.1=he4413a7_1000 +# # - freetype=2.10.0=he983fc9_1 +# # - functools32=3.2.3.2=py_3 +# # - future=0.17.1=py27_1000 +# # - gettext=0.19.8.1=hc5be6a0_1002 +# # - giflib=5.1.9=h516909a_0 +# # - glib=2.58.3=h6f030ca_1002 +# # - gmp=6.1.2=hf484d3e_1000 +# # - gnutls=3.6.5=hd3a4fd2_1002 +# # - graphite2=1.3.13=hf484d3e_1000 +# # - gst-plugins-base=1.14.5=h0935bb2_0 +# # - gstreamer=1.14.5=h36ae1b5_0 +# # - harfbuzz=2.4.0=h37c48d4_1 +# # - hdf4=4.2.13=h9a582f1_1002 +# # - hdf5=1.10.5=nompi_h3c11f04_1103 +# # - icu=58.2=hf484d3e_1000 +# # - idna=2.8=py27_1000 +# # - imageio=2.5.0=py27_0 +# # - ipaddress=1.0.22=py_1 +# # - ipython_genutils=0.2.0=py_1 +# # - jasper=1.900.1=h07fcdf6_1006 +# # - jpeg=9c=h14c3975_1001 +# # - jsoncpp=1.8.4=hc9558a2_1001 +# # - kiwisolver=1.1.0=py27hc9558a2_0 +# # - krb5=1.16.3=h05b26f9_1001 +# # - lame=3.100=h14c3975_1001 +# # - libblas=3.8.0=11_openblas +# # - libcblas=3.8.0=11_openblas +# # - libcurl=7.65.3=hda55be3_0 +# # - libedit=3.1.20170329=hf8c457e_1001 +# # - libffi=3.2.1=he1b5a44_1006 +# # - libgcc-ng=9.1.0=hdf63c60_0 +# # - libgfortran-ng=7.3.0=hdf63c60_0 +# # - libiconv=1.15=h516909a_1005 +# # - liblapack=3.8.0=11_openblas +# # - liblapacke=3.8.0=11_openblas +# # - libnetcdf=4.6.2=h303dfb8_1003 +# # - libopenblas=0.3.6=h5a2b251_1 +# # - libpng=1.6.37=hed695b0_0 +# # - libssh2=1.8.2=h22169c7_2 +# # - libstdcxx-ng=9.1.0=hdf63c60_0 +# # - libtiff=4.0.10=h57b8799_1003 +# # - libuuid=2.32.1=h14c3975_1000 +# # - libwebp=1.0.2=h576950b_1 +# # - libxcb=1.13=h14c3975_1002 +# # - libxml2=2.9.9=h13577e0_2 +# # - lz4-c=1.8.3=he1b5a44_1001 +# # - ncurses=6.1=hf484d3e_1002 +# # - nettle=3.4.1=h1bed415_1002 +# # - networkx=2.1=py27_0 +# # - olefile=0.46=py_0 +# # - openblas=0.3.3=h9ac9557_1001 +# # - openh264=1.8.0=hdbcaa40_1000 +# # - openssl=1.1.1c=h516909a_0 +# # - pathlib2=2.3.4=py27_0 +# # - pcre=8.41=hf484d3e_1003 +# # - pexpect=4.7.0=py27_0 +# # - pickleshare=0.7.5=py27_1000 +# # - pixman=0.38.0=h516909a_1003 +# # - ply=3.11=py_1 +# # - prompt_toolkit=1.0.15=py_1 +# # - pthread-stubs=0.4=h14c3975_1001 +# # - ptyprocess=0.6.0=py_1001 +# # - pycparser=2.19=py27_1 +# # - pyface=6.1.2=py_0 +# # - pygments=2.4.2=py_0 +# # - pynacl=1.3.0=py27h14c3975_1000 +# # - pyparsing=2.4.2=py_0 +# # - pyqt=5.9.2=py27hcca6a23_4 +# # - pyserial=3.4=py_2 +# # - python-dateutil=2.8.0=py_0 +# # - pytz=2019.2=py_0 +# # - pywavelets=1.0.3=py27hd352d35_1 +# # - qt=5.9.7=h52cfd70_2 +# # - qtpy=1.9.0=py_0 +# # - readline=8.0=hf8c457e_0 +# # - scandir=1.10.0=py27h14c3975_0 +# # - setuptools=41.2.0=py27_0 +# # - simplegeneric=0.8.1=py_1 +# # - singledispatch=3.4.0.3=py27_1000 +# # - sip=4.19.8=py27hf484d3e_1000 +# # - six=1.12.0=py27_1000 +# # - soupsieve=1.9.3=py27_0 +# # - sqlite=3.29.0=hcee41ef_1 +# # - subprocess32=3.5.4=py27h516909a_0 +# # - tbb=2019.8=hc9558a2_0 +# # - tk=8.6.9=hed695b0_1003 +# # - toolz=0.10.0=py_0 +# # - traitlets=4.3.2=py27_1000 +# # - traitsui=6.1.3=py_0 +# # - wcwidth=0.1.7=py_1 +# # - wheel=0.33.6=py27_0 +# # - x264=1!152.20180806=h14c3975_0 +# # - xorg-kbproto=1.0.7=h14c3975_1002 +# # - xorg-libice=1.0.10=h516909a_0 +# # - xorg-libsm=1.2.3=h84519dc_1000 +# # - xorg-libx11=1.6.8=h516909a_0 +# # - xorg-libxau=1.0.9=h14c3975_0 +# # - xorg-libxdmcp=1.1.3=h516909a_0 +# # - xorg-libxext=1.3.4=h516909a_0 +# # - 
xorg-libxrender=0.9.10=h516909a_1002 +# # - xorg-libxt=1.2.0=h516909a_0 +# # - xorg-renderproto=0.11.1=h14c3975_1002 +# # - xorg-xextproto=7.3.0=h14c3975_1002 +# # - xorg-xproto=7.0.31=h14c3975_1007 +# # - xz=5.2.4=h14c3975_1001 +# # - zlib=1.2.11=h516909a_1006 +# # - zstd=1.4.0=h3b9ef0a_0 +# - pip: +# - pymap3d==1.2.4 +# - git+git://github.com/enthought/traits-enaml.git@update-data-frame-table +# diff --git a/cn_client_ubuntu18.yml b/cn_client_ubuntu18.yml new file mode 100644 index 0000000..f5c8ed1 --- /dev/null +++ b/cn_client_ubuntu18.yml @@ -0,0 +1,169 @@ +name: cn_client +channels: + - defaults +dependencies: +# - _libgcc_mutex=0.1=main +# - apptools=4.4.0=py27_1002 +# - asn1crypto=0.24.0=py27_1003 +# - atom=0.4.3=py27h6bb024c_0 +# - backports=1.0=py_2 +# - backports.functools_lru_cache=1.5=py_1 +# - backports.shutil_get_terminal_size=1.0.0=py_3 +# - backports_abc=0.5=py_1 +# - bcrypt=3.1.6=py27h516909a_1 + - beautifulsoup4=4.8 +# - blas=1.1=openblas +# - bzip2=1.0.8=h516909a_1 +# - ca-certificates=2019.9.11=hecc5488_0 +# - cairo=1.16.0=h18b612c_1001 +# - certifi=2019.9.11=py27_0 +# - cffi=1.12.3=py27h8022711_0 +# - cloudpickle=1.2.2=py_0 +# - configobj=5.0.6=py_0 +# - cryptography=2.7=py27h72c5cf5_0 +# - curl=7.65.3=hf8cf82a_0 +# - cycler=0.10.0=py_1 + - cython=0.29 +# - cytoolz=0.10.0=py27h516909a_0 +# - dask-core=0.15.2=py27_0 +# - dbus=1.13.6=he372182_0 +# - decorator=4.4.0=py_0 + - enaml=0.10 +# - enum34=1.1.6=py27_1001 +# - envisage=4.8.0=py_0 + - ephem=3.7 +# - expat=2.2.5=he1b5a44_1003 +# - ffmpeg=4.1.3=h167e202_0 +# - fontconfig=2.13.1=he4413a7_1000 +# - freetype=2.10.0=he983fc9_1 +# - functools32=3.2.3.2=py_3 +# - future=0.17.1=py27_1000 + - futures=3.3 +# - gettext=0.19.8.1=hc5be6a0_1002 +# - giflib=5.1.9=h516909a_0 +# - glib=2.58.3=h6f030ca_1002 +# - gmp=6.1.2=hf484d3e_1000 +# - gnutls=3.6.5=hd3a4fd2_1002 +# - graphite2=1.3.13=hf484d3e_1000 +# - gst-plugins-base=1.14.5=h0935bb2_0 +# - gstreamer=1.14.5=h36ae1b5_0 +# - harfbuzz=2.4.0=h37c48d4_1 +# - hdf4=4.2.13=h9a582f1_1002 +# - hdf5=1.10.5=nompi_h3c11f04_1103 +# - icu=58.2=hf484d3e_1000 +# - idna=2.8=py27_1000 +# - imageio=2.5.0=py27_0 +# - ipaddress=1.0.22=py_1 + - ipython=5.8 +# - ipython_genutils=0.2.0=py_1 +# - jasper=1.900.1=h07fcdf6_1006 + - joblib=0.13 +# - jpeg=9c=h14c3975_1001 +# - jsoncpp=1.8.4=hc9558a2_1001 +# - kiwisolver=1.1.0=py27hc9558a2_0 +# - krb5=1.16.3=h05b26f9_1001 +# - lame=3.100=h14c3975_1001 +# - libblas=3.8.0=11_openblas +# - libcblas=3.8.0=11_openblas +# - libcurl=7.65.3=hda55be3_0 +# - libedit=3.1.20170329=hf8c457e_1001 +# - libffi=3.2.1=he1b5a44_1006 +# - libgcc-ng=9.1.0=hdf63c60_0 +# - libgfortran-ng=7.3.0=hdf63c60_0 +# - libiconv=1.15=h516909a_1005 +# - liblapack=3.8.0=11_openblas +# - liblapacke=3.8.0=11_openblas +# - libnetcdf=4.6.2=h303dfb8_1003 +# - libopenblas=0.3.6=h5a2b251_1 +# - libpng=1.6.37=hed695b0_0 +# - libssh2=1.8.2=h22169c7_2 +# - libstdcxx-ng=9.1.0=hdf63c60_0 +# - libtiff=4.0.10=h57b8799_1003 +# - libuuid=2.32.1=h14c3975_1000 +# - libwebp=1.0.2=h576950b_1 +# - libxcb=1.13=h14c3975_1002 +# - libxml2=2.9.9=h13577e0_2 +# - lz4-c=1.8.3=he1b5a44_1001 + - matplotlib=2.2 + - mayavi=4.7 +# - ncurses=6.1=hf484d3e_1002 +# - nettle=3.4.1=h1bed415_1002 +# - networkx=2.1=py27_0 + - numpy=1.16 +# - olefile=0.46=py_0 +# - openblas=0.3.3=h9ac9557_1001 + - opencv=4.1 +# - openh264=1.8.0=hdbcaa40_1000 +# - openssl=1.1.1c=h516909a_0 + - pandas=0.24.2 + - paramiko=2.6 +# - pathlib2=2.3.4=py27_0 +# - pcre=8.41=hf484d3e_1003 +# - pexpect=4.7.0=py27_0 +# - pickleshare=0.7.5=py27_1000 + - pillow=6.1 + 
- pip=19.2 +# - pixman=0.38.0=h516909a_1003 +# - ply=3.11=py_1 +# - prompt_toolkit=1.0.15=py_1 +# - pthread-stubs=0.4=h14c3975_1001 +# - ptyprocess=0.6.0=py_1001 +# - pycparser=2.19=py27_1 +# - pyface=6.1.2=py_0 + - pyfirmata=1.1 +# - pygments=2.4.2=py_0 +# - pynacl=1.3.0=py27h14c3975_1000 + - pyopengl=3.1. +# - pyparsing=2.4.2=py_0 +# - pyqt=5.9.2=py27hcca6a23_4 + - pyqtgraph=0.10 +# - pyserial=3.4=py_2 + - python=2.7 +# - python-dateutil=2.8.0=py_0 +# - pytz=2019.2=py_0 +# - pywavelets=1.0.3=py27hd352d35_1 +# - qt=5.9.7=h52cfd70_2 +# - qtpy=1.9.0=py_0 +# - readline=8.0=hf8c457e_0 +# - scandir=1.10.0=py27h14c3975_0 + - scikit-image=0.14 + - scikit-learn=0.20 + - scipy=1.2 +# - setuptools=41.2.0=py27_0 +# - simplegeneric=0.8.1=py_1 +# - singledispatch=3.4.0.3=py27_1000 +# - sip=4.19.8=py27hf484d3e_1000 +# - six=1.12.0=py27_1000 +# - soupsieve=1.9.3=py27_0 +# - sqlite=3.29.0=hcee41ef_1 +# - subprocess32=3.5.4=py27h516909a_0 +# - tbb=2019.8=hc9558a2_0 +# - tk=8.6.9=hed695b0_1003 +# - toolz=0.10.0=py_0 + - tornado=4.5.3 +# - traitlets=4.3.2=py27_1000 + - traits=5.1 +# - traitsui=6.1.3=py_0 + - vtk=8.2 +# - wcwidth=0.1.7=py_1 +# - wheel=0.33.6=py27_0 +# - x264=1!152.20180806=h14c3975_0 +# - xorg-kbproto=1.0.7=h14c3975_1002 +# - xorg-libice=1.0.10=h516909a_0 +# - xorg-libsm=1.2.3=h84519dc_1000 +# - xorg-libx11=1.6.8=h516909a_0 +# - xorg-libxau=1.0.9=h14c3975_0 +# - xorg-libxdmcp=1.1.3=h516909a_0 +# - xorg-libxext=1.3.4=h516909a_0 +# - xorg-libxrender=0.9.10=h516909a_1002 +# - xorg-libxt=1.2.0=h516909a_0 +# - xorg-renderproto=0.11.1=h14c3975_1002 +# - xorg-xextproto=7.3.0=h14c3975_1002 +# - xorg-xproto=7.0.31=h14c3975_1007 +# - xz=5.2.4=h14c3975_1001 +# - zlib=1.2.11=h516909a_1006 +# - zstd=1.4.0=h3b9ef0a_0 + - pip: + - pymap3d==1.8 + - pyzmq==18.1 + - git+git://github.com/enthought/traits-enaml.git@update-data-frame-table diff --git a/data/ROIS/2017_05_19_10_35_00.pkl b/data/ROIS/2017_05_19_10_35_00.pkl index 4a98a58..119c950 100644 --- a/data/ROIS/2017_05_19_10_35_00.pkl +++ b/data/ROIS/2017_05_19_10_35_00.pkl @@ -20,1192 +20,1224 @@ p9 (dp10 S'angle' p11 -F0 -sS'pos' +cnumpy.core.multiarray +scalar p12 -(F137.03205548763216 -F5.1526496769421897 -tp13 -sS'size' -p14 -(F150.13265871385977 -F287.25230581504087 -tp15 -ssS'cam_118bL' +(cnumpy +dtype +p13 +(S'f8' +I0 +I1 +tRp14 +(I3 +S'<' +NNNI-1 +I-1 +I0 +tbS'\xc4\x19;\xb6f\xbb\xc7?' 
+tRp15 +sS'pos' p16 -(dp17 -sS'cam_109bL' +(F137.49721538619269 +F4.9104908630096418 +tp17 +sS'size' p18 -(dp19 +(F160.61308642222153 +F287.21839151023318 +tp19 +ssS'cam_118bL' +p20 +(dp21 +g11 +g12 +(g14 +S'I\xe5%\xcb\xe1\x8bf\xc0' +tRp22 +sg16 +(F302.63137974877225 +F302.02119042717544 +tp23 +sg18 +(F303.54699649074058 +F297.99673510093362 +tp24 +ssS'cam_109bL' +p25 +(dp26 g4 F0 sg5 (F3.4059025801118992 F13.013011612678639 -tp20 +tp27 sg7 (F276.77182323405879 F255.81085807209487 -tp21 +tp28 ssS'cam_115bL' -p22 -(dp23 -g11 +p29 +(dp30 +S'angle' +p31 F0 -sg12 +sS'pos' +p32 (F0.65526829877950377 F12.139638064263522 -tp24 -sg14 +tp33 +sS'size' +p34 (F281.13869097613463 F276.77182323405884 -tp25 +tp35 ssS'cam_111cL' -p26 -(dp27 +p36 +(dp37 S'angle' -p28 +p38 F0 sS'pos' -p29 +p39 (F20.018054858070677 F22.638175503316234 -tp30 +tp40 sS'size' -p31 +p41 (F273.27832904039815 F272.404955491983 -tp32 +tp42 ssS'cam_119bL' -p33 -(dp34 +p43 +(dp44 S'angle' -p35 +p45 F0 sS'pos' -p36 +p46 (F5.1526496769421897 F5.1526496769421897 -tp37 +tp47 sS'size' -p38 +p48 (F289.87242646028631 F290.74580000870151 -tp39 +tp49 ssS'cam_120L' -p40 -(dp41 -S'angle' -p42 -F0 -sS'pos' -p43 -(F12.113264313193042 -F5.1211633818986115 -tp44 -sS'size' -p45 -(F277.3010798417439 -F278.62555334077001 -tp46 +p50 +(dp51 +g11 +g12 +(g14 +S'\xff\x1dD\x90\x8ca\xe3?' +tRp52 +sg16 +(F0.49857108920244286 +F6.2332560357255886 +tp53 +sg18 +(F307.15948329281343 +F295.00535104992105 +tp54 ssS'cam_112bL' -p47 -(dp48 +p55 +(dp56 S'angle' -p49 +p57 F0 sS'pos' -p50 +p58 (F20.873373548415145 F12.139638064263465 -tp51 +tp59 sS'size' -p52 +p60 (F265.41796710466195 F256.68423162050999 -tp53 -ss(dp54 +tp61 +ss(dp62 g2 -(dp55 +(dp63 S'points' -p56 -(lp57 +p64 +(lp65 (F297.30385612939716 F161.85385612939717 -tp58 +tp66 a(F288.49302064959625 F188.92843613026733 -tp59 +tp67 a(F251.5363653874808 F219.80837600781416 -tp60 +tp68 a(F232.06124909370166 F241.55547386955192 -tp61 +tp69 a(F209.01602774381618 F260.54506710031097 -tp62 +tp70 a(F210.76277484064653 F271.02554968129294 -tp63 +tp71 a(F196.63617446839746 F285.72853819103864 -tp64 +tp72 a(F169.77703245857862 F287.91888086038847 -tp65 +tp73 a(F134.93062374625219 F291.60098441665957 -tp66 +tp74 a(F96.090354730755294 F277.16161774067302 -tp67 +tp75 a(F64.875825962199556 F248.40705660384538 -tp68 +tp76 a(F36.615521689572049 F209.72683309638063 -tp69 +tp77 a(F23.88434984142258 F164.06060176937379 -tp70 +tp78 a(F29.124591131913562 F126.4589156496443 -tp71 +tp79 a(F42.729136528478207 F88.653046258373877 -tp72 +tp80 a(F58.762211123293298 F63.946799525551853 -tp73 +tp81 a(F74.256016020376038 F45.672720969706177 -tp74 +tp82 a(F117.48028894539337 F35.600222035795412 -tp75 +tp83 a(F147.94269374819942 F23.561601720593501 -tp76 +tp84 a(F196.63617446839734 F34.485679874095013 -tp77 +tp85 a(F247.78197296517462 F66.431514517769301 -tp78 +tp86 a(F278.61094538835101 F106.51945689622559 -tp79 +tp87 a(F291.19024129049097 F135.65264967694216 -tp80 +tp88 asg4 F0 sg5 (F0 F0 -tp81 +tp89 sS'closed' -p82 +p90 I01 sg7 (F1 F1 -tp83 +tp91 ssg9 -(dp84 +(dp92 S'points' -p85 -(lp86 +p93 +(lp94 (F274.14137049982764 F191.91237547213302 -tp87 +tp95 a(F260.27010087163251 F213.75832510255316 -tp88 +tp96 a(F254.46797866078995 F209.72039584346209 -tp89 +tp97 a(F214.59377812539839 F248.54246225687331 -tp90 +tp98 a(F208.86340414620977 F265.64094657748984 -tp91 +tp99 a(F183.75100923322125 F281.80526602148234 -tp92 +tp100 a(F140.18800120418777 F281.9938753840928 -tp93 +tp101 a(F98.710475376000787 F270.17462935335169 -tp94 +tp102 
a(F58.762211123293397 F250.1538037006757 -tp95 +tp103 a(F31.375280399081067 F214.96707438687164 -tp96 +tp104 a(F16.897361454101254 F172.79433725352544 -tp97 +tp105 a(F11.657120163610241 F121.21867435915328 -tp98 +tp106 a(F31.375280399081049 F86.032925613128384 -tp99 +tp107 a(F52.648596284387139 F49.972822750909188 -tp100 +tp108 a(F94.343607633924861 F19.471514517251194 -tp101 +tp109 a(F136.69450701052702 F8.5256420349252267 -tp102 +tp110 a(F186.37112987846672 F11.334372042781183 -tp103 +tp111 a(F232.5929591350972 F30.522445837016306 -tp104 +tp112 a(F224.58412801768267 F36.23242697092536 -tp105 +tp113 a(F245.69771677725308 F70.149633545804761 -tp106 +tp114 a(F247.78197296517462 F88.265853228148416 -tp107 +tp115 a(F256.77660667797187 F110.01295108988626 -tp108 +tp116 a(F264.111260886492 F109.43026069616113 -tp109 +tp117 a(F270.86155844448342 F124.85951225728438 -tp110 +tp118 a(F279.54051244761513 F123.41301992342909 -tp111 +tp119 a(F282.84771524346934 F150.53617319313514 -tp112 +tp120 asS'angle' -p113 +p121 F0 sS'pos' -p114 +p122 (F-0.87337354841525894 F0.87337354841508841 -tp115 +tp123 sS'closed' -p116 +p124 I01 sS'size' -p117 +p125 (F1 F1 -tp118 -ssg16 -(dp119 +tp126 +ssg20 +(dp127 S'points' -p120 -(lp121 +p128 +(lp129 (F285.94999999999999 F150.5 -tp122 +tp130 a(F278.61094538835096 F194.48054310377438 -tp123 +tp131 a(F257.38908199774153 F233.6951119338155 -tp124 +tp132 a(F224.58412801768273 F263.89419948065949 -tp125 +tp133 a(F183.75100923322125 F281.80526602148234 -tp126 +tp134 a(F139.3146276557726 F285.48736957775344 -tp127 +tp135 a(F96.090354730755308 F274.54149709542753 -tp128 +tp136 a(F58.762211123293397 F250.1538037006757 -tp129 +tp137 a(F31.375280399081067 F214.96707438687164 -tp130 +tp138 a(F16.897361454101254 F172.79433725352544 -tp131 +tp139 a(F16.897361454101237 F128.20566274647462 -tp132 +tp140 a(F26.05301077294623 F77.669359057773647 -tp133 +tp141 a(F51.919293032548538 F49.325547834714399 -tp134 +tp142 a(F96.090354730755223 F26.458502904572491 -tp135 +tp143 a(F139.31462765577254 F15.512630422246559 -tp136 +tp144 a(F183.75100923322123 F19.194733978517689 -tp137 +tp145 a(F224.5841280176827 F37.105800519340477 -tp138 +tp146 a(F257.38908199774147 F67.304888066184461 -tp139 +tp147 a(F278.61094538835096 F106.51945689622558 -tp140 +tp148 a(F285.94999999999999 F150.49999999999997 -tp141 -asg35 +tp149 +asg45 F0 -sg36 +sg46 (F0 F0 -tp142 +tp150 sS'closed' -p143 +p151 I01 -sg38 +sg48 (F1 F1 -tp144 -ssg18 -(dp145 +tp152 +ssg25 +(dp153 S'points' -p146 -(lp147 +p154 +(lp155 (F277.21626451584831 F153.12012064524549 -tp148 +tp156 a(F271.62395700102968 F200.59415794268054 -tp149 +tp157 a(F242.54173167468369 F228.45487064332448 -tp150 +tp158 a(F204.49653640413393 F253.41371689967752 -tp151 +tp159 a(F160.16992342601176 F263.46442150476383 -tp152 +tp160 a(F106.99980636441146 F260.1595366737136 -tp153 +tp161 a(F57.661918600487965 F240.47992870723604 -tp154 +tp162 a(F31.687631122423252 F213.47211466723869 -tp155 +tp163 a(F33.995401044326556 F193.13273567649247 -tp156 +tp164 a(F17.770735002516414 F155.32686628522211 -tp157 +tp165 a(F5.5435053247040749 F148.29325436002344 -tp158 +tp166 a(F11.527596664334343 F105.38413348094355 -tp159 +tp167 a(F24.262828872901913 F76.517479722910181 -tp160 +tp168 a(F49.434959941361797 F48.505933504714221 -tp161 +tp169 a(F84.422840754213524 F23.433061269833871 -tp162 +tp170 a(F107.70469612765686 F20.367341286140288 -tp163 +tp171 a(F115.56505806339337 F27.354329673461621 -tp164 +tp172 a(F139.31462765577254 F15.512630422246559 -tp165 +tp173 a(F160.16992342601174 
F20.941481075348008 -tp166 +tp174 a(F216.72376608194617 F45.839536003492185 -tp167 +tp175 a(F241.66835812626843 F71.671755808260286 -tp168 +tp176 a(F264.63696861370835 F106.51945689622559 -tp169 +tp177 a(F271.10264967694218 F129.539034838036 -tp170 -asg28 +tp178 +asg38 F0 -sg29 +sg39 (F0 F0 -tp171 +tp179 sS'closed' -p172 +p180 I01 -sg31 +sg41 (F1 F1 -tp173 -ssg22 -(dp174 -g146 -(lp175 +tp181 +ssg29 +(dp182 +g154 +(lp183 (F273.76618883225251 F144.95950394653397 -tp176 +tp184 a(F270.19706355155313 F168.19820781242376 -tp177 +tp185 a(F254.84944791020803 F218.61108463434971 -tp178 +tp186 a(F241.10257110254736 F213.6984749944217 -tp179 +tp187 a(F209.50064791357107 F251.01804039159236 -tp180 +tp188 a(F177.61772330845912 F264.33779505317909 -tp181 +tp189 a(F158.21239693491282 F282.57139251344915 -tp182 +tp190 a(F149.40327303943334 F276.45777767454297 -tp183 +tp191 a(F120.82890465624899 F288.1074902229991 -tp184 +tp192 a(F96.090354730755308 F274.54149709542753 -tp185 +tp193 a(F68.359333201022253 F265.10392154514579 -tp186 +tp194 a(F58.795777416407546 F250.06458451932889 -tp187 +tp195 a(F43.877166594571662 F230.93958563554204 -tp188 +tp196 a(F21.210559470024272 F216.00301613113743 -tp189 +tp197 a(F39.581559351273896 F202.02903935649482 -tp190 +tp198 a(F33.467944512367737 F187.18168903343695 -tp191 +tp199 a(F14.781182979192909 F194.00610922490762 -tp192 +tp200 a(F5.5435053247040962 F136.9860217685036 -tp193 +tp201 a(F19.517482099346733 F94.144094358283127 -tp194 +tp202 a(F40.109015883232715 F57.211598515427852 -tp195 +tp203 a(F80.270613623185596 F81.503489675201934 -tp196 +tp204 a(F86.08066630722989 F46.47932855724855 -tp197 +tp205 a(F87.279918434249936 F27.331876452987672 -tp198 +tp206 a(F112.00634357978622 F12.892509777001067 -tp199 +tp207 a(F162.61695888265257 F11.825592504747647 -tp200 +tp208 a(F167.02152083039232 F17.939207343653834 -tp201 +tp209 a(F177.58462250638559 F15.701239784857023 -tp202 +tp210 a(F227.85678242400024 F38.403659341850101 -tp203 +tp211 a(F256.02882434649609 F69.925008711429911 -tp204 +tp212 a(F273.51755249200073 F105.64608334781042 -tp205 +tp213 a(F277.80674455240285 F140.92906416056832 -tp206 -asg28 +tp214 +asg38 F0 -sg29 +sg39 (F-0.87337354841515946 F-0.87337354841505999 -tp207 -sg172 +tp215 +sg180 I01 -sg31 +sg41 (F1 F1 -tp208 -ssg26 -(dp209 -g146 -(lp210 +tp216 +ssg36 +(dp217 +g154 +(lp218 (F292.93698838732132 F153.99349419366069 -tp211 +tp219 a(F282.99586798849754 F190.30248901543624 -tp212 +tp220 a(F274.56535499587369 F196.61546187460763 -tp213 +tp221 a(F275.94188822913316 F210.82085778001795 -tp214 +tp222 a(F269.39122486435554 F226.10573896449918 -tp215 +tp223 a(F268.51780308238517 F235.27666767518781 -tp216 +tp224 a(F259.57234705133635 F241.99223299409238 -tp217 +tp225 a(F242.05169545309644 F245.99011408848139 -tp218 +tp226 a(F226.48349871211946 F257.05157290665022 -tp219 +tp227 a(F220.86950562475374 F270.88812078307035 -tp220 +tp228 a(F204.04580912264657 F278.14634936702362 -tp221 +tp229 a(F188.26731622863366 F286.97832853766346 -tp222 +tp230 a(F161.09515491023313 F290.53297232536823 -tp223 +tp231 a(F147.00650580633931 F287.40762882587262 -tp224 +tp232 a(F136.34678951161052 F289.48835076819091 -tp225 +tp233 a(F120.75507679354152 F283.03660757450723 -tp226 +tp234 a(F116.99155993055935 F275.50957384854286 -tp227 +tp235 a(F96.090354730755308 F274.54149709542753 -tp228 +tp236 a(F80.208635954594527 F265.50229663741516 -tp229 +tp237 a(F77.102669771570405 F257.14030975244526 -tp230 +tp238 a(F76.263248384710863 F247.44446387408351 -tp231 +tp239 a(F68.417441897994806 
F248.9072827799784 -tp232 +tp240 a(F59.683224078291296 F244.10346297914154 -tp233 +tp241 a(F58.841654435331044 F235.91722331973492 -tp234 +tp242 a(F58.405377646342551 F227.62034342723399 -tp235 +tp243 a(F46.581897348736028 F221.39449664791238 -tp236 +tp244 a(F38.721101311002869 F220.08436397495683 -tp237 +tp245 a(F40.467944874943569 F213.97041150116436 -tp238 +tp246 a(F34.741749522265678 F209.48977747388804 -tp239 +tp247 a(F38.731748398035585 F202.05213407443523 -tp240 +tp248 a(F29.584002315086884 F197.92578748178389 -tp241 +tp249 a(F26.504470486668062 F187.59506403870594 -tp242 +tp250 a(F30.457375863502044 F176.09144877140474 -tp243 +tp251 a(F21.723640379350385 F165.61096619042274 -tp244 +tp252 a(F22.690319129223784 F142.675747780405 -tp245 +tp253 a(F37.762592822160684 F126.95502390893202 -tp246 +tp254 a(F33.122027495911361 F117.47437335607435 -tp247 +tp255 a(F44.431352638144709 F114.95530038234307 -tp248 +tp256 a(F37.444364250823376 F100.10795005928526 -tp249 +tp257 a(F53.251947163480807 F92.566209639386955 -tp250 +tp258 a(F56.722839439119369 F76.508417761055767 -tp251 +tp259 a(F61.347792376196999 F72.229708852407271 -tp252 +tp260 a(F65.951249588898563 F64.421549133495887 -tp253 +tp261 a(F80.630116126786703 F61.415898061653081 -tp254 +tp262 a(F91.648243973909018 F49.32247968225019 -tp255 +tp263 a(F105.13915326822134 F40.941379161416194 -tp256 +tp264 a(F118.18517870863883 F40.454932899689084 -tp257 +tp265 a(F166.93198844345397 F29.972359361661717 -tp258 +tp266 a(F181.35847663567216 F31.289723608753491 -tp259 +tp267 a(F191.73063006822983 F31.865954355006693 -tp260 +tp268 a(F202.7497893073035 F35.359053422510158 -tp261 +tp269 a(F209.59378320207921 F35.323338832525934 -tp262 +tp270 a(F223.99955185840929 F42.238107787564417 -tp263 +tp271 a(F235.52416678347339 F52.610261220122055 -tp264 +tp272 a(F241.63224841012703 F52.439482885055924 -tp265 +tp273 a(F246.47255096228429 F59.525030175160509 -tp266 +tp274 a(F253.963550663576 F57.796337936400903 -tp267 +tp275 a(F257.99716588734839 F71.625875846477811 -tp268 +tp276 a(F267.79308857365288 F75.659491070250255 -tp269 +tp277 a(F269.52178081241254 F82.574260025288737 -tp270 +tp278 a(F282.10443958201159 F94.292227218413302 -tp271 +tp279 a(F288.57012064524548 F119.93192580546921 -tp272 -asg28 +tp280 +asg38 F0 -sg29 +sg39 (F0 F0 -tp273 -sg172 +tp281 +sg180 I01 -sg31 +sg41 (F1 F1 -tp274 -ssg33 -(dp275 -g146 -(lp276 +tp282 +ssg43 +(dp283 +g154 +(lp284 (F272.84939677377253 F71.89638064263508 -tp277 +tp285 a(F289.96480151774819 F126.3574063273914 -tp278 +tp286 a(F262.29181419714126 F131.28578193486638 -tp279 +tp287 a(F264.03856129397161 F146.13313225792419 -tp280 +tp288 a(F284.12615290752046 F140.8928909674332 -tp281 +tp289 a(F288.49302064959625 F180.19470064611568 -tp282 +tp290 a(F267.86956457872355 F224.96137644966382 -tp283 +tp291 a(F224.58412801768273 F263.89419948065949 -tp284 +tp292 a(F176.76402084589992 F289.66562795721882 -tp285 +tp293 a(F128.8341450747906 F288.98086377141408 -tp286 +tp294 a(F90.850113440264295 F279.78173838591852 -tp287 +tp295 a(F47.408354993896225 F254.52067144275151 -tp288 +tp296 a(F19.148050721268735 F214.09370083845644 -tp289 +tp297 a(F8.1636259699495959 F168.4274695114496 -tp290 +tp298 a(F8.1636259699495817 F130.82578339172011 -tp291 +tp299 a(F16.000473544064416 F103.33782838558105 -tp292 +tp300 a(F28.755159753835553 F82.539431419467718 -tp293 +tp301 a(F64.875825962199457 F54.339690492985028 -tp294 +tp302 a(F110.06433150539786 F20.344888065666339 -tp295 +tp303 a(F134.0743863652815 F9.3990155833404287 -tp296 +tp304 
a(F173.27052665223923 F27.928469462669341 -tp297 +tp305 a(F220.21726027560683 F41.472668261416345 -tp298 +tp306 a(F186.30831548502178 F86.743730965692919 -tp299 +tp307 a(F164.8114858657338 F100.49308290596076 -tp300 +tp308 a(F133.90590258011184 F102.4644548371659 -tp301 +tp309 a(F95.477466449844528 F133.0325290316967 -tp302 +tp310 a(F90.237225159353514 F161.8538561293972 -tp303 +tp311 a(F92.857345804599035 F171.46096516196403 -tp304 +tp312 a(F88.490478062523195 F181.94144774294602 -tp305 +tp313 a(F91.110598707768702 F198.53554516283413 -tp306 +tp314 a(F97.224213546674832 F222.99000451845879 -tp307 +tp315 a(F129.539034838036 F248.31783742249866 -tp308 +tp316 a(F175.82783290403981 F247.44446387408345 -tp309 +tp317 a(F218.34817054770446 F220.93139173861238 -tp310 +tp318 a(F236.96398129310145 F197.66217161441901 -tp311 +tp319 a(F245.69771677725308 F167.09409741988819 -tp312 +tp320 a(F229.97699290578012 F124.29879354754505 -tp313 +tp321 a(F194.16867742075831 F96.350839998259744 -tp314 +tp322 a(F239.66120193399618 F41.328306448104229 -tp315 -asg28 +tp323 +asg38 F0 -sg29 +sg39 (F0 F0 -tp316 -sg172 +tp324 +sg180 I01 -sg31 +sg41 (F1 F1 -tp317 -ssg40 -(dp318 -g85 -(lp319 +tp325 +ssg50 +(dp326 +g93 +(lp327 (F295.91353741968646 F116.53971116850289 -tp320 +tp328 a(F290.07317271839321 F119.13542881352214 -tp321 +tp329 a(F291.37103154090283 F141.84795820744046 -tp322 +tp330 a(F284.32518387179522 F143.61803215359396 -tp323 +tp331 a(F281.7892039434588 F177.89459349378018 -tp324 +tp332 a(F275.62050881390667 F201.30856018154299 -tp325 +tp333 a(F243.99918451930179 F241.13415812942628 -tp326 +tp334 a(F198.29500906165947 F268.27032789774819 -tp327 +tp335 a(F150.73064967242681 F275.66146216252605 -tp328 +tp336 a(F132.56062615729212 F267.87430922746842 -tp329 +tp337 a(F115.5576881103096 F254.45832011733233 -tp330 +tp338 a(F95.637654554804456 F252.75644100700219 -tp331 +tp339 a(F73.786010569077362 F250.96089419060397 -tp332 +tp340 a(F61.42655672945007 F241.73551366987954 -tp333 +tp341 a(F55.315103865266465 F233.86215788468985 -tp334 +tp342 a(F62.428413036876279 F222.7655518373291 -tp335 +tp343 a(F49.99089157289562 F207.66530619262582 -tp336 +tp344 a(F38.371565471339579 F215.05978539382556 -tp337 +tp345 a(F19.092878236922534 F171.298501222939 -tp338 +tp346 a(F15.592647576255089 F142.06671831123856 -tp339 +tp347 a(F19.239801888602571 F139.00219636093269 -tp340 +tp348 a(F20.142008510375291 F126.73692948995921 -tp341 +tp349 a(F26.1362027115035 F127.05662698517474 -tp342 +tp350 a(F28.516687555657271 F121.67303025898383 -tp343 +tp351 a(F20.295838010210218 F116.67375640509781 -tp344 +tp352 a(F24.189414477739078 F100.45052112372758 -tp345 +tp353 a(F31.976567412796769 F99.152662301217958 -tp346 +tp354 a(F39.114790936599675 F88.769791721141019 -tp347 +tp355 a(F30.678708590287158 F80.982638786083299 -tp348 +tp356 a(F34.572285057816018 F75.142274084790017 -tp349 +tp357 a(F43.008367404128521 F79.684779963573675 -tp350 +tp358 a(F55.878597174119662 F64.694754287712783 -tp351 +tp359 a(F52.272917010745246 F55.388702178108019 -tp352 +tp360 a(F83.16045086099578 F27.63394871091424 -tp353 +tp361 a(F140.3211009963367 F7.0097243402844924 -tp354 +tp362 a(F163.74019414418345 F2.5323967267171383 -tp355 +tp363 a(F195.47723091541113 F8.4096257584259888 -tp356 +tp364 a(F197.74842934214868 F20.960002425373659 -tp357 +tp365 a(F215.45031025966284 F26.425276801424417 -tp358 +tp366 a(F221.93558453663815 F17.902440657771905 -tp359 +tp367 a(F253.73312568812378 F41.912828874199846 -tp360 +tp368 a(F247.29498927264098 F48.259638867632134 -tp361 +tp369 
a(F260.87134921192666 F63.976428856863379 -tp362 +tp370 a(F269.30743155823939 F58.136064155570068 -tp363 +tp371 a(F274.71736892082208 F77.317633389759152 -tp364 +tp372 a(F283.3542823549808 F85.879894987441162 -tp365 -asg113 +tp373 +asg121 F0 -sg114 +sg122 (F-1.7467470968303473 F4.3668677420758968 -tp366 -sg116 +tp374 +sg124 I01 -sg117 +sg125 (F1 F1 -tp367 -ssg47 -(dp368 -g146 -(lp369 +tp375 +ssg55 +(dp376 +g154 +(lp377 (F278.08963806426351 F181.941447742946 -tp370 +tp378 a(F261.1434744200476 F218.06162891098387 -tp371 +tp379 a(F225.94763425479553 F256.40282419260978 -tp372 +tp380 a(F194.16867742075831 F274.51904387495364 -tp373 +tp381 a(F176.54858285484855 F274.37468206164152 -tp374 +tp382 a(F152.30956149027526 F268.70466279525482 -tp375 +tp383 a(F148.92173668833942 F257.53941602846817 -tp376 +tp384 a(F90.850113440264323 F228.2526990294237 -tp377 +tp385 a(F72.769754191050311 F246.57109032566831 -tp378 +tp386 a(F59.635584671708564 F244.91356241018471 -tp379 +tp387 a(F42.729136528478243 F220.20731567736266 -tp380 +tp388 a(F31.744711777159079 F197.24879660915013 -tp381 +tp389 a(F16.89736145410124 F155.2802427473448 -tp382 +tp390 a(F19.493967737725075 F115.56505806339339 -tp383 +tp391 a(F30.501906850665879 F99.133528839355876 -tp384 +tp392 a(F33.467944512367737 F83.250236772032252 -tp385 +tp393 a(F58.762211123293319 F57.833184686645723 -tp386 +tp394 a(F76.170647010601101 F42.24194418013019 -tp387 +tp395 a(F89.744171219340075 F34.753103237377644 -tp388 +tp396 a(F97.701064721014632 F35.689208355221723 -tp389 +tp397 a(F99.834775202131496 F29.734870817026717 -tp390 +tp398 a(F139.31462765577254 F15.512630422246559 -tp391 +tp399 a(F183.75100923322123 F19.194733978517689 -tp392 +tp400 a(F221.09063382402206 F40.5992947130012 -tp393 +tp401 a(F254.76896135249598 F67.304888066184446 -tp394 +tp402 a(F278.61094538835096 F106.51945689622558 -tp395 +tp403 a(F282.45650580633935 F138.27277032218768 -tp396 -asg28 +tp404 +asg38 F0 -sg29 +sg39 (F0 F0 -tp397 -sg172 +tp405 +sg180 I01 -sg31 +sg41 (F1 F1 -tp398 -ss(dp399 +tp406 +ss(dp407 g2 (L301L L301L -tp400 +tp408 sg9 (L301L L301L -tp401 -sg16 +tp409 +sg20 (L301L L301L -tp402 -sg18 +tp410 +sg25 (L301L L301L -tp403 -sg22 +tp411 +sg29 (L301L L301L -tp404 -sg26 +tp412 +sg36 (L301L L301L -tp405 -sg33 +tp413 +sg43 (L301L L301L -tp406 -sg40 +tp414 +sg50 (L301L L301L -tp407 -sg47 +tp415 +sg55 (L301L L301L -tp408 +tp416 st. \ No newline at end of file diff --git a/data/aeronet/2016_10/161001_161031_Technion_Haifa_IL.alm b/data/aeronet/2016_10/161001_161031_Technion_Haifa_IL.alm index 5b10a36..c9f7dc3 100644 --- a/data/aeronet/2016_10/161001_161031_Technion_Haifa_IL.alm +++ b/data/aeronet/2016_10/161001_161031_Technion_Haifa_IL.alm @@ -1,6 +1,3 @@ -These data are raw data with calibration applied. 
-Location=Technion_Haifa_IL,long=35.025,lat=32.776,elev=230,Nmeas=60,PI=Yoav_Y._Schechner_and_Dietrich_Althausen,Email=yoav@ee.technion.ac.il_and_dietrich@tropos.de -Almucantars,All Points,UNITS can be found at,,, http://aeronet.gsfc.nasa.gov/data_menu.html Date(dd-mm-yyyy),Time(hh:mm:ss),Wavelength(um),SolarZenithAngle(degrees),0.000000,-6.000000,-5.000000,-4.000000,-3.500000,-3.000000,-2.500000,-2.000000,2.000000,2.500000,3.000000,3.500000,4.000000,5.000000,6.000000,6.000000,7.000000,8.000000,10.000000,12.000000,14.000000,16.000000,18.000000,20.000000,25.000000,30.000000,35.000000,40.000000,45.000000,50.000000,60.000000,70.000000,80.000000,90.000000,100.000000,120.000000,140.000000,160.000000,180.000000,-180.000000,-160.000000,-140.000000,-120.000000,-100.000000,-90.000000,-80.000000,-70.000000,-60.000000,-50.000000,-45.000000,-40.000000,-35.000000,-30.000000,-25.000000,-20.000000,-18.000000,-16.000000,-14.000000,-12.000000,-10.000000,-8.000000,-7.000000,-6.000000,-6.000000,-5.000000,-4.000000,-3.500000,-3.000000,-2.500000,-2.000000,2.000000,2.500000,3.000000,3.500000,4.000000,5.000000,6.000000 01:10:2016,04:49:59,1.020200,75.043157,-100,19.089889,26.014818,37.202538,45.492234,56.578859,-100.000000,-100.000000,-100.000000,-100.000000,57.825684,46.469474,38.044986,26.537136,19.393171,19.468594,14.723387,11.586547,7.789540,5.731251,4.509734,3.740241,3.199914,2.815167,2.163411,1.766050,1.465401,1.236235,1.059630,0.929278,0.719034,0.412078,0.294342,0.021024,0.027332,0.365825,0.401566,0.443615,0.460434,0.456229,0.433103,0.395259,0.365825,0.393156,0.433103,0.496176,0.594991,0.750571,0.962918,1.105883,1.276181,1.494835,1.782869,2.180230,2.821474,3.208323,3.736036,4.499222,5.684998,7.709647,11.443581,14.538372,19.146921,19.089889,26.014818,37.202538,45.492234,56.578859,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000 01:10:2016,04:51:16,1.641700,74.782756,-100,5.617560,7.206432,9.523260,11.068920,12.946980,-100.000000,-100.000000,-100.000000,-100.000000,13.259436,11.261712,9.652896,7.299504,5.674068,5.694012,4.524795,3.670527,2.541198,1.869750,1.442616,1.155921,0.954819,0.806901,0.574221,0.427134,0.334893,0.270906,0.221877,0.183651,0.132960,0.091410,0.221046,0.004986,0.007479,0.051522,0.054015,0.058170,0.060663,0.059001,0.056508,0.053184,0.053184,0.059001,0.068142,0.081438,0.099720,0.140439,0.191130,0.226032,0.275061,0.338217,0.429627,0.566742,0.806901,0.953157,1.155921,1.449264,1.888032,2.565297,3.671358,4.503189,5.641659,5.617560,7.206432,9.523260,11.068920,12.946980,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000 diff --git a/data/aeronet/2017_05/170501_170531_Technion_Haifa_IL.alm b/data/aeronet/2017_05/170501_170531_Technion_Haifa_IL.alm index 770d294..cad0c36 100644 --- a/data/aeronet/2017_05/170501_170531_Technion_Haifa_IL.alm +++ b/data/aeronet/2017_05/170501_170531_Technion_Haifa_IL.alm @@ -1,6 +1,3 @@ -These data are raw data with calibration applied. 
-Location=Technion_Haifa_IL,long=35.025,lat=32.776,elev=230,Nmeas=48,PI=Yoav_Y._Schechner_and_Dietrich_Althausen,Email=yoav@ee.technion.ac.il_and_dietrich@tropos.de -Almucantars,All Points,UNITS can be found at,,, http://aeronet.gsfc.nasa.gov/data_menu.html Date(dd-mm-yyyy),Time(hh:mm:ss),Wavelength(um),SolarZenithAngle(degrees),0.000000,-6.000000,-5.000000,-4.000000,-3.500000,-3.000000,-2.500000,-2.000000,2.000000,2.500000,3.000000,3.500000,4.000000,5.000000,6.000000,6.000000,7.000000,8.000000,10.000000,12.000000,14.000000,16.000000,18.000000,20.000000,25.000000,30.000000,35.000000,40.000000,45.000000,50.000000,60.000000,70.000000,80.000000,90.000000,100.000000,120.000000,140.000000,160.000000,180.000000,-180.000000,-160.000000,-140.000000,-120.000000,-100.000000,-90.000000,-80.000000,-70.000000,-60.000000,-50.000000,-45.000000,-40.000000,-35.000000,-30.000000,-25.000000,-20.000000,-18.000000,-16.000000,-14.000000,-12.000000,-10.000000,-8.000000,-7.000000,-6.000000,-6.000000,-5.000000,-4.000000,-3.500000,-3.000000,-2.500000,-2.000000,2.000000,2.500000,3.000000,3.500000,4.000000,5.000000,6.000000 01:05:2017,04:10:41,1.020200,74.869557,-100,64.666408,77.606518,95.668044,109.000796,128.052462,-100.000000,-100.000000,-100.000000,-100.000000,132.252023,118.236415,106.815658,90.546628,78.408873,-100.000000,67.545013,59.395854,44.204238,35.967820,30.602443,26.056451,23.049201,20.912414,17.662541,13.738005,10.924427,9.260118,8.383269,5.599487,0.097901,0.095772,0.095772,0.114927,1.619616,1.413174,1.389763,1.464252,1.853726,1.787750,1.843085,2.204891,1.887779,2.055912,2.358127,2.698651,3.145588,3.916023,5.454765,6.408231,7.610706,9.751749,12.920748,16.777179,21.240169,23.806866,26.441669,30.383231,36.176391,42.884709,50.586930,56.258779,64.814438,64.666408,77.606518,95.668044,109.000796,128.052462,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000 01:05:2017,04:11:53,1.641700,74.620836,-100,21.326784,25.338852,29.866140,32.970756,38.043180,-100.000000,-100.000000,-100.000000,-100.000000,34.579572,30.354768,27.180348,22.673004,19.588332,19.659798,17.249898,15.538038,12.814851,10.915185,9.440991,8.386452,7.786470,7.377618,5.809521,4.681023,3.639780,3.002403,2.893542,2.030133,0.042381,0.039888,0.034902,0.035733,0.393894,0.311625,0.284202,0.306639,0.379767,0.364809,0.413007,0.452064,0.422148,0.466191,0.586686,0.661476,0.787788,1.035426,1.493307,1.880553,2.196333,2.808780,3.922320,5.563545,7.229700,7.961811,9.141831,10.203018,11.921526,14.310651,16.663212,18.319395,21.394926,21.326784,25.338852,29.866140,32.970756,38.043180,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000,-100.000000 diff --git a/docs/Makefile b/docs/Makefile index e3e5445..69fe55e 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,225 +1,19 @@ -# Makefile for Sphinx documentation +# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build -PAPER = +SOURCEDIR = source BUILDDIR = build -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help +# Put it first so that "make" without argument is like "make help". 
help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " applehelp to make an Apple Help Book" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " epub3 to make an epub3" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " coverage to run coverage check of the documentation (if enabled)" - @echo " dummy to check syntax errors of document sources" - -.PHONY: clean -clean: - rm -rf $(BUILDDIR)/* - -.PHONY: html -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -.PHONY: dirhtml -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -.PHONY: singlehtml -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -.PHONY: pickle -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -.PHONY: json -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -.PHONY: htmlhelp -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -.PHONY: qthelp -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/CameraNetwork.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/CameraNetwork.qhc" - -.PHONY: applehelp -applehelp: - $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp - @echo - @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." - @echo "N.B. You won't be able to view it unless you put it in" \ - "~/Library/Documentation/Help or install it in your application" \ - "bundle." - -.PHONY: devhelp -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." 
- @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/CameraNetwork" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/CameraNetwork" - @echo "# devhelp" - -.PHONY: epub -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -.PHONY: epub3 -epub3: - $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 - @echo - @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." - -.PHONY: latex -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -.PHONY: latexpdf -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: latexpdfja -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -.PHONY: text -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -.PHONY: man -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -.PHONY: texinfo -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -.PHONY: info -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -.PHONY: gettext -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -.PHONY: changes -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -.PHONY: linkcheck -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -.PHONY: doctest -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -.PHONY: coverage -coverage: - $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage - @echo "Testing of coverage in the sources finished, look at the " \ - "results in $(BUILDDIR)/coverage/python.txt." - -.PHONY: xml -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -.PHONY: pseudoxml -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml." +.PHONY: help Makefile -.PHONY: dummy -dummy: - $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy - @echo - @echo "Build finished. Dummy builder generates no files." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/make.bat b/docs/make.bat index e04c0b8..69aea4a 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -1,281 +1,298 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source -set I18NSPHINXOPTS=%SPHINXOPTS% source -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. epub3 to make an epub3 - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. coverage to run coverage check of the documentation if enabled - echo. dummy to check syntax errors of document sources - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 1>NUL 2>NUL -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
- goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\CameraNetwork.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\CameraNetwork.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "epub3" ( - %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. 
- goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) - -if "%1" == "dummy" ( - %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. Dummy builder generates no files. - goto end -) - -:end +@ECHO OFF + +pushd %~dp0 + + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. epub3 to make an epub3 + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. coverage to run coverage check of the documentation if enabled + echo. dummy to check syntax errors of document sources + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 1>NUL 2>NUL +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. 
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\CameraNetwork.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\CameraNetwork.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "epub3" ( + %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. 
The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. + echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +if "%1" == "dummy" ( + %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. Dummy builder generates no files. + goto end +) + +:end diff --git a/docs/source/camera.rst b/docs/source/camera.rst new file mode 100644 index 0000000..4635f39 --- /dev/null +++ b/docs/source/camera.rst @@ -0,0 +1,3 @@ +****** +Camera +****** \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py index 5a44e0c..d83904f 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -33,446 +33,454 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -# -*- coding: utf-8 -*- -# -# Camera Network documentation build configuration file, created by -# sphinx-quickstart on Tue Dec 27 09:54:15 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -import os -import sys -sys.path.insert(0, os.path.abspath('../..')) -sys.path.insert(0, os.path.abspath('../../scripts')) -sys.path.insert(0, os.path.abspath('../../scripts_calibrate')) -sys.path.insert(0, os.path.abspath('../../scripts_client')) -sys.path.insert(0, os.path.abspath('../../scripts_proxy')) -sys.path.insert(0, os.path.abspath('../../scripts_sunphotometer')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'numfig', - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', - 'sphinx.ext.napoleon' -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -# -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Camera Network' -copyright = u'2016, Amit Aides' -author = u'Amit Aides' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = u'1.0.0' -# The full version, including alpha/beta/rc tags. -release = u'1.0.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# -# today = '' -# -# Else, today_fmt is used as the format for a strftime call. -# -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. -# " v documentation" by default. -# -# html_title = u'Camera Network v1.0.0' - -# A shorter title for the navigation bar. Default is the same as html_title. -# -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# -# html_logo = None - -# The name of an image file (relative to this directory) to use as a favicon of -# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# -# html_extra_path = [] - -# If not None, a 'Last updated on:' timestamp is inserted at every page -# bottom, using the given strftime format. -# The empty string is equivalent to '%b %d, %Y'. -# -# html_last_updated_fmt = None - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# -# html_additional_pages = {} - -# If false, no module index is generated. -# -# html_domain_indices = True - -# If false, no index is generated. -# -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' -# -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# 'ja' uses this config value. -# 'zh' user can custom change `jieba` dictionary path. 
-# -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'CameraNetworkdoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - 'papersize': 'a4paper', - - # The font size ('10pt', '11pt' or '12pt'). - # - 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'CameraNetwork.tex', u'Camera Network Documentation', - u'Amit Aides', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# -# latex_use_parts = False - -# If true, show page references after internal links. -# -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# -# latex_appendices = [] - -# It false, will not define \strong, \code, itleref, \crossref ... but only -# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added -# packages. -# -# latex_keep_old_macro_names = True - -# If false, no module index is generated. -# -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'cameranetwork', u'Camera Network Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -# -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'CameraNetwork', u'Camera Network Documentation', - author, 'CameraNetwork', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -# -# texinfo_appendices = [] - -# If false, no module index is generated. -# -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# -# texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = copyright - -# The basename for the epub file. It defaults to the project name. -# epub_basename = project - -# The HTML theme for the epub output. Since the default themes are not -# optimized for small screen space, using the same theme for HTML and epub -# output is usually not wise. This defaults to 'epub', a theme designed to save -# visual space. 
-# -# epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or 'en' if the language is not set. -# -# epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -# epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -# -# epub_identifier = '' - -# A unique identification for the text. -# -# epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -# -# epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -# -# epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# -# epub_pre_files = [] - -# HTML files that should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# -# epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -# -# epub_tocdepth = 3 - -# Allow duplicate toc entries. -# -# epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -# -# epub_tocscope = 'default' - -# Fix unsupported image types using the Pillow. -# -# epub_fix_images = False - -# Scale large images. -# -# epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# -# epub_show_urls = 'inline' - -# If false, no index is generated. -# -# epub_use_index = True - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/': None} +# -*- coding: utf-8 -*- +# +# Camera Network documentation build configuration file, created by +# sphinx-quickstart on Tue Dec 27 09:54:15 2016. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('../..')) +sys.path.insert(0, os.path.abspath('../../CameraNetwork')) +sys.path.insert(0, os.path.abspath('../../CameraNetwork/gui')) +sys.path.insert(0, os.path.abspath('../../CameraNetwork/mdp')) +sys.path.insert(0, os.path.abspath('../../scripts')) +sys.path.insert(0, os.path.abspath('../../scripts_calibrate')) +sys.path.insert(0, os.path.abspath('../../scripts_client')) +sys.path.insert(0, os.path.abspath('../../scripts_proxy')) +sys.path.insert(0, os.path.abspath('../../scripts_sunphotometer')) +# TODO Change to more Robust solution numfig +sys.path.insert(0, os.path.abspath('../../../../.local/lib/python2.7/site-packages/sphinx/ext')) + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ +# 'sphinx.ext.numfig', # Commented so that readthedocs will work + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages', + 'sphinx.ext.napoleon' +] + +numfig = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +# +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Camera Network' +copyright = u'2016, Amit Aides' +author = u'Amit Aides' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = u'1.0.0' +# The full version, including alpha/beta/rc tags. +release = u'1.0.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# +# today = '' +# +# Else, today_fmt is used as the format for a strftime call. +# +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. +# " v documentation" by default. +# +# html_title = u'Camera Network v1.0.0' + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# +# html_logo = None + +# The name of an image file (relative to this directory) to use as a favicon of +# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# +# html_extra_path = [] + +# If not None, a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# The empty string is equivalent to '%b %d, %Y'. +# +# html_last_updated_fmt = None + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# +# html_additional_pages = {} + +# If false, no module index is generated. +# +# html_domain_indices = True + +# If false, no index is generated. +# +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' +# +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# 'ja' uses this config value. +# 'zh' user can custom change `jieba` dictionary path. +# +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'CameraNetworkdoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + 'papersize': 'a4paper', + + # The font size ('10pt', '11pt' or '12pt'). + # + 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'CameraNetwork.tex', u'Camera Network Documentation', + u'Amit Aides', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# +# latex_use_parts = False + +# If true, show page references after internal links. +# +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# +# latex_appendices = [] + +# It false, will not define \strong, \code, itleref, \crossref ... but only +# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added +# packages. +# +# latex_keep_old_macro_names = True + +# If false, no module index is generated. +# +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'cameranetwork', u'Camera Network Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +# +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'CameraNetwork', u'Camera Network Documentation', + author, 'CameraNetwork', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# +# texinfo_appendices = [] + +# If false, no module index is generated. +# +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# +# texinfo_no_detailmenu = False + + +# -- Options for Epub output ---------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The basename for the epub file. It defaults to the project name. +# epub_basename = project + +# The HTML theme for the epub output. Since the default themes are not +# optimized for small screen space, using the same theme for HTML and epub +# output is usually not wise. This defaults to 'epub', a theme designed to save +# visual space. +# +# epub_theme = 'epub' + +# The language of the text. It defaults to the language option +# or 'en' if the language is not set. +# +# epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +# epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. +# +# epub_cover = () + +# A sequence of (type, uri, title) tuples for the guide element of content.opf. +# +# epub_guide = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. 
+# +# epub_pre_files = [] + +# HTML files that should be inserted after the pages created by sphinx. +# The format is a list of tuples containing the path and title. +# +# epub_post_files = [] + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + +# The depth of the table of contents in toc.ncx. +# +# epub_tocdepth = 3 + +# Allow duplicate toc entries. +# +# epub_tocdup = True + +# Choose between 'default' and 'includehidden'. +# +# epub_tocscope = 'default' + +# Fix unsupported image types using the Pillow. +# +# epub_fix_images = False + +# Scale large images. +# +# epub_max_image_width = 0 + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# +# epub_show_urls = 'inline' + +# If false, no index is generated. +# +# epub_use_index = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} diff --git a/docs/source/images/ECEF.png b/docs/source/images/ECEF.png new file mode 100644 index 0000000..a88c54f Binary files /dev/null and b/docs/source/images/ECEF.png differ diff --git a/docs/source/images/ECEF2ENU.png b/docs/source/images/ECEF2ENU.png new file mode 100644 index 0000000..7eac66e Binary files /dev/null and b/docs/source/images/ECEF2ENU.png differ diff --git a/docs/source/images/ECEF_ENU_Longitude_Latitude_relationships.png b/docs/source/images/ECEF_ENU_Longitude_Latitude_relationships.png new file mode 100644 index 0000000..00e3d72 Binary files /dev/null and b/docs/source/images/ECEF_ENU_Longitude_Latitude_relationships.png differ diff --git a/docs/source/images/GUI_main_status.png b/docs/source/images/GUI_main_status.png new file mode 100644 index 0000000..0d1bee3 Binary files /dev/null and b/docs/source/images/GUI_main_status.png differ diff --git a/docs/source/images/GUI_on_start.png b/docs/source/images/GUI_on_start.png new file mode 100644 index 0000000..af8c8f1 Binary files /dev/null and b/docs/source/images/GUI_on_start.png differ diff --git a/docs/source/images/GUI_servers_with_camera.png b/docs/source/images/GUI_servers_with_camera.png new file mode 100644 index 0000000..d5bea09 Binary files /dev/null and b/docs/source/images/GUI_servers_with_camera.png differ diff --git a/docs/source/images/Screenshot from 2019-12-02 09-06-41.png b/docs/source/images/Screenshot from 2019-12-02 09-06-41.png new file mode 100644 index 0000000..aca36b4 Binary files /dev/null and b/docs/source/images/Screenshot from 2019-12-02 09-06-41.png differ diff --git a/docs/source/images/beta_map.png b/docs/source/images/beta_map.png new file mode 100644 index 0000000..63f3641 Binary files /dev/null and b/docs/source/images/beta_map.png differ diff --git a/docs/source/images/camera_array.png b/docs/source/images/camera_array.png new file mode 100644 index 0000000..4f2e356 Binary files /dev/null and b/docs/source/images/camera_array.png differ diff --git a/docs/source/images/gui_sprinkler.png b/docs/source/images/gui_sprinkler.png new file mode 100644 index 0000000..45ce708 Binary files /dev/null and b/docs/source/images/gui_sprinkler.png differ diff --git a/docs/source/images/gui_sunshader.png b/docs/source/images/gui_sunshader.png new file mode 100644 index 0000000..0c4b468 Binary files /dev/null and b/docs/source/images/gui_sunshader.png differ diff --git a/docs/source/images/img_data_sample.png b/docs/source/images/img_data_sample.png new file mode 100644 index 0000000..544447f Binary files /dev/null and b/docs/source/images/img_data_sample.png 
differ diff --git a/docs/source/images/los2.png b/docs/source/images/los2.png new file mode 100644 index 0000000..03f925e Binary files /dev/null and b/docs/source/images/los2.png differ diff --git a/docs/source/images/map_los_roi_grid.png b/docs/source/images/map_los_roi_grid.png new file mode 100644 index 0000000..c6ce00f Binary files /dev/null and b/docs/source/images/map_los_roi_grid.png differ diff --git a/docs/source/images/radiometric_calibration.png b/docs/source/images/radiometric_calibration.png new file mode 100644 index 0000000..886c255 Binary files /dev/null and b/docs/source/images/radiometric_calibration.png differ diff --git a/docs/source/images/roi_grid_los_and_settings.png b/docs/source/images/roi_grid_los_and_settings.png new file mode 100644 index 0000000..f6ae1f1 Binary files /dev/null and b/docs/source/images/roi_grid_los_and_settings.png differ diff --git a/docs/source/images/server_settings_capture_tab.png b/docs/source/images/server_settings_capture_tab.png new file mode 100644 index 0000000..c3a0e13 Binary files /dev/null and b/docs/source/images/server_settings_capture_tab.png differ diff --git a/docs/source/images/server_settings_general_tab.png b/docs/source/images/server_settings_general_tab.png new file mode 100644 index 0000000..9a72b72 Binary files /dev/null and b/docs/source/images/server_settings_general_tab.png differ diff --git a/docs/source/images/snapshot_extrinsic.png b/docs/source/images/snapshot_extrinsic.png new file mode 100644 index 0000000..cf2f702 Binary files /dev/null and b/docs/source/images/snapshot_extrinsic.png differ diff --git a/docs/source/images/space_carving.png b/docs/source/images/space_carving.png new file mode 100644 index 0000000..0ff7114 Binary files /dev/null and b/docs/source/images/space_carving.png differ diff --git a/docs/source/images/square_roi_and_LOS.png b/docs/source/images/square_roi_and_LOS.png new file mode 100644 index 0000000..be253b5 Binary files /dev/null and b/docs/source/images/square_roi_and_LOS.png differ diff --git a/docs/source/images/square_roi_grid_LOS.png b/docs/source/images/square_roi_grid_LOS.png new file mode 100644 index 0000000..9c12145 Binary files /dev/null and b/docs/source/images/square_roi_grid_LOS.png differ diff --git a/docs/source/index.rst b/docs/source/index.rst index 1d77b3c..38aab65 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,27 +1,24 @@ -.. Camera Network documentation master file, created by - sphinx-quickstart on Tue Dec 27 09:54:15 2016. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to Camera Network's documentation! -========================================== - -Contents: - -.. toctree:: - :maxdepth: 2 - - introduction - install - calibration - camera - usage - modules - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - +Welcome to Camera Network's documentation! +========================================== + + +Contents: + +.. toctree:: + :maxdepth: 5 + + introduction + install + calibration + camera + usage + modules + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/source/install.rst b/docs/source/install.rst index 8c5d780..3c814ee 100644 --- a/docs/source/install.rst +++ b/docs/source/install.rst @@ -1,148 +1,295 @@ -.. 
highlight:: sh - -************ -Installation -************ - -Introduction -============ - -The ``CameraNetwork`` system is made of three logical parts: - -#. *Server*: The camera unit. The server performs the actual measuremetns. -#. *Client*: A program that enables remote control of servers. -#. *Proxy*: A program that bridges and manages the communication between the *Servers* and *Clients*. - -There can be multiple *Servers* and *Clients* but only one *proxy*. - -The ``CameraNetwork`` pacakge contains the code for both the *Server*, *Client* and *Proxy* subsystems. -This simplifies the deployment and enables code reuse. The installation procedures is similar for the -three components but differs due to the different platforms. - -The ``CameraNetwork`` is implemented completely in `Python _`. - -Installing the Server -===================== - -The server software is run on an `Odroid U3 `_ -as at the time of selection it offered a unique balance between capabilites and cost. Nonetheless it should be straight -forward to install the ``CameraNetwork`` pacakge and its prerequisites on other platforms like newer Oroids and even -on the RaspberrPi. - -In the following we detail the procedure of installing the required prerequisites and main pacakge. Note that -once the package is installed on one compture, it is much more time effective to create an image of the Odroid -memory card and duplicate it as needed. - -Prerequisites -------------- - -To use *CameraNetwork* several software package are needed. This can be installed using the following -commands. Copy paste these to a commandline:: - - > sudo apt-get install python-pip git mercurial screen autossh - > sudo pip install paramiko - > sudo pip install cython - > sudo pip install pyzmq --install-option="--zmq=bundled" - > sudo pip install tornado - > sudo pip install futures - > sudo apt-get install python-numpy python-scipy python-matplotlib - > sudo pip install beautifulsoup4 - > sudo pip install sklearn - > sudo pip install skimage - > sudo pip install ephem - > sudo pip install pandas - > sudo pip install pymap3d - > sudo pip install ipython - > sudo pip install pyfirmata - > sudu pip install joblib - -To install opencv3 follow a tutorial relevant to your system, e.g. on Odroid XU4 the following tutorial -was usefull `opencvsh_for_ubuntu_mate `_. - -Install the python wrappers to the ids SDK:: - - > mkdir code - > cd code - > git clone https://github.com/amitibo/ids.git - > cd ids - > sudo python setup.py install - -Install the pyfisheye module:: - - > cd ~/code - > hg clone https://amitibo@bitbucket.org/amitibo/pyfisheye - > cd pyfisheye - > sudo python setup.py install - -Some platforms might require the installation of modem software:: - - > sudo apt-get install network-manager - > sudo apt-get install network-manager-gnome - -The first instal *nmcli* (used for activating the connection). The second intalls *nmcli-connection-editor* -used for defining the mobile network connection. - -Install a recent version of usb_modeswitch (required on raspberryPi). Follow the `usb_modeswitch tutorial `_. 
-To compile the above code you will need to install the *libusb-1* dev files:: - - > sudo apt-get install libusb-1.0-0-dev - -Prepare a device reference file from the following `device reference file `_ and run -it using the command:: - - > sudo usb_modeswitch -c - -CameraNetwork Installation --------------------------- - -Download and install the package:: - - > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork - > cd cameranetwork - > python setup.py develop --user - -.. note:: - - The first command downloads a *slim* version of the code that only includes the *Server* components. - -To make the system start automatically at boot time, we use the *rc.local* script:: - - > sudo cp cameranetwork/scripts/rc.local/rc.local /etc/rc.local - -Run the camera setup script to setup the camera environment. - - > setup_camera.py - -You will be asked for a camera id. Enter a unique camera id number. - -Installing the Proxy -==================== - -Currently the code assumes that the proxy server is run on an ec2 instance. -Installation on the proxy follows the same steps of installation on the -client. - -To run the proxy program, do: - - > start_proxy.py - - -Installing the Client -===================== - -It is recommended to install python using the `Anaconda `_ distribution. -Install the ``CameraNetwork`` package:: - - > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork - > cd cameranetwork - > python setup.py develop --user - -Installing the Calibration Station -================================== - -It is recommended to install python using the `Anaconda `_ distribution. -Install the ``CameraNetwork`` package:: - - > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork - > cd cameranetwork - > python setup.py develop --user +.. highlight::sh + +************ +Installation +************ + +.. contents:: Table of Contents + +Installation +============ + +The ``CameraNetwork`` system is made of three logical parts: + +#. *Server*: The camera unit. The server performs the actual measurements. +#. *Client*: A program that enables remote control of servers. +#. *Proxy*: A program that bridges and manages the communication between the *Servers* and *Clients*. + +There can be multiple *Servers* and *Clients* but only one *proxy*. + +The ``CameraNetwork`` package contains the code for both the *Server*, *Client* and *Proxy* subsystems. +This simplifies the deployment and enables code reuse. The installation procedures is similar for the +three components but differs due to the different platforms. + +The ``CameraNetwork`` is implemented completely in `Python _`. + + +Installation - Client +--------------------- +#. Install conda. Tested on conda 4.7.11 +#. Clone the cameranetwork package:: + + git clone https://github.com/Addalin/cameranetwork.git +#. Navigate to it:: + + cd cameranetwork + +#. Create virtual env: + + LINUX: Create conda virtual environment from *cn_client_ubuntu18.yml* + + :: + + conda env create -f cn_client_ubuntu18.yml + + .. 
Note:: + + The first line of the file sets the new environment's name (currently *cn_client*) + + WINDOWS (exact procedure):: + + # Create a new environment named cn_client + + conda create -n cn_client --yes + + conda activate cn_client + + conda config --env --set restore_free_channel true + + conda config --env --append channels conda-forge + + conda install python=2.7 pip paramiko cython tornado=4.5.3 futures numpy scipy matplotlib beautifulsoup4 scikit-learn scikit-image pyside requests ephem pandas=0.19.2 ipython pyfirmata joblib pyzmq enaml pillow traits pyqtgraph pyopengl vtk mayavi opencv git mercurial + + + + # pip install pymap3d, traits-enaml and pyfisheye + # Note: this installs pyfisheye without cloning it. For development of pyfisheye, clone and install it manually from https://bitbucket.org/amitibo/pyfisheye (TODO: migrate the pyfisheye codebase to github) + + python -m pip install pymap3d==1.1.1 git+https://github.com/enthought/traits-enaml.git@update-data-frame-table hg+https://bitbucket.org/amitibo/pyfisheye + + + +#. Activate the environment:: + + conda activate cn_client + + +#. Install the cameranetwork package + + :: + + python setup.py develop --user + + .. note:: + + Without --user it installs the scripts for all users (Windows: C:\ProgramData\Anaconda2\Scripts) + +#. Verify successful installation by opening the GUI:: + + python scripts_client/camera_client.py + +
+Installation - Server +--------------------- + +The server software runs on an `Odroid U3 `_, +as at the time of selection it offered a unique balance between capabilities and cost. Nonetheless it should be straightforward +to install the ``CameraNetwork`` package and its prerequisites on other platforms like newer Odroids and even +on the Raspberry Pi. + +In the following we detail the procedure of installing the required prerequisites and the main package. Note that +once the package is installed on one computer, it is much more time-effective to create an image of the Odroid +memory card and duplicate it as needed. + +
+Installation - Proxy +-------------------- + +Currently the code assumes that the proxy server runs on an EC2 instance. +Installation on the proxy follows the same steps as the installation on the +client. + +Before running, make sure to update the IP address in the global settings: `DEFUALT_PROXI_PARAMS `_, +and make sure this is updated in all end units! + +To run the proxy program, do:: + + python ./code/cameranetwork/scripts_proxy/start_proxy.py --log_level info + + or + + start_proxy.py + +Installation - Calibration Station +---------------------------------- + + +Camera setup +------------ + +Arduino connections +````````````````````````` + +Savox SunShader Servo pins: + +#. Brown (Gnd) = Gnd +#. Red (5V) = 5V +#. Orange (Signal) = PIN NUM
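+The sunshader servo is presumably driven from the Arduino through *pyfirmata* (which is listed among the prerequisites).
+The signal pin is left unspecified above; purely as an illustration, the sketch below assumes the signal wire goes to
+digital pin 9 and that the Arduino (running the standard Firmata firmware) enumerates as ``/dev/ttyACM0`` -- both values
+are placeholders, not the project's actual wiring::
+
+    import time
+    from pyfirmata import Arduino
+
+    # Hypothetical port and pin; replace with the actual device and the pin
+    # the orange (signal) wire is connected to.
+    board = Arduino('/dev/ttyACM0')
+    servo = board.get_pin('d:9:s')   # digital pin 9, servo mode
+
+    # Sweep the sunshader through its range, e.g. for a manual test.
+    for angle in range(0, 181, 10):
+        servo.write(angle)           # angle in degrees
+        time.sleep(0.5)
+
+    board.exit()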
+ + +Installation - OLD +============================ +Prerequisites +------------- + +To use *CameraNetwork* several software packages are needed. These can be installed using the following +commands. Copy and paste them into a command line:: + + > sudo apt-get install python-pip git mercurial screen autossh + > sudo pip install paramiko + > sudo pip install cython + > sudo pip install pyzmq --install-option="--zmq=bundled" + > sudo pip install tornado==4.5.3 + > sudo pip install futures + > sudo apt-get install python-numpy python-scipy python-matplotlib + > sudo pip install beautifulsoup4 + > sudo pip install sklearn + > sudo pip install skimage + > sudo pip install ephem + > sudo pip install pandas + > sudo pip install pymap3d + > sudo pip install ipython + > sudo pip install pyfirmata + > sudo pip install joblib + +To install opencv3 follow a tutorial relevant to your system; e.g. on the Odroid XU4 the following tutorial +was useful: `opencvsh_for_ubuntu_mate `_. + +Install the Python wrappers for the IDS SDK:: + + > mkdir code + > cd code + > git clone https://github.com/amitibo/ids.git + > cd ids + > sudo python setup.py install + +Install the pyfisheye module:: + + > cd ~/code + > hg clone https://amitibo@bitbucket.org/amitibo/pyfisheye + > cd pyfisheye + > sudo python setup.py install + +Some platforms might require the installation of modem software:: + + > sudo apt-get install network-manager + > sudo apt-get install network-manager-gnome + +The first installs *nmcli* (used for activating the connection). The second installs *nmcli-connection-editor*, +used for defining the mobile network connection. + +Install a recent version of usb_modeswitch (required on the Raspberry Pi). Follow the `usb_modeswitch tutorial `_. +To compile the above code you will need to install the *libusb-1* dev files:: + + > sudo apt-get install libusb-1.0-0-dev + +Prepare a device reference file from the following `device reference file `_ and run +it using the command:: + + > sudo usb_modeswitch -c + +CameraNetwork Installation +-------------------------- + +Download and install the package:: + + > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork + > cd cameranetwork + > python setup.py develop --user + +.. note:: + + The first command downloads a *slim* version of the code that only includes the *Server* components. + +To make the system start automatically at boot time, we use the *rc.local* script:: + + > sudo cp cameranetwork/scripts/rc.local/rc.local /etc/rc.local + +Run the camera setup script to set up the camera environment:: + + > setup_camera.py + +You will be asked for a camera id. Enter a unique camera id number. + + + +Installing the Client +--------------------- + +It is recommended to install Python using the `Anaconda `_ distribution. +Install the ``CameraNetwork`` package:: + + > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork + > cd cameranetwork + > python setup.py develop --user + +Installing the Calibration Station +---------------------------------- + +It is recommended to install Python using the `Anaconda `_ distribution. +Install the ``CameraNetwork`` package:: + + > git clone https://amitibo@bitbucket.org/amitibo/cameranetwork_git.git cameranetwork + > cd cameranetwork + > python setup.py develop --user
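+As a quick sanity check of whichever installation path you used, try importing the main third-party
+dependencies from the environment. The final import assumes the package is importable as ``CameraNetwork``
+once ``python setup.py develop`` has run; adjust the name if your layout differs::
+
+    # Verify the key dependencies are importable (Python 2.7 environment).
+    import cv2        # OpenCV
+    import zmq        # pyzmq
+    import tornado
+    import numpy
+    import pandas
+
+    print("OpenCV  " + cv2.__version__)
+    print("pyzmq   " + zmq.__version__)
+    print("tornado " + tornado.version)
+    print("numpy   " + numpy.__version__)
+
+    import CameraNetwork  # assumed importable after `setup.py develop --user`
+    print("CameraNetwork at " + CameraNetwork.__file__)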
+ + +Shubi reference +--------------- + +#. Create conda virtual environment:: + + conda create --name --no-default-packages + conda config --add channels conda-forge + conda activate cnvenv + + + +#. Install prerequisites:: + + conda install python=2.7 pip paramiko cython tornado=4.5.3 futures numpy scipy matplotlib beautifulsoup4 scikit-learn scikit-image ephem pandas ipython pyfirmata joblib + pip install pyzmq --install-option="--zmq=bundled" + pip install pymap3d + conda install enaml pillow traits pyqtgraph pyopengl vtk mayavi opencv + +#. Install additional modules:: + + pip install ephem + conda install -c anaconda pil + conda install -c anaconda enaml + conda install -c anaconda traits pyqtgraph pyopengl + conda install -c anaconda vtk + pip install mayavi + +#. Install traits-enaml:: + + git clone https://github.com/enthought/traits-enaml.git --branch update-data-frame-table + cd traits-enaml + python setup.py install + cd .. + + + +#. Install the cameranetwork package + + #. Navigate back to cameranetwork:: + + cd .. + + #. Install the cameranetwork package:: + + python setup.py develop --user + + .. note:: + + Without --user it installs the scripts for all users (Windows: C:\ProgramData\Anaconda2\Scripts)
diff --git a/docs/source/usage.rst b/docs/source/usage.rst index 28e11ef..70aa634 100644 --- a/docs/source/usage.rst +++ b/docs/source/usage.rst @@ -1,2 +1,211 @@ -Using the Camera Network Package -================================ \ No newline at end of file +******************************** +Using the Camera Network Package +******************************** + +.. contents:: Table of Contents + + +Client +====== +After successful installation, +start the Client GUI by navigating to:: + + cd cameranetwork/scripts_client + +then run ``python camera_client.py`` + +You should now see: + +.. image:: images/GUI_on_start.png + +After pressing on *Servers*, you should see all connected cameras; in this case, camera ID 236. + +.. image:: images/GUI_servers_with_camera.png + +Pressing on the camera ID should lead to the camera interface screen: + +.. image:: images/GUI_main_status.png + +Sun Shader +---------- + +.. image:: images/gui_sunshader.png + +The angle slider allows manual setting of the sunshader angle. +The scan button moves the sunshader through its whole range and then, +according to the point with the least sunlight (as shown in the graph), +determines the optimal sunshader angle. + + +Sprinkler +--------- + +.. image:: images/gui_sprinkler.png + +The Sprinkler tab and corresponding button allow manually activating the sprinkler system +in order to clean the camera lens dome. *Period* refers to the activation time in seconds. + +Camera (server) +=============== + + +Field Deployment +---------------- + +#. Verify case screws are fully tightened. + +#. Verify plugs are fully screwed in. + +#. Verify sprinklers are pointing in the right direction. + +#. Verify camera alignment with the north-south axis. + +Code +---- + +After changing any global_settings.py parameter, you need to run setup_camera.py again. + +Connection +---------- + +There are three options to connect to the camera: + +Serial connection +````````````````` + +On the client's PC, from the cameranetwork folder: + +Run ``bash scripts/listusb.sh`` to list all connected USB devices and find the relevant one. +It should be /dev/ttyUSB0 (replace '0' with the relevant number). + +#. Follow the `driver installation instructions by Odroid `_. + +#. Linux: run ``sudo minicom`` in the client PC's terminal. Windows: use Tera Term. + +#. Enter the Odroid username. + +#. Enter the Odroid password. + +SSH +``` + +#. Via SSH + +GUI +``` + +#. Via the GUI (as described in the Client section) + + +Proxy +===== +To connect to the proxy +------------------------- +``sudo ssh -i ubuntu@``
+.. note::
+    Run ``sudo chmod 400 ``
+    if a permission error is encountered.
+
+.. note::
+    ** is the path and name of the proxy's private key.
+    ** is defined in *global_settings.py*; currently *3.123.49.101*.
+
+If this is the initial setup of the proxy server::
+
+    screen -S session_name
+    python ./code/cameranetwork/scripts_proxy/start_proxy.py --log_level info
+
+This should be run from the root of the server, otherwise the logs would be put in a different location each time.
+Screen is used so that *start_proxy.py* can be detached and retrieved whenever needed.
+
+- Press *ctrl+a* then *ctrl+d* to detach *start_proxy.py* from the terminal.
+- ``screen -ls`` to see detached processes, then ``screen -r `` to bring one back.
+
+
+
+Notable files
+-------------
+*tunnel_port_.txt* stores the odroid's password and tunnel_port (a random int between 20,000 and 30,000).
+
+*/proxy_logs/cameralog__proxy.txt* is a log.
+It mainly shows heartbeats from connected cameras and notifications of message transmissions to/from the client.
+
+Others
+======
+
+Image Acquisition flow
+----------------------
+On the Odroid: rc.local --> main(start_server.py) --> start(server.py).278 -->
+loop_timer(server.py) --> handle_loop(controller.py) --> safe_capture(controller.py)
+--> IDSCamera.capture (cameras.py)
+
+Useful commands
+---------------
+- ``ps -ef | grep python`` to view running python processes (you should see start_proxy.py!)
+- ``sudo netstat -a -nlp -o | grep 198`` to see the status of the relevant ports
+- `adding ssh key to ssh-agent `_.
+- `How to use scp to transfer files `_.
+  For example, to retrieve a proxy log from the proxy to the client: ``scp ubuntu@3.123.49.101:/home/ubuntu/proxy_logs/cameralog_190929_092735_proxy.txt /home/shubi/Desktop/log``
+- gparted for microsd / eMMC partitioning & direct copying.
+- ``sudo dd if=/dev/sdb of=~/xu4_lab.img status=progress`` to create an image of the odroid
+- `etcher `_ to flash the image onto the SD card
+- ``grep -a -e "Unregistering worker 236" -e "Registering new worker 236" cameralog_190929_092735_proxy.txt``
+  to see connections and disconnections. Replace the log file name with * to search all logs in the folder.
+- ``du -ah --max-depth=1 | sort -hr`` to see the size of all subfolders and files
+
+
+
+Data Structures
+---------------
+When looking at a specific camera, under `captured_images`,
+a folder `<%Y_%m_%d>` is created for each day that the camera recorded.
+Inside, the images are stored as `.mat` files. In addition, there is a thumbnail `.jpg` version and metadata stored as a `.pkl` file.
+The name is `utctime_date+exact time`.
+The `.pkl` file stores the following data::
+
+    img = pd.read_pickle('~/captured_images/2019_10_02/1570011900.0_2019_10_02_10_25_00_3.pkl')
+
+.. image:: images/img_data_sample.png
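+
+The following is a minimal sketch of how one might pair a captured ``.mat`` image with its ``.pkl``
+metadata. It assumes ``scipy`` and ``pandas`` are available, that each ``.mat`` file has a matching
+``.pkl`` with the same base name, and that the image array sits under one of the non-private keys of
+the loaded ``.mat`` dictionary; the exact naming may differ on your camera::
+
+    import glob
+    import os
+    import pandas as pd
+    import scipy.io as sio
+
+    day_dir = os.path.expanduser('~/captured_images/2019_10_02')
+    for mat_path in sorted(glob.glob(os.path.join(day_dir, '*.mat'))):
+        meta = pd.read_pickle(mat_path.replace('.mat', '.pkl'))   # per-image metadata
+        data = sio.loadmat(mat_path)                              # raw image data
+        keys = [k for k in data if not k.startswith('__')]
+        print('{}: {}'.format(os.path.basename(mat_path), keys))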
+
+In addition, one `database.pkl` is created and stored per day::
+
+    database = pd.read_pickle('~/captured_images/2019_10_02/database.pkl')
+    database.head()
+
+    Time                 hdr  path                                                                               longitude  latitude   altitude  serial_num
+    2019-10-02 00:00:00  0    /home/odroid/captured_images/2019_10_02/1569974400.05_2019_10_02_00_00_00_0.mat   35.024963  32.775776  229       4103098529
+    2019-10-02 00:30:00  0    /home/odroid/captured_images/2019_10_02/1569976200.05_2019_10_02_00_30_00_0.mat   35.024963  32.775776  229       4103098529
+    2019-10-02 01:00:00  0    /home/odroid/captured_images/2019_10_02/1569978000.05_2019_10_02_01_00_00_0.mat   35.024963  32.775776  229       4103098529
+    2019-10-02 01:30:00  0    /home/odroid/captured_images/2019_10_02/1569979800.05_2019_10_02_01_30_00_0.mat   35.024963  32.775776  229       4103098529
+    2019-10-02 08:48:03  0    /home/odroid/captured_images/2019_10_02/1570006083.33_2019_10_02_08_48_03_0.mat   35.024963  32.775776  229       4103098529
+
+
+
+
+Analyzing Results
+-----------------
+On the client PC::
+
+    cd /cameranetwork/scripts_client
+    python start_local.py
+
+.. note::
+
+    - Make sure to activate the environment beforehand: ``conda activate cn_client``
+    - The ``-l`` flag is used for a local proxy (instead of the real proxy server).
+    - The ``-d`` flag is for opening the GUI separately.
+
+Workflow and data structure:
+
+#. Run ``python start_local.py -d ~/experiment_23_09_2019`` in the background,
+   where the experiment folder (here `~/experiment_23_09_2019`) contains a `cam_ID` folder for each camera involved.
+   Each `cam_ID` folder should consist of:
+
+   #. A `captured_images` folder, which stores, for each day that the camera recorded, a folder with images (.jpg, .mat & .pkl versions) and a database.pkl.
+   #. A `dark_images` folder.
+   #. A `sun_positions` folder, containing a folder with a .csv of the positions of the sun (and moon!) in the format: timestamp, object, pos_x, pos_y, sunshader_angle, with a row every 6 minutes for the sun and every 1 minute for the moon (see the sketch at the end of this section).
+   #. Additional pkl, json and other (important!) files.
+
+#. Run ``python camera_client.py``
+#. You should see a list of all real & virtual cameras.
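+
+As a quick sanity check of the layout above, the following sketch loads one day's ``database.pkl``
+together with the matching ``sun_positions`` CSV. The paths, the assumption that the per-day
+subfolder under ``sun_positions`` uses the same date naming, the lack of a CSV header row, the
+column names and the ``'sun'`` value in the ``object`` column are all guesses based on the
+description above; adjust them to your own data::
+
+    import glob
+    import os
+    import pandas as pd
+
+    cam_dir = os.path.expanduser('~/experiment_23_09_2019/cam_236')  # hypothetical cam_ID folder
+    database = pd.read_pickle(os.path.join(cam_dir, 'captured_images', '2019_10_02', 'database.pkl'))
+
+    csv_path = glob.glob(os.path.join(cam_dir, 'sun_positions', '2019_10_02', '*.csv'))[0]
+    cols = ['timestamp', 'object', 'pos_x', 'pos_y', 'sunshader_angle']  # assumed column order
+    sun_positions = pd.read_csv(csv_path, names=cols)
+
+    print(database.head())
+    print(sun_positions[sun_positions['object'] == 'sun'].head())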
diff --git a/env2021_full.yaml b/env2021_full.yaml new file mode 100644 index 0000000..2591b85 --- /dev/null +++ b/env2021_full.yaml @@ -0,0 +1,153 @@ +name: cn_client_1 +channels: + - defaults + - conda-forge + - pro +dependencies: + - apptools=4.4.0=py27_1 + - asn1crypto=1.2.0=py27_0 + - atom=0.4.3=py27hdc96acc_0 + - backports=1.0=py_2 + - backports.functools_lru_cache=1.5=py_2 + - backports.shutil_get_terminal_size=1.0.0=py27_2 + - backports_abc=0.5=py27h0ec6b72_0 + - bcrypt=3.1.7=py27h0c8e037_0 + - beautifulsoup4=4.8.1=py27_0 + - blas=1.0=mkl + - bokeh=1.3.4=py27_0 + - ca-certificates=2019.10.16=0 + - certifi=2019.9.11=py27_0 + - cffi=1.13.1=py27hcfb25f9_0 + - chardet=3.0.4=py27_1003 + - click=7.0=py27_0 + - cloudpickle=1.2.2=py_0 + - colorama=0.4.1=py27_0 + - configobj=5.0.6=py27_1 + - cryptography=2.5=py27hc64555f_1 + - cycler=0.10.0=py27h59acbbf_0 + - cython=0.29.13=py27hc56fc5f_0 + - cytoolz=0.10.0=py27h0c8e037_0 + - dask=1.1.4=py27_1 + - dask-core=1.1.4=py27_1 + - decorator=4.4.1=py_0 + - distributed=1.26.0=py27_0 + - enaml=0.10.4=py27hdc96acc_0 + - enum34=1.1.6=py27_1 + - envisage=4.8.0=py_0 + - ephem=3.7.7.0=py27h0c8e037_0 + - freetype=2.8=hea645e0_1 + - future=0.17.1=py27_0 + - futures=3.3.0=py27_0 + - git=2.23.0=h6bb4b03_0 + - heapdict=1.0.1=py_0 + - icc_rt=2019.0.0=h0cc432a_1 + - icu=58.2=h2aa20d9_1 + - idna=2.8=py27_0 + - imageio=2.6.1=py27_0 + - intel-openmp=2019.4=245 + - ipaddress=1.0.23=py_0 + - ipython=5.8.0=py27_0 + - ipython_genutils=0.2.0=py27hbe997df_0 + - jinja2=2.10.3=py_0 + - joblib=0.13.2=py27_0 + - jpeg=9b=ha175dff_2 + - kiwisolver=1.1.0=py27hc56fc5f_0 + - libpng=1.6.37=h7a46e7a_0 + - libsodium=1.0.16=h8b3e59e_0 + - libtiff=4.0.10=h1c3b264_2 + - libwebp=0.5.2=7 + - locket=0.2.0=py27h1ca288a_1 + - markupsafe=1.1.1=py27h0c8e037_0 + - matplotlib=1.5.1=np111py27_0 + - mayavi=4.5.0=py27_0 + - mercurial=4.8.2=py27h0c8e037_0 + - mkl=2019.4=245 + - msgpack-python=0.6.1=py27hdc96acc_1 + - networkx=2.2=py27_1 + - numpy=1.11.3=py27h239e66a_12 + - numpy-base=1.11.3=py27hb1d0314_12 + - olefile=0.46=py27_0 + - opencv=3.2.0=np111py27_204 + - openssl=1.0.2t=vc9h3cc03e0_0 + - packaging=19.2=py_0 + - pandas=0.19.2=np111py27_1 + - paramiko=2.6.0=py27_0 + - partd=1.0.0=py_0 + - pathlib2=2.3.5=py27_0 + - pickleshare=0.7.5=py27_0 + - pillow=4.2.1=py27hbae4915_0 + - pip=19.3.1=py27_0 + - ply=3.11=py27_0 + - prompt_toolkit=1.0.15=py27h3a8ec6a_0 + - psutil=5.6.3=py27h0c8e037_0 + - pycparser=2.19=py27_0 + - pyface=6.1.2=py27_0 + - pyfirmata=1.1.0=py_0 + - pygments=2.4.2=py_0 + - pynacl=1.3.0=py27h3cc03e0_0 + - pyopengl=3.1.1a1=py27_0 + - pyopenssl=19.0.0=py27_0 + - pyparsing=2.4.2=py_0 + - pyqt=4.10.4=py27_1 + - pyqtgraph=0.10.0=py27h28b3542_3 + - pyserial=3.4=py27_0 + - pyside=1.2.1=py27_0 + - pysocks=1.7.1=py27_0 + - python=2.7.17=h930f6bb_0 + - python-dateutil=2.8.0=py27_0 + - pytz=2019.3=py_0 + - pywavelets=1.0.3=py27hc997a72_1 + - pyyaml=5.1.2=py27h0c8e037_0 + - pyzmq=18.1.0=py27hc56fc5f_0 + - qt=5.6.2=vc9hc26998b_12 + - qtpy=1.9.0=py_0 + - requests=2.22.0=py27_0 + - scandir=1.10.0=py27h0c8e037_0 + - scikit-image=0.13.1=py27h0c8e037_1 + - scikit-learn=0.20.3=py27hf381715_0 + - scipy=1.2.1=py27h4c3ab11_0 + - setuptools=41.6.0=py27_0 + - simplegeneric=0.8.1=py27_2 + - singledispatch=3.4.0.3=py27h3f9d112_0 + - six=1.12.0=py27_0 + - sortedcontainers=2.1.0=py27_0 + - soupsieve=1.9.3=py27_0 + - sqlite=3.30.1=h0c8e037_0 + - ssl_match_hostname=3.7.0.1=py27_0 + - tblib=1.4.0=py_0 + - tk=8.5.18=vc9_0 + - toolz=0.10.0=py_0 + - tornado=4.5.3=py27_0 + - traitlets=4.3.3=py27_0 + - 
traits=5.1.2=py27h0c8e037_0 + - traitsui=6.1.3=py_0 + - urllib3=1.24.2=py27_0 + - vc=9=h7299396_1 + - vs2008_runtime=9.00.30729.1=hfaea7d5_1 + - vtk=6.3.0=py27_1 + - wcwidth=0.1.7=py27hb1a0d82_0 + - wheel=0.33.6=py27_0 + - win_inet_pton=1.1.0=py27_0 + - win_unicode_console=0.5=py27hc037021_0 + - wincertstore=0.2=py27hf04cefb_0 + - xz=5.2.4=h3cc03e0_4 + - yaml=0.1.7=h3e6d941_2 + - zeromq=4.3.1=h2880e7c_3 + - zict=1.0.0=py_0 + - zlib=1.2.11=h3cc03e0_3 + - zstd=1.3.7=h1b0e4d7_0 + - pip: + - astropy==2.0.16 + - atomicwrites==1.3.0 + - attrs==19.3.0 + - funcsigs==1.0.2 + - more-itertools==5.0.0 + - nose==1.3.7 + - pluggy==0.7.1 + - py==1.8.0 +# - pyfisheye==0.0.1 + - pymap3d==1.1.1 + - pytest==3.6.4 +# - traits-enaml==0.3.0.dev0 +prefix: C:\ProgramData\Anaconda2\envs\cn_client_1 + diff --git a/particle_spectrometer/visualize_map.py b/particle_spectrometer/visualize_map.py index 158377e..2b6e99f 100644 --- a/particle_spectrometer/visualize_map.py +++ b/particle_spectrometer/visualize_map.py @@ -69,8 +69,8 @@ def load_path(flight_path, lat0=32.775776, lon0=35.024963, alt0=229): def loadMapData(): """Load height data for map visualization.""" - path1 = r"..\data\reconstructions\N32E034.hgt" - path2 = r"..\data\reconstructions\N32E035.hgt" + path1 = os.path.abspath(os.path.join(r'..', r'data', r'reconstructions', r'N32E034.hgt')) + path2 = os.path.abspath(os.path.join(r'..', r'data', r'reconstructions', r'N32E035.hgt')) with open(path1) as hgt_data: hgt1 = np.fromfile(hgt_data, np.dtype('>i2')).reshape((1201, 1201))[:1200, :1200] with open(path2) as hgt_data: diff --git a/playground/atom_pyqtgraph.py b/playground/atom_pyqtgraph.py index 1cfb946..bf6ec14 100644 --- a/playground/atom_pyqtgraph.py +++ b/playground/atom_pyqtgraph.py @@ -34,6 +34,8 @@ ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## # -*- coding: utf-8 -*- +from __future__ import print_function + """ Created on Sat Jul 20 14:27:48 2013 @@ -80,12 +82,12 @@ def mouseReleaseEvent(self, ev): super(MyGLViewWidget, self).mouseReleaseEvent(ev) if self._downpos == ev.pos(): if ev.button() == 2: - print 'show context menu' + print('show context menu') elif ev.button() == 1: x = ev.pos().x() - self.width() / 2 y = ev.pos().y() - self.height() / 2 self.pan(-x, -y, 0, relative=True) - print self.opts['center'] + print(self.opts['center']) self._prev_zoom_pos = None self._prev_pan_pos = None self.sigUpdate.emit() diff --git a/scripts/clean_memory.py b/scripts/clean_memory.py index 6e33afa..1a56580 100755 --- a/scripts/clean_memory.py +++ b/scripts/clean_memory.py @@ -37,7 +37,7 @@ """ Clean memory of the odroid. -The script moves cpatured date to a backup folder. To remove +The script moves captured date to a backup folder. To remove the backup folder (and clear the memory) use the ``--delete`` flag. """ diff --git a/scripts/drone_camera.py b/scripts/drone_camera.py index 4deea5f..d2aad7b 100644 --- a/scripts/drone_camera.py +++ b/scripts/drone_camera.py @@ -35,7 +35,7 @@ ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## """ -This script statrts the camera of the drone in a loop. +This script starts the camera of the drone in a loop. 
""" #from droneapi.lib import VehicleMode diff --git a/scripts/listusb.sh b/scripts/listusb.sh index c018e70..221fef9 100644 --- a/scripts/listusb.sh +++ b/scripts/listusb.sh @@ -38,9 +38,9 @@ for sysdevpath in $(find /sys/bus/usb/devices/usb*/ -name dev); do ( syspath="${sysdevpath%/dev}" - devname="$(udevadm info -q name -p $syspath)" + devname="$(udevadm info -q name -p "$syspath")" [[ "$devname" == "bus/"* ]] && continue - eval "$(udevadm info -q property --export -p $syspath)" + eval "$(udevadm info -q property --export -p "$syspath")" [[ -z "$ID_SERIAL" ]] && continue echo "/dev/$devname - $ID_SERIAL" ) diff --git a/scripts/start_server.py b/scripts/start_server.py index 7646630..95f8a45 100755 --- a/scripts/start_server.py +++ b/scripts/start_server.py @@ -35,8 +35,8 @@ ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## """ -This script statrts the camera activity. -Should be run at startup of the raspberry Pi. +This script starts the camera activity. +Should be run at startup of the raspberry Pi / Odroid XU4. """ import argparse @@ -101,8 +101,8 @@ def main(): # # Setup. # Note: - # The controller is intialized first, for some reasons: - # - Initialize and get camera infor. + # The controller is initialized first, for some reasons: + # - Initialize and get camera info. # - Pass a pointer to the controller to the server. # controller = Controller(offline=offline, local_path=args.local_path) @@ -138,7 +138,8 @@ def main(): logging.exception('Failed starting the camera. Rebooting.') logging.shutdown() time.sleep(120) - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging except Exception as e: # # Failed starting the camera, might be some USB problem. @@ -146,10 +147,11 @@ def main(): # I delay the reboot so that the tunnel will stay open and # enable debugging. # - logging.exception('Unkown error:\n{}'.format(repr(e))) + logging.exception('Rebooting. Unknown error:\n{}'.format(repr(e))) logging.shutdown() time.sleep(120) - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging if __name__ == '__main__': @@ -178,7 +180,8 @@ def main(): logging.exception('Failed starting the camera. Rebooting.') logging.shutdown() time.sleep(120) - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging except Exception as e: # # Failed starting the camera, might be some USB problem. @@ -186,7 +189,8 @@ def main(): # I delay the reboot so that the tunnel will stay open and # enable debugging. # - logging.exception('Unkown error:\n{}'.format(repr(e))) + logging.exception('Rebooting. 
Unknown error:\n{}'.format(repr(e))) logging.shutdown() time.sleep(120) - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging diff --git a/scripts/start_tunnel.py b/scripts/start_tunnel.py index 8d150ab..b5cc593 100644 --- a/scripts/start_tunnel.py +++ b/scripts/start_tunnel.py @@ -59,7 +59,7 @@ def main(): help='Skip starting the default tunnel') parser.add_argument( '--log_level', - default='INFO', + default='DEBUG', help='Set the log level (possible values: info, debug, ...)') args = parser.parse_args() @@ -109,7 +109,8 @@ def main(): if failures_cnt > camera_settings[gs.INTERNET_FAILURE_THRESH]: logging.error('Failed to connect 3G modem. Will reboot...') - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging logging.error( 'Failed to retrieve proxy parameters. will sleep and try again later.') @@ -148,7 +149,8 @@ def main(): logging.debug('Internet watchdog: failure number: %d.' % failures_cnt) if failures_cnt > camera_settings[gs.INTERNET_FAILURE_THRESH]: logging.error('Failed to connect 3G modem. Will reboot...') - os.system('sudo reboot') + # TODO Long term fix + os.system('sync; sudo reboot -f') # Changed from 'sudo reboot', workaround for reboot hanging else: logging.debug('Internet watchdog: succeed.') failures_cnt = 0 diff --git a/scripts_calibrate/analyze_vignetting_measurements.py b/scripts_calibrate/analyze_vignetting_measurements.py index 02c631a..bc14b58 100644 --- a/scripts_calibrate/analyze_vignetting_measurements.py +++ b/scripts_calibrate/analyze_vignetting_measurements.py @@ -33,131 +33,133 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -from __future__ import division -import numpy as np -from numpy.polynomial import polynomial -from mayavi import mlab -import matplotlib.pyplot as plt -import cv2 -import time -from mpl_toolkits.mplot3d import Axes3D -import scipy.interpolate as interp -from skimage.morphology import convex_hull_image -import fisheye -from CameraNetwork.image_utils import FisheyeProxy, Normalization -import cPickle -import os -from sklearn import linear_model -from sklearn.neighbors import KNeighborsRegressor -from sklearn.pipeline import make_pipeline -from sklearn.preprocessing import PolynomialFeatures -import math -import itertools - -COLORS = ('blue', 'green', 'red') -COLOR_INDICES = {'blue': 2, 'green': 1, 'red': 0} - - -def skFit(base_path, x, y, z, img_shape): - model = make_pipeline( - PolynomialFeatures(2), - linear_model.RANSACRegressor(random_state=0, residual_threshold=5) - ) - - # - # Interpolate a second order polynomial - # - X = np.hstack([coord.reshape(-1, 1) for coord in (x, y)]) - model.fit(X, z) - - # - # Visualize the error - # - ygrid, xgrid = np.mgrid[0:img_shape[0]:10, 0:img_shape[1]:10] - grid = np.hstack([coord.reshape(-1, 1) for coord in (xgrid, ygrid)]) - - zgrid = model.predict(grid).reshape(ygrid.shape) - - fig = plt.figure() - ax = fig.add_subplot(111, projection='3d') - ax.plot_surface(xgrid, ygrid, zgrid) - ax.scatter(x, y, z) - - - z_estim = model.predict(X).reshape(y.shape) - fig = plt.figure() - ax = fig.add_subplot(111, projection='3d') - ax.scatter(x, y, z-z_estim) - - plt.show() - - # - # Visualize the error in the normalized image. 
- # - z_err = np.abs(z-z_estim) - - img_out = np.ones(shape=img_shape) * 100 - - for threshold in np.logspace(-2, 2, 20)[::-1]: - print threshold - img_tmp = np.zeros(shape=img_shape) - indices = z_err < threshold - if not np.any(indices): - break - img_tmp[y[indices], x[indices]] = threshold - chull = convex_hull_image(img_tmp) - - img_out[chull] = threshold - - plt.figure() - plt.imshow(img_out) - plt.colorbar() - - fe = fisheye.load_model(os.path.join(base_path, 'fisheye.pkl')) - normalization = Normalization(1001, FisheyeProxy(fe)) - img_normalized = normalization.normalize(img_out) - - plt.figure() - plt.imshow(img_normalized) - plt.colorbar() - plt.show() - - -def main(base_path): - # - # Load the measurements - # - base_path1 = r'vignetting_calibration\4102820388' - - color = 'blue' - color_index = COLOR_INDICES[color] - - with open(os.path.join(base_path1, 'measurements_{}.pkl'.format(color)), 'rb') as f: - measurements1 = cPickle.load(f) - with open(os.path.join(base_path1, 'spec_{}.pkl'.format(color)), 'rb') as f: - spec1 = cPickle.load(f) - - x1, y1, z1 = [np.array(a) for a in zip(*measurements1)] - - plt.figure() - for c in COLORS: - plt.plot(spec1[0], spec1[1], label='camera1') - plt.legend() - plt.show() - - for c in COLORS: - mlab.figure(bgcolor=(1, 1, 1), ) - mlab.points3d(x1, y1, z1[..., COLOR_INDICES[c]], mode='sphere', scale_mode='none', scale_factor=5, color=(0, 0, 1)) - mlab.outline(color=(0, 0, 0), extent=(0, 1600, 0, 1200, 0, 255)) - mlab.title(c, color=(0, 0, 0)) - mlab.show() - - img_rgb = np.zeros(shape=(1200, 1600, 3)) - for x, y, val in measurements1: - img_rgb[y, x, ...] = val - - img = img_rgb[..., 2][1::2, 1::2] - y, x = np.nonzero(img) - z = img[np.nonzero(img)] - - skFit(base_path, x, y, z, img.shape) + +from __future__ import print_function +from __future__ import division +import numpy as np +from numpy.polynomial import polynomial +from mayavi import mlab +import matplotlib.pyplot as plt +import cv2 +import time +from mpl_toolkits.mplot3d import Axes3D +import scipy.interpolate as interp +from skimage.morphology import convex_hull_image +import fisheye +from CameraNetwork.image_utils import FisheyeProxy, Normalization +import cPickle +import os +from sklearn import linear_model +from sklearn.neighbors import KNeighborsRegressor +from sklearn.pipeline import make_pipeline +from sklearn.preprocessing import PolynomialFeatures +import math +import itertools + +COLORS = ('blue', 'green', 'red') +COLOR_INDICES = {'blue': 2, 'green': 1, 'red': 0} + + +def skFit(base_path, x, y, z, img_shape): + model = make_pipeline( + PolynomialFeatures(2), + linear_model.RANSACRegressor(random_state=0, residual_threshold=5) + ) + + # + # Interpolate a second order polynomial + # + X = np.hstack([coord.reshape(-1, 1) for coord in (x, y)]) + model.fit(X, z) + + # + # Visualize the error + # + ygrid, xgrid = np.mgrid[0:img_shape[0]:10, 0:img_shape[1]:10] + grid = np.hstack([coord.reshape(-1, 1) for coord in (xgrid, ygrid)]) + + zgrid = model.predict(grid).reshape(ygrid.shape) + + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + ax.plot_surface(xgrid, ygrid, zgrid) + ax.scatter(x, y, z) + + + z_estim = model.predict(X).reshape(y.shape) + fig = plt.figure() + ax = fig.add_subplot(111, projection='3d') + ax.scatter(x, y, z-z_estim) + + plt.show() + + # + # Visualize the error in the normalized image. 
+ # + z_err = np.abs(z-z_estim) + + img_out = np.ones(shape=img_shape) * 100 + + for threshold in np.logspace(-2, 2, 20)[::-1]: + print(threshold) + img_tmp = np.zeros(shape=img_shape) + indices = z_err < threshold + if not np.any(indices): + break + img_tmp[y[indices], x[indices]] = threshold + chull = convex_hull_image(img_tmp) + + img_out[chull] = threshold + + plt.figure() + plt.imshow(img_out) + plt.colorbar() + + fe = fisheye.load_model(os.path.join(base_path, 'fisheye.pkl')) + normalization = Normalization(1001, FisheyeProxy(fe)) + img_normalized = normalization.normalize(img_out) + + plt.figure() + plt.imshow(img_normalized) + plt.colorbar() + plt.show() + + +def main(base_path): + # + # Load the measurements + # + base_path1 = r'vignetting_calibration\4102820388' + + color = 'blue' + color_index = COLOR_INDICES[color] + + with open(os.path.join(base_path1, 'measurements_{}.pkl'.format(color)), 'rb') as f: + measurements1 = cPickle.load(f) + with open(os.path.join(base_path1, 'spec_{}.pkl'.format(color)), 'rb') as f: + spec1 = cPickle.load(f) + + x1, y1, z1 = [np.array(a) for a in zip(*measurements1)] + + plt.figure() + for c in COLORS: + plt.plot(spec1[0], spec1[1], label='camera1') + plt.legend() + plt.show() + + for c in COLORS: + mlab.figure(bgcolor=(1, 1, 1), ) + mlab.points3d(x1, y1, z1[..., COLOR_INDICES[c]], mode='sphere', scale_mode='none', scale_factor=5, color=(0, 0, 1)) + mlab.outline(color=(0, 0, 0), extent=(0, 1600, 0, 1200, 0, 255)) + mlab.title(c, color=(0, 0, 0)) + mlab.show() + + img_rgb = np.zeros(shape=(1200, 1600, 3)) + for x, y, val in measurements1: + img_rgb[y, x, ...] = val + + img = img_rgb[..., 2][1::2, 1::2] + y, x = np.nonzero(img) + z = img[np.nonzero(img)] + + skFit(base_path, x, y, z, img.shape) diff --git a/scripts_calibrate/calibrate_camera.py b/scripts_calibrate/calibrate_camera.py index c0a2f0d..2b84b1b 100644 --- a/scripts_calibrate/calibrate_camera.py +++ b/scripts_calibrate/calibrate_camera.py @@ -33,6 +33,7 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## + """Intrinsic and Vignetting calibration This scripts does calibration of the camera. There are three steps: @@ -44,6 +45,7 @@ the camera serial number. After successful run, the results should be added, commited and pushed into the repository. """ +from __future__ import print_function from __future__ import division import CameraNetwork from CameraNetwork.calibration import VignettingCalibration @@ -203,7 +205,7 @@ def main(): img, _, _ = cam.capture(settings, frames_num=1) # - # Save image for debuging the calibration process. + # Save image for debugging the calibration process. # cv2.imwrite( os.path.join(results_path, 'geometric', 'img_{:03}.jpg'.format(img_index)), @@ -334,12 +336,12 @@ def main(): try: measurement = findSpot(np.clip(img-black_img, 0, 255)) except: - print 'FAIL' - print traceback.format_exc() + print('FAIL') + print(traceback.format_exc()) measurement = None, None, None measurements.append(measurement) - print measurement + print(measurement) # # Store the measurement image. 
diff --git a/scripts_client/Sessions/Default%20Settings b/scripts_client/Sessions/Default%20Settings index 237a709..e9a060b 100644 --- a/scripts_client/Sessions/Default%20Settings +++ b/scripts_client/Sessions/Default%20Settings @@ -3,11 +3,7 @@ SCPAutoPwd\0\ ACSinUTF\0\ Comment\\ CtrlTabSwitch\0\ -<<<<<<< HEAD -Password\6b2p3\ -======= -Password\4v131\ ->>>>>>> bitbucket/master +Password\v22n3\ ForegroundOnBell\0\ SaveWindowPos\0\ WindowState\0\ diff --git a/scripts_client/camera_client.py b/scripts_client/camera_client.py index 21f302e..aa4d7e5 100644 --- a/scripts_client/camera_client.py +++ b/scripts_client/camera_client.py @@ -38,6 +38,7 @@ A GUI client allows easy access to cameras thier settings and their measurements. """ + from __future__ import division import pickle @@ -76,7 +77,8 @@ def main(local_mode, log_level): if __name__ == '__main__': parser = argparse.ArgumentParser(description='Start the camera client application') parser.add_argument('--local', action='store_true', help='Run in local mode.') - parser.add_argument('--log_level', default='INFO', help='Set the log level (possible values: info, debug, ...)') + parser.add_argument('--log_level', default='debug', help='Set the log level (possible values: info, debug, ...)') + # parser.add_argument('--log_level', default='INFO', help='Set the log level (possible values: info, debug, ...)') args = parser.parse_args() main(args.local, args.log_level) \ No newline at end of file diff --git a/scripts_client/client_logs/cameralog_190920_165337.txt b/scripts_client/client_logs/cameralog_190920_165337.txt new file mode 100644 index 0000000..17ce641 --- /dev/null +++ b/scripts_client/client_logs/cameralog_190920_165337.txt @@ -0,0 +1,2 @@ +2019-09-20 16:53:37,372 [MainThread ] [DEBUG] Proxy parameters: +{u'tunnel_port': 20000, u'autossh_monitor_port': 10000, u'ip': u'3.123.49.101', u'proxy_port': 1980, u'client_port': 1981, u'hb_port': 1985, u'user': u'ubuntu'} diff --git a/scripts_client/start_local.py b/scripts_client/start_local.py index 07ac699..a50e020 100644 --- a/scripts_client/start_local.py +++ b/scripts_client/start_local.py @@ -33,63 +33,63 @@ ## LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE ## OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.## -"""Start Cameras, Proxy and Client locally. - -This script will run cameras, a proxy and a client locally on a -single computer. It is helpfull for replaying saved images. -Given a folder where the camera's data was saved, it will -start the right number of cameras and point their home -folder to the corresponding stored data. -""" - -from __future__ import division, absolute_import, print_function -import argparse -import CameraNetwork -from glob import glob -import os -import subprocess as sbp - - -def main(base_path, debug_mode=False, local_proxy=False): - camera_paths = sorted(glob(os.path.join(base_path, '*'))) - camera_paths = filter(lambda p: os.path.isdir(p), camera_paths) - - - # - # Start the proxy. - # - if local_proxy: - proxy = sbp.Popen(['python'] + ['../scripts_proxy/start_proxy.py']) - - # - # Start the client. - # - if not debug_mode: - client = sbp.Popen(['python'] + - ['../scripts_client/camera_client.py', '--local']) - - # - # Start all cameras. 
- # - servers = [] - for path in camera_paths: - servers.append(sbp.Popen(['python'] + - ['../scripts/start_server.py','--local_path', path] + (["--local_proxy"] if local_proxy else []))) - - - for server in servers: - server.wait() - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Start a local session.') - parser.add_argument( - '--debug_mode', '-d', action='store_true', - help="Do not start the client. The client will be started from a debugger.") - parser.add_argument( - '--local_proxy', '-l', action='store_true', - help="Start a local proxy.") - parser.add_argument('base_path', help='Base path of cameras data.') - args = parser.parse_args() - - main(args.base_path, args.debug_mode, args.local_proxy) +"""Start Cameras, Proxy and Client locally. + +This script will run cameras, a proxy and a client locally on a +single computer. It is helpfull for replaying saved images. +Given a folder where the camera's data was saved, it will +start the right number of cameras and point their home +folder to the corresponding stored data. +""" + +from __future__ import division, absolute_import, print_function +import argparse +import CameraNetwork +from glob import glob +import os +import subprocess as sbp + + +def main(base_path, debug_mode=False, local_proxy=False): + camera_paths = sorted(glob(os.path.join(base_path, '*'))) + camera_paths = filter(lambda p: os.path.isdir(p), camera_paths) + + # + # Start the proxy. + # + if local_proxy: + proxy = sbp.Popen(['python'] + ['../scripts_proxy/start_proxy.py']) + + # + # Start the client. + # + if not debug_mode: + client = sbp.Popen(['python'] + + ['../scripts_client/camera_client.py', '--local']) + + # + # Start all cameras. + # + servers = [] + for path in camera_paths: + + servers.append(sbp.Popen(['python'] + + ['../scripts/start_server.py', '--local_path', path] + + (["--local_proxy"] if local_proxy else []))) + + for server in servers: + server.wait() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Start a local session.') + parser.add_argument( + '--debug_mode', '-d', action='store_true', + help="Do not start the client. The client will be started from a debugger.") + parser.add_argument( + '--local_proxy', '-l', action='store_true', + help="Start a local proxy.") + parser.add_argument('base_path', help='Base path of cameras data.') + args = parser.parse_args() + + main(args.base_path, args.debug_mode, args.local_proxy) diff --git a/windows_setup_instructions b/windows_setup_instructions new file mode 100644 index 0000000..d60c0b9 --- /dev/null +++ b/windows_setup_instructions @@ -0,0 +1,7 @@ +conda config --env --set restore_free_channel true +conda config --env --append channels conda-forge +conda env create -f env2021_full.yaml +cond activate cn_client_1 +python -m pip install pymap3d==1.1.1 git+https://github.com/enthought/traits-enaml.git@update-data-frame-table +python setup.py develop --user # in pyfisheye folder +python setup.py develop --user # in cameranetwork folder
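+# Optional sanity check: the imports below should succeed once the steps above have completed
+# (CameraNetwork comes from the develop install, pymap3d from the pip install)
+python -c "import CameraNetwork, pymap3d"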