diff --git a/docs/_build/html/_sources/user_guide/defaults.rst.txt b/docs/_build/html/_sources/user_guide/defaults.rst.txt
index fc5e2a8..fbf7b0e 100644
--- a/docs/_build/html/_sources/user_guide/defaults.rst.txt
+++ b/docs/_build/html/_sources/user_guide/defaults.rst.txt
@@ -33,7 +33,7 @@ The default is
return_as = "xarray"
[nearesttime]
- within = "1H"
+ within = "1h"
return_as = "xarray"
The ``[default]`` section are global settings used by each download method. These can be overwritten for each method. For instance, *s3_refresh* is set to false for ``[timerange]`` because it's unlikely you will need to refresh the file listing. Also, ``[latest]`` and ``[nearesttime]`` are by default returned as an xarray object instead of a list of files downloaded.
diff --git a/docs/user_guide/notebooks/DEMO_download_goes_single_point_timerange.ipynb b/docs/user_guide/notebooks/DEMO_download_goes_single_point_timerange.ipynb
index c37a019..6450acf 100644
--- a/docs/user_guide/notebooks/DEMO_download_goes_single_point_timerange.ipynb
+++ b/docs/user_guide/notebooks/DEMO_download_goes_single_point_timerange.ipynb
@@ -30,10 +30,7 @@
"source": [
"import sys\n",
"sys.path.append(\"../../../\")\n",
- "from goes2go.data import goes_single_point_timerange\n",
- "\n",
- "from datetime import datetime\n",
- "import pandas as pd"
+ "from goes2go.data import goes_single_point_timerange"
]
},
{
diff --git a/goes2go/NEW.py b/goes2go/NEW.py
index ea00adc..a1f0e8e 100644
--- a/goes2go/NEW.py
+++ b/goes2go/NEW.py
@@ -7,15 +7,12 @@
==========
"""
-import itertools
import logging
import re
from pathlib import Path
-import numpy as np
import pandas as pd
import s3fs
-import toml
from goes2go import config
from goes2go.data import _goes_file_df, goes_latest, goes_nearesttime, goes_timerange, goes_single_point_timerange
diff --git a/goes2go/__init__.py b/goes2go/__init__.py
index 46e6ede..813fe33 100644
--- a/goes2go/__init__.py
+++ b/goes2go/__init__.py
@@ -24,7 +24,7 @@
# TODO: Move some of the tools.py to these accessors.
try:
import goes2go.accessors
-except:
+except Exception:
warnings.warn("goes2go xarray accessors could not be imported.")
@@ -80,7 +80,7 @@ def _expand(self):
return_as = "xarray"
["nearesttime"]
-within = "1H"
+within = "1h"
return_as = "xarray"
"""
@@ -89,7 +89,7 @@ def _expand(self):
try:
# Load the goes2go config file
config = toml.load(_config_file)
-except:
+except Exception:
try:
# Create the goes2go config file
_config_path.mkdir(parents=True, exist_ok=True)
@@ -106,7 +106,7 @@ def _expand(self):
# Load the new goes2go config file
config = toml.load(_config_file)
- except (FileNotFoundError, PermissionError, IOError):
+ except OSError:
print(
f" ╭─goes2go─────────────────────────────────────────────╮\n"
f" │ WARNING: Unable to create config file │\n"
diff --git a/goes2go/accessors.py b/goes2go/accessors.py
index f7b8a76..b586123 100644
--- a/goes2go/accessors.py
+++ b/goes2go/accessors.py
@@ -3,7 +3,7 @@
"""
===========
-RGB Recipes
+RGB Recipes.
===========
RGB Recipes for the GOES Advanced Baseline Imager.
@@ -118,7 +118,7 @@ def crs(self):
)
sat_height = ds.goes_imager_projection.perspective_point_height
nadir_lon = ds.geospatial_lat_lon_extent.geospatial_lon_nadir
- nadir_lat = ds.geospatial_lat_lon_extent.geospatial_lat_nadir
+ # nadir_lat = ds.geospatial_lat_lon_extent.geospatial_lat_nadir
elif ds.cdm_data_type == "Point":
globe_kwargs = dict(
semimajor_axis=ds.goes_lat_lon_projection.semi_major_axis,
@@ -127,7 +127,7 @@ def crs(self):
)
sat_height = ds.nominal_satellite_height.item() * 1000
nadir_lon = ds.lon_field_of_view.item()
- nadir_lat = ds.lat_field_of_view.item()
+ # nadir_lat = ds.lat_field_of_view.item()
# Create a cartopy coordinate reference system (crs)
globe = ccrs.Globe(ellipse=None, **globe_kwargs)
@@ -258,9 +258,8 @@ def domain(self):
shapely.Polygon
"""
ds = self._obj
- assert ds.title.startswith(
- "ABI"
- ), "Domain polygon only available for ABI CONUS and Mesoscale files."
+ if not ds.title.startswith("ABI"):
+ raise ValueError("Domain polygon only available for ABI CONUS and Mesoscale files.")
sat_height = ds.goes_imager_projection.perspective_point_height
# Trim out domain FOV from the full disk (this is necessary for GOES-16).
dom_border = np.array(
@@ -280,9 +279,8 @@ class rgbAccessor:
def __init__(self, xarray_obj):
self._obj = xarray_obj
- assert (
- self._obj.title == "ABI L2 Cloud and Moisture Imagery"
- ), "Dataset must be an ABI L2 Cloud and Moisture Imagery file."
+ if self._obj.title != "ABI L2 Cloud and Moisture Imagery":
+ raise ValueError("Dataset must be an ABI L2 Cloud and Moisture Imagery file.")
self._crs = None
self._x = None
self._y = None
diff --git a/goes2go/data.py b/goes2go/data.py
index 5cd228f..0c246ea 100644
--- a/goes2go/data.py
+++ b/goes2go/data.py
@@ -3,8 +3,9 @@
"""
=============
-Retrieve Data
+Retrieve Data.
=============
+
Download and read data from the R-series Geostationary Operational
Environmental Satellite data.
@@ -16,12 +17,11 @@
"""
import multiprocessing
-from concurrent.futures import ThreadPoolExecutor, as_completed, wait
+from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timedelta
from functools import partial
from pathlib import Path
-import sys
import numpy as np
import pandas as pd
import s3fs
@@ -73,7 +73,7 @@ def _check_param_inputs(**params):
satellite = params["satellite"]
domain = params["domain"]
product = params["product"]
- verbose = params["verbose"]
+ # verbose = params["verbose"]
## Determine the Satellite
if satellite not in _satellite:
@@ -81,9 +81,8 @@ def _check_param_inputs(**params):
for key, aliases in _satellite.items():
if satellite in aliases:
satellite = key
- assert (
- satellite in _satellite
- ), f"satellite must be one of {list(_satellite.keys())} or an alias {list(_satellite.values())}"
+ if satellite not in _satellite:
+ raise ValueError(f"satellite must be one of {list(_satellite.keys())} or an alias {list(_satellite.values())}")
## Determine the Domain (only needed for ABI product)
if product.upper().startswith("ABI"):
@@ -99,9 +98,8 @@ def _check_param_inputs(**params):
if domain in aliases:
domain = key
product = product + domain
- assert (
- (domain in _domain) or (domain in ["M1", "M2"])
- ), f"domain must be one of {list(_domain.keys())} or an alias {list(_domain.values())}"
+ if (domain not in _domain) and (domain not in ["M1", "M2"]):
+ raise ValueError(f"domain must be one of {list(_domain.keys())} or an alias {list(_domain.values())}")
else:
domain = None
@@ -110,9 +108,8 @@ def _check_param_inputs(**params):
for key, aliases in _product.items():
if product.upper() in aliases:
product = key
- assert (
- product in _product
- ), f"product must be one of {list(_product .keys())} or an alias {list(_product .values())}"
+ if product not in _product:
+ raise ValueError(f"product must be one of {list(_product.keys())} or an alias {list(_product.values())}")
return satellite, product, domain
@@ -162,7 +159,7 @@ def _goes_file_df(satellite, product, start, end, bands=None, refresh=True):
df["mode"] = mode_bands[0].str[1:].astype(int)
try:
df["band"] = mode_bands[1].astype(int)
- except:
+ except Exception: # TODO: Specify specific expected exception(s)
# No channel data
df["band"] = None
@@ -220,7 +217,6 @@ def do_download(src):
def _as_xarray_MP(src, save_dir, i=None, n=None, verbose=True):
"""Open a file as a xarray.Dataset -- a multiprocessing helper."""
-
# File destination
local_copy = Path(save_dir) / src
@@ -387,7 +383,7 @@ def goes_timerange(
start = pd.to_datetime(start)
if isinstance(end, str):
end = pd.to_datetime(end)
- # If `recent` is a string (like recent='1H'), parse with Pandas
+ # If `recent` is a string (like recent='1h'), parse with Pandas
if isinstance(recent, str):
recent = pd.to_timedelta(recent)
@@ -399,14 +395,14 @@ def goes_timerange(
check1 = start is not None and end is not None
check2 = recent is not None
- assert check1 or check2, "🤔 `start` and `end` *or* `recent` is required"
-
+ if not (check1 or check2):
+ raise ValueError("🤔 `start` and `end` *or* `recent` is required")
if check1:
- assert hasattr(start, "second") and hasattr(
- end, "second"
- ), "`start` and `end` must be a datetime object"
+ if not (hasattr(start, "second") and hasattr(end, "second")):
+ raise ValueError("`start` and `end` must be a datetime object")
elif check2:
- assert hasattr(recent, "seconds"), "`recent` must be a timedelta object"
+ if not hasattr(recent, "seconds"):
+ raise ValueError("`recent` must be a timedelta object")
# Parameter Setup
# ---------------
@@ -429,7 +425,7 @@ def goes_timerange(
def _preprocess_single_point(ds, target_lat, target_lon, decimal_coordinates=True):
"""
- Preprocessing function to select only the single relevant data subset
+ Preprocessing function to select only the single relevant data subset.
Parameters
----------
@@ -524,7 +520,7 @@ def goes_single_point_timerange(
start = pd.to_datetime(start)
if isinstance(end, str):
end = pd.to_datetime(end)
- # If `recent` is a string (like recent='1H'), parse with Pandas
+ # If `recent` is a string (like recent='1h'), parse with Pandas
if isinstance(recent, str):
recent = pd.to_timedelta(recent)
@@ -536,14 +532,14 @@ def goes_single_point_timerange(
check1 = start is not None and end is not None
check2 = recent is not None
- assert check1 or check2, "🤔 `start` and `end` *or* `recent` is required"
-
+ if not (check1 or check2):
+ raise ValueError("🤔 `start` and `end` *or* `recent` is required")
if check1:
- assert hasattr(start, "second") and hasattr(
- end, "second"
- ), "`start` and `end` must be a datetime object"
+ if not (hasattr(start, "second") and hasattr(end, "second")):
+ raise ValueError("`start` and `end` must be a datetime object")
elif check2:
- assert hasattr(recent, "seconds"), "`recent` must be a timedelta object"
+ if not hasattr(recent, "seconds"):
+ raise ValueError("`recent` must be a timedelta object")
# Parameter Setup
# ---------------
diff --git a/tests/test_abi.py b/tests/test_abi.py
index 8bbf566..24d662a 100644
--- a/tests/test_abi.py
+++ b/tests/test_abi.py
@@ -1,11 +1,9 @@
## Brian Blaylock
## October 18, 2021
-"""
-Some simple tests for the ABI data
-"""
+"""Some simple tests for the ABI data."""
-from goes2go.data import goes_nearesttime, goes_latest, goes_timerange
+from goes2go.data import goes_nearesttime, goes_latest
def test_nearesttime():
diff --git a/tests/test_data.py b/tests/test_data.py
index ff3ab18..91253a2 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -1,7 +1,6 @@
from datetime import datetime, timedelta
import unittest
from unittest.mock import patch
-from venv import create
import pandas as pd
diff --git a/tests/test_glm.py b/tests/test_glm.py
index 9cb560d..ade17de 100644
--- a/tests/test_glm.py
+++ b/tests/test_glm.py
@@ -1,11 +1,9 @@
## Brian Blaylock
## October 18, 2021
-"""
-Some simple tests for the GLM data
-"""
+"""Some simple tests for the GLM data."""
-from goes2go.data import goes_latest, goes_nearesttime, goes_timerange
+from goes2go.data import goes_latest, goes_nearesttime
def test_nearesttime():