diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 2b6d7e88..9e9736bf 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -16,9 +16,9 @@ jobs: name: Documentation tests steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 45e17fae..2c599693 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -19,17 +19,17 @@ jobs: numpy_ver: "1.24" os: ubuntu-latest test_config: "NEP29" - - python-version: "3.6.8" - numpy_ver: "1.19.5" + - python-version: "3.9" + numpy_ver: "1.23.5" os: "ubuntu-20.04" test_config: "Ops" name: Python ${{ matrix.python-version }} on ${{ matrix.os }} with numpy ${{ matrix.numpy_ver }} runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -40,18 +40,8 @@ jobs: - name: Install Operational dependencies if: ${{ matrix.test_config == 'Ops'}} run: | - sudo apt-get install libhdf5-serial-dev netcdf-bin libnetcdf-dev pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} - pip install "cdflib<1.0" - pip install -r requirements.txt - pip install -r test_requirements.txt - cd .. - git clone https://github.com/pysat/pysat.git - cd pysat - git checkout pip_rc_install - python setup.py develop - cd ../pysatNASA - pip install . + pip install --upgrade-strategy only-if-needed .[test] - name: Install NEP29 dependencies if: ${{ matrix.test_config == 'NEP29'}} @@ -78,46 +68,20 @@ jobs: - name: Test with pytest run: pytest - - name: Coveralls Parallel (Ubuntu) - if: startsWith(matrix.os, 'ubuntu') - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - COVERALLS_PARALLEL: true - run: | - curl -sL https://coveralls.io/coveralls-linux.tar.gz | tar -xz - ./coveralls report --parallel --repo-token=${{ secrets.COVERALLS_REPO_TOKEN }} --build-number ${{ github.run_number }} - - - name: Coveralls Parallel (Windows) - if: startsWith(matrix.os, 'windows') - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - COVERALLS_PARALLEL: true - run: | - curl -L https://github.com/coverallsapp/coverage-reporter/releases/latest/download/coveralls-windows.exe -o coveralls.exe - ./coveralls.exe report --parallel --repo-token=${{ secrets.COVERALLS_REPO_TOKEN }} --build-number ${{ github.run_number }} - - - name: Coveralls Parallel (MacOS) - if: startsWith(matrix.os, 'macos') - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - COVERALLS_PARALLEL: true - run: | - brew tap coverallsapp/coveralls --quiet - brew install coveralls --quiet - ls -lh .coverage - ls -lh coverage.xml - coveralls report coverage.xml --format=cobertura --verbose --parallel --repo-token=${{ secrets.COVERALLS_REPO_TOKEN }} --build-number ${{ github.run_number }} --debug --dry-run + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2 + with: + flag-name: run-${{ join(matrix.*, '-') }} + parallel: true finish: name: Finish Coverage Analysis needs: build - if: always() - runs-on: "ubuntu-latest" + if: ${{ always() }} + runs-on: ubuntu-latest steps: - - name: Coveralls Finish - env: - COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} - COVERALLS_PARALLEL: true 
- run: | - curl -sL https://coveralls.io/coveralls-linux.tar.gz | tar -xz - ./coveralls done --build-number ${{ github.run_number }} + - name: Coveralls Finished + uses: coverallsapp/github-action@v2 + with: + parallel-finished: true + diff --git a/.github/workflows/pip_rc_install.yml b/.github/workflows/pip_rc_install.yml index cc60a7dc..e0502661 100644 --- a/.github/workflows/pip_rc_install.yml +++ b/.github/workflows/pip_rc_install.yml @@ -17,9 +17,9 @@ jobs: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/pysat_rc.yml b/.github/workflows/pysat_rc.yml index 85afb492..db17f76c 100644 --- a/.github/workflows/pysat_rc.yml +++ b/.github/workflows/pysat_rc.yml @@ -18,9 +18,9 @@ jobs: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -42,20 +42,20 @@ jobs: - name: Test with pytest run: pytest - - name: Publish results to coveralls - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COVERALLS_PARALLEL: true - run: coveralls --rcfile=pyproject.toml --service=github + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2 + with: + flag-name: run-${{ join(matrix.*, '-') }} + parallel: true finish: name: Finish Coverage Analysis needs: build + if: ${{ always() }} runs-on: ubuntu-latest steps: - name: Coveralls Finished - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - pip install --upgrade coveralls - coveralls --service=github --finish + uses: coverallsapp/github-action@v2 + with: + parallel-finished: true + diff --git a/.zenodo.json b/.zenodo.json index 433d6dbf..648769a0 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -5,6 +5,7 @@ "atmosphere", "thermosphere", "magnetosphere", + "mars", "heliosphere", "observations", "models", @@ -18,16 +19,16 @@ "name": "Klenzing, Jeff", "orcid": "0000-0001-8321-6074" }, - { - "affiliation": "Cosmic Studio", - "name": "Stoneback, Russell", - "orcid": "0000-0001-7216-4336" - }, { "affiliation": "U.S. 
Naval Research Laboratory", "name": "Burrell, Angeline G.", "orcid": "0000-0001-8875-9326" }, + { + "affiliation": "Cosmic Studio", + "name": "Stoneback, Russell", + "orcid": "0000-0001-7216-4336" + }, { "affiliation": "Catholic University of America, Goddard Space Flight Center", "name": "Smith, Jonathon", @@ -43,6 +44,11 @@ "name": "Esman, Teresa", "orcid": "0000-0003-0382-6281" }, + { + "affiliation": "Universities Space Research Association, Goddard Space Flight Center", + "name": "Govada, Aadarsh", + "orcid": "0009-0004-7873-5899" + }, { "affiliation": "Predictive Science", "name": "Pembroke, Asher" diff --git a/ACKNOWLEDGEMENTS.md b/ACKNOWLEDGEMENTS.md index 22c05861..2035a263 100644 --- a/ACKNOWLEDGEMENTS.md +++ b/ACKNOWLEDGEMENTS.md @@ -19,6 +19,7 @@ Missions - NASA Scintillation Observations and Response of the Ionosphere to Electrodynamics (SORTIE) - NASA Scintillation Prediction Observations Research Task (SPORT) +- NOAA Constellation Observing System for Meteorology Ionosphere and Climate (COSMIC-2) Disclaimers =========== diff --git a/CHANGELOG.md b/CHANGELOG.md index 88a0e1e4..85b3381c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,10 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/). -## [0.0.6] - 2024-XX-XX +## [0.0.6] - 2024-10-03 * New Instruments * DE2 VEFIMAGB - electric and magnetic field on the same cadence - * MAVEN mag + * MAVEN MAG * MAVEN SEP * MAVEN in situ key parameters * REACH Dosimeter @@ -14,6 +14,7 @@ This project adheres to [Semantic Versioning](https://semver.org/). * Allow files to be unzipped after download * Added custom `concat_data` method to JHUAPL methods, for TIMED-GUVI and DMSP-SSUSI data + * Added time-dependent, file format function for DMSP SSUSI to DMSP methods * Added cleaning to TIMED-GUVI SDR imaging data * Bug Fixes * Fix general clean routine to skip transformation matrices @@ -23,10 +24,13 @@ This project adheres to [Semantic Versioning](https://semver.org/). * Allow graceful failure with no files in jhuapl load functions * New window needs to be integer for calculate_imf_steadiness * Fixed a bug where cdas_download may drop the requested end date file + * Reverted the coveralls integration to the GitHub service for MacOS runs + * Fixed a bug where cdas_list_remote_files errored without remote data * Documentation * Added example of how to export data for archival * Updated documentation refs * Add keywords to zenodo + * Fixed broken links * Deprecations * Deprecated '' tag for de2_vefi module, support moved to de2_vefimagb * Maintenance @@ -38,6 +42,10 @@ This project adheres to [Semantic Versioning](https://semver.org/). * Added version cap for sphinx_rtd_theme * Include standard tests for ICON IVM-B * Update NEP29 standards for Jun 2024 + * Updated standards for pandas, numpy, and pysat + * Updated versions in GitHub Actions + * Implement coveralls app in GitHub Actions + * Cycled Operational Environment testing ## [0.0.5] - 2023-06-27 * New Instruments diff --git a/docs/archival.rst b/docs/archival.rst index 833905d5..e8c5d0e2 100644 --- a/docs/archival.rst +++ b/docs/archival.rst @@ -52,7 +52,11 @@ preferred formats. An example of this is: In this case, note that the pysat 'name' label is output to three different metadata values required by the ITSP standards. Additionally, the :py:attr:`export_pysat_info` option is set to false here. This drops several -internal :py:mod:`pysat` metadata values before writing to file. 
+internal :py:mod:`pysat` metadata values before writing to file. Note that +this includes the default acknowledgements and references objects. These +are set manually to avoid conflicts between the original dataset and the +new dataset, as well as keeping in line with requirements with potentially +different data servers. An example can be found in the [REACH Operational Software](https://github.com/jklenzing/ops_reach/blob/main/ops_reach/instruments/methods/reach.py). A full guide to SPDF metadata standards can be found `at SPDF `_. diff --git a/docs/supported_instruments.rst b/docs/supported_instruments.rst index d29d4c3f..4d368542 100644 --- a/docs/supported_instruments.rst +++ b/docs/supported_instruments.rst @@ -125,6 +125,8 @@ DMSP SSUSI FORMOSAT-1 IVM -------------- +Note that the Ion Velocity Meter data product is managed here since the repository is +at CDAWeb. For other FORMOSAT-X data products, please see pysatCDAAC. .. automodule:: pysatNASA.instruments.formosat1_ivm :members: @@ -188,8 +190,8 @@ JPL GPS .. _maven_insitu_kp: -MAVEN INSITU KP ---------------- +MAVEN IN SITU KP +---------------- .. automodule:: pysatNASA.instruments.maven_insitu_kp :members: diff --git a/pyproject.toml b/pyproject.toml index 686c8ac5..49306051 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pysatNASA" -version = "0.0.5" +version = "0.0.6" description = "pysat support for NASA Instruments" readme = "README.md" requires-python = ">=3.6" @@ -44,7 +44,7 @@ dependencies = [ "netCDF4", "numpy", "pandas", - "pysat >= 3.1", + "pysat >= 3.2", "requests", "scipy >= 1.4", "xarray" @@ -53,7 +53,6 @@ dependencies = [ [project.optional-dependencies] pysatcdf = ["pysatCDF"] test = [ - "coveralls", "flake8", "flake8-docstrings", "hacking >= 1.0", @@ -84,6 +83,7 @@ markers = [ "download", "no_download", "load_options", + "new_tests", "first", "second" ] diff --git a/pysatNASA/instruments/cnofs_vefi.py b/pysatNASA/instruments/cnofs_vefi.py index 80bfc905..a297f719 100644 --- a/pysatNASA/instruments/cnofs_vefi.py +++ b/pysatNASA/instruments/cnofs_vefi.py @@ -23,7 +23,7 @@ spacecraft is to enable an accurate V x B measurement along the spacecraft trajectory. In order to provide an in-flight calibration of the magnetic field data, we compare the most recent POMME model (the POtsdam Magnetic Model of the -Earth, https://geomag.us/models/pomme5.html) with the actual magnetometer +Earth, https://geomag.org/models/pomme5.html) with the actual magnetometer measurements to help determine a set of calibration parameters for the gains, offsets, and non-orthogonality matrix of the sensor axes. The calibrated magnetic field measurements are provided in the data file here. The VEFI diff --git a/pysatNASA/instruments/de2_vefi.py b/pysatNASA/instruments/de2_vefi.py index 28f13a15..bcc6e9a0 100644 --- a/pysatNASA/instruments/de2_vefi.py +++ b/pysatNASA/instruments/de2_vefi.py @@ -43,12 +43,14 @@ tag '', 'dca', 'ac' inst_id - none supported + None supported Warnings -------- - Currently no cleaning routine. +- The deprecated '' tag will drop the E-field data. To use this data + product, please use the new de2_vefimagb instrument. """ @@ -133,7 +135,9 @@ def load(fnames, tag='', inst_id='', **kwargs): inst_id : str Instrument ID used to identify particular data set to be loaded. This input is nominally provided by pysat itself. 
(default='') - + **kwargs : unpacked dict + Optional kwargs that will be passed to the + `pysatNASA.instruments.methods.cdaweb.load_xarray` function Returns ------- data : pds.DataFrame @@ -146,12 +150,16 @@ def load(fnames, tag='', inst_id='', **kwargs): Several variables relating to time stored in different formats are dropped. These are redundant and complicate the load procedure. + See Also + -------- + pysatNASA.instruments.methods.cdaweb.load_xarray + """ if tag == '': # Warn user that e-field data is dropped. estr = 'E-field data dropped' - pysat.logger.warn(estr) + pysat.logger.warning(estr) # Drop E-field data if 'use_cdflib' in kwargs.keys(): diff --git a/pysatNASA/instruments/de2_vefimagb.py b/pysatNASA/instruments/de2_vefimagb.py index 83ec6432..65186628 100644 --- a/pysatNASA/instruments/de2_vefimagb.py +++ b/pysatNASA/instruments/de2_vefimagb.py @@ -111,7 +111,7 @@ # Set the load routine def load(fnames, tag='', inst_id='', **kwargs): - """Load DE2 VEFI data. + """Load DE2 VEFI MAG B-field data. This routine is called as needed by pysat. It is not intended for direct user interaction. diff --git a/pysatNASA/instruments/dmsp_ssusi.py b/pysatNASA/instruments/dmsp_ssusi.py index 96cee90d..add9f373 100644 --- a/pysatNASA/instruments/dmsp_ssusi.py +++ b/pysatNASA/instruments/dmsp_ssusi.py @@ -61,6 +61,7 @@ import datetime as dt import functools +import pandas as pds import pysat from pysat.instruments.methods import general as mm_gen @@ -130,12 +131,12 @@ def clean(self): def concat_data(self, new_data, combine_times=False, **kwargs): - """Concatonate data to self.data for DMSP SSUSI data. + """Concatenate data to self.data for DMSP SSUSI data. Parameters ---------- new_data : xarray.Dataset or list of such objects - New data objects to be concatonated + New data objects to be concatenated combine_times : bool For SDR data, optionally combine the different datetime coordinates into a single time coordinate (default=False) @@ -154,7 +155,7 @@ def concat_data(self, new_data, combine_times=False, **kwargs): if self.tag in ['sdr-disk', 'sdr2-dist']: time_dims.append('time_auroral') - # Concatonate using the appropriate method for the number of time + # Concatenate using the appropriate method for the number of time # dimensions jhuapl.concat_data(self, time_dims, new_data, combine_times=combine_times, **kwargs) @@ -163,32 +164,157 @@ def concat_data(self, new_data, combine_times=False, **kwargs): # ---------------------------------------------------------------------------- # Instrument functions -# -# Use the default CDAWeb and pysat methods +remote_dir = ''.join(('/pub/data/dmsp/dmsp{inst_id:s}/ssusi/', + '/data/{tag:s}/{{year:4d}}/{{day:03d}}/')) + # Set the list_files routine -fname = ''.join(['dmsp{inst_id:s}_ssusi_{tag:s}_{{year:04d}}{{day:03d}}T', - '{{hour:02d}}{{minute:02d}}{{second:02d}}-???????T??????-REV', - '?????_vA{{version:1d}}.?.?r{{cycle:03d}}.nc']) -supported_tags = {sat_id: {tag: fname.format(tag=tag, inst_id=sat_id) - for tag in tags.keys()} - for sat_id in inst_ids.keys()} -list_files = functools.partial(mm_gen.list_files, - supported_tags=supported_tags) +def list_files(tag='', inst_id='', data_path='', **kwargs): + """Return a Pandas Series of every file for DMSP SSUSI data. + + Parameters + ---------- + tag : str + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. 
+ This input is nominally provided by pysat itself. (default='') + data_path : str + Path to data directory. This input is nominally provided by pysat + itself. (default='') + **kwargs : dict + Dict of kwargs allowed by `pysat.instruments.general.list_files` + + Returns + ------- + out : pds.Series + A Series containing the verified available files + + See Also + -------- + pysat.Files.from_os, pysat.instruments.general.list_files + + """ + # There are two potential file formats for DMSP SSUSI data, check both + file_fmts = mm_dmsp.ssusi_fname( + [mm_dmsp.fmt_swap_time - dt.timedelta(days=1), mm_dmsp.fmt_swap_time], + tag=tag, inst_id=inst_id) + + out_list = list() + for file_fmt in file_fmts: + supported_tags = {inst_id: {tag: file_fmt}} + out_list.append(mm_gen.list_files(tag=tag, inst_id=inst_id, + data_path=data_path, + supported_tags=supported_tags, + **kwargs)) + + # Combine the outputs + out = pds.concat(out_list) + return out + # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/dmsp/dmsp{inst_id:s}/ssusi/', '/data/{tag:s}/{{year:4d}}/{{day:03d}}/')), 'fname': fname} -download_tags = { - sat_id: {tag: {btag: basic_tag[btag].format(tag=tag, inst_id=sat_id) - for btag in basic_tag.keys()} for tag in tags.keys()} - for sat_id in inst_ids.keys()} -download = functools.partial(cdw.download, supported_tags=download_tags) +def download(date_array, tag='', inst_id='', data_path=None): + """Download DMSP SSUSI data. + + Parameters + ---------- + date_array : array-like + Array of datetimes to download data for. Provided by pysat. + tag : str + Data product tag (default='') + inst_id : str + Instrument ID (default='') + data_path : str or NoneType + Path to data directory. If None is specified, the value previously + set in Instrument.files.data_path is used. (default=None) + + """ + # Initialize the supported tags kwarg + supported_tags = {inst_id: {tag: {'remote_dir': remote_dir.format( + tag=tag, inst_id=inst_id)}}} + + # Determine the filename format for the desired period of time + file_fmts = mm_dmsp.ssusi_fname([date_array[0], date_array[-1]], tag, + inst_id) + + # Proceed differently if there are one or two potential file formats + supported_tags[inst_id][tag]['fname'] = file_fmts[0] + if file_fmts[0] == file_fmts[1]: + cdw.download(date_array, data_path, tag=tag, inst_id=inst_id, + supported_tags=supported_tags) + else: + # Get a mask for the time array + swap_mask = date_array < mm_dmsp.fmt_swap_time + + # Download the first set of data + cdw.download(date_array[swap_mask], data_path, tag=tag, inst_id=inst_id, + supported_tags=supported_tags) + + # Download the second set of data + supported_tags[inst_id][tag]['fname'] = file_fmts[1] + cdw.download(date_array[~swap_mask], data_path, tag=tag, + inst_id=inst_id, supported_tags=supported_tags) + return + # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, - supported_tags=download_tags) +def list_remote_files(tag='', inst_id='', start=None, stop=None): + """List every file for remote DMSP SSUSI data. + + Parameters + ---------- + tag : str + Data product tag (default='') + inst_id : str + Instrument ID (default='') + start : dt.datetime or NoneType + Starting time for file list. A None value will start with the first + file found. (default=None) + stop : dt.datetime or NoneType + Ending time for the file list. A None value will stop with the last + file found.
(default=None) + + Returns + ------- + file_list : pds.Series + A Series containing the verified available files + + """ + # Initialize the supported tags kwarg + supported_tags = {inst_id: {tag: {'remote_dir': remote_dir.format( + tag=tag, inst_id=inst_id)}}} + + # Determine the filename format for the desired period of time + start_time = dt.datetime(1900, 1, 1) if start is None else start + stop_time = dt.datetime.now(tz=dt.timezone.utc) if stop is None else stop + file_fmts = mm_dmsp.ssusi_fname([start_time, stop_time], tag, inst_id) + + # Proceed differently if there are one or two potential file formats + supported_tags[inst_id][tag]['fname'] = file_fmts[0] + if file_fmts[0] == file_fmts[1]: + file_list = cdw.list_remote_files(tag=tag, inst_id=inst_id, start=start, + stop=stop, + supported_tags=supported_tags) + else: + # Get the first set of files + file_list_start = cdw.list_remote_files(tag=tag, inst_id=inst_id, + start=start, + stop=mm_dmsp.fmt_swap_time, + supported_tags=supported_tags) + + # Get the second set of files + supported_tags[inst_id][tag]['fname'] = file_fmts[1] + file_list_stop = cdw.list_remote_files(tag=tag, inst_id=inst_id, + start=mm_dmsp.fmt_swap_time, + stop=stop, + supported_tags=supported_tags) + + # Join the two file lists + file_list = pds.concat([file_list_start, file_list_stop]) + + return file_list # Set the load routine diff --git a/pysatNASA/instruments/maven_insitu_kp.py b/pysatNASA/instruments/maven_insitu_kp.py index d8239e73..b11bb6f8 100644 --- a/pysatNASA/instruments/maven_insitu_kp.py +++ b/pysatNASA/instruments/maven_insitu_kp.py @@ -7,9 +7,9 @@ # DISTRIBUTION STATEMENT A: Approved for public release. Distribution is # unlimited. # ---------------------------------------------------------------------------- -"""Module for the MAVEN insitu instruments. +"""Module for the MAVEN in situ instruments. -Supports the in situ Key Parameter (kp) data from multiple instruments +Supports the in situ Key Parameter (KP) data from multiple instruments onboard the Mars Atmosphere and Volatile Evolution (MAVEN) satellite. Accesses local data in CDF format. @@ -32,9 +32,9 @@ :: import pysat - insitu = pysat.Instrument(platform='maven', name='insitu_kp') - insitu.download(dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 31)) - insitu.load(2020, 1, use_header=True) + insitukp = pysat.Instrument(platform='maven', name='insitu_kp') + insitukp.download(dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 31)) + insitukp.load(2020, 1, use_header=True) """ @@ -88,14 +88,11 @@ list_files = functools.partial(mm_gen.list_files, supported_tags=supported_tags) # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/maven/insitu/kp-4sec/', 'cdfs/{year:04d}/{month:02d}')), 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'MVN_INSITU_KP-4SEC'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/maven_mag.py b/pysatNASA/instruments/maven_mag.py index 738d1fed..2a5741a1 100644 --- a/pysatNASA/instruments/maven_mag.py +++ b/pysatNASA/instruments/maven_mag.py @@ -7,7 +7,7 @@ # DISTRIBUTION STATEMENT A: Approved for public release. Distribution is # unlimited.
# ---------------------------------------------------------------------------- -"""Module for the MAVEN mag instrument. +"""Module for the MAVEN magnetometer. Supports the Magnetometer (MAG) onboard the Mars Atmosphere and Volatile Evolution (MAVEN) satellite. @@ -86,14 +86,11 @@ list_files = functools.partial(mm_gen.list_files, supported_tags=supported_tags) # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/maven/mag/l2/sunstate-1sec', - '/cdfs/{year:04d}/{month:02d}')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'MVN_MAG_L2-SUNSTATE-1SEC'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) # Set the load routine diff --git a/pysatNASA/instruments/maven_sep.py b/pysatNASA/instruments/maven_sep.py index 9a16945b..ef577c5f 100644 --- a/pysatNASA/instruments/maven_sep.py +++ b/pysatNASA/instruments/maven_sep.py @@ -7,7 +7,7 @@ # DISTRIBUTION STATEMENT A: Approved for public release. Distribution is # unlimited. # ---------------------------------------------------------------------------- -"""Module for the MAVEN sep instrument. +"""Module for the MAVEN Solar Energetic Particle instrument. Supports the Solar Energetic Particle (SEP) data from onboard the Mars Atmosphere and Volatile Evolution (MAVEN) satellite. @@ -31,9 +31,9 @@ :: import pysat - insitu = pysat.Instrument(platform='maven', name='sep', inst_id='s1') - insitu.download(dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 31)) - insitu.load(2020, 1, use_header=True) + sep = pysat.Instrument(platform='maven', name='sep', inst_id='s1') + sep.download(dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 31)) + sep.load(2020, 1, use_header=True) """ @@ -93,22 +93,14 @@ supported_tags=supported_tags) # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/maven/sep/l2/s1-cal-svy-full', - '/{year:04d}/{month:02d}')), - 'fname': fname} - -basic_tag2 = {'remote_dir': ''.join(('/pub/data/maven/sep/l2/s2-cal-svy-full', - '/{year:04d}/{month:02d}')), - 'fname': fname2} - -download_tags = {'s1': {'': basic_tag}, - 's2': {'': basic_tag2}} +download_tags = {'s1': {'': 'MVN_SEP_L2_S1-CAL-SVY-FULL'}, + 's2': {'': 'MVN_SEP_L2_S2-CAL-SVY-FULL'}} # Set the download routine -download = functools.partial(cdw.download, supported_tags=download_tags) +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) # Set the load routine diff --git a/pysatNASA/instruments/methods/cdaweb.py b/pysatNASA/instruments/methods/cdaweb.py index 1a3b1382..b0e71803 100644 --- a/pysatNASA/instruments/methods/cdaweb.py +++ b/pysatNASA/instruments/methods/cdaweb.py @@ -18,6 +18,7 @@ import datetime as dt import numpy as np import os +from packaging.version import Version as pack_ver import pandas as pds import requests import tempfile @@ -164,7 +165,7 @@ def load(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), else: if not use_cdflib: estr = 'The `use_cdflib` option is not currently enabled for xarray' - pysat.logger.warn(estr) + pysat.logger.warning(estr) data, meta = 
load_xarray(fnames, tag=tag, inst_id=inst_id, epoch_name=epoch_name, @@ -264,8 +265,8 @@ def load_pandas(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), tdata = tdata.loc[date:date2, :] ldata.append(tdata) except ValueError as verr: - logger.warn("unable to load {:}: {:}".format(fname, - str(verr))) + logger.warning( + "unable to load {:}: {:}".format(fname, str(verr))) else: # Basic data return with CDF(lfname) as cdf: @@ -274,8 +275,8 @@ def load_pandas(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), flatten_twod=flatten_twod) ldata.append(temp_data) except ValueError as verr: - logger.warn("unable to load {:}: {:}".format(lfname, - str(verr))) + logger.warning( + "unable to load {:}: {:}".format(lfname, str(verr))) # Combine individual files together if len(ldata) > 0: @@ -541,6 +542,7 @@ def download(date_array, data_path, tag='', inst_id='', supported_tags=None, stop=date_array[-1]) # Create temproary directory if files need to be unzipped. + # Use one temp dir for all files if needed. if 'zip_method' in inst_dict.keys(): zip_method = inst_dict['zip_method'] temp_dir = tempfile.TemporaryDirectory() @@ -576,10 +578,11 @@ def download(date_array, data_path, tag='', inst_id='', supported_tags=None, with requests.get(remote_path) as req: if req.status_code != 404: if zip_method: - get_file(req.content, data_path, fname, - temp_path=temp_dir.name, zip_method=zip_method) + _get_file(req.content, data_path, fname, + temp_path=temp_dir.name, + zip_method=zip_method) else: - get_file(req.content, data_path, fname) + _get_file(req.content, data_path, fname) logger.info(''.join(('Successfully downloaded ', fname, '.'))) else: @@ -598,7 +601,7 @@ def download(date_array, data_path, tag='', inst_id='', supported_tags=None, return -def get_file(remote_file, data_path, fname, temp_path=None, zip_method=None): +def _get_file(remote_file, data_path, fname, temp_path=None, zip_method=None): """Retrieve a file, unzipping if necessary. Parameters @@ -610,16 +613,24 @@ def get_file(remote_file, data_path, fname, temp_path=None, zip_method=None): fname : str Name of file on the remote server. temp_path : str - Path to temporary directory. (Default=None) + Path to temporary directory. Must be specified if zip_method is True. + (Default=None) zip_method : str The method used to zip the file. Supports 'zip' and None. If None, downloads files directly. (default=None) + Raises + ------ + ValueError if temp_path not specified for zip_method + """ if zip_method: # Use a temporary location. - dl_fname = os.path.join(temp_path, fname) + if temp_path: + dl_fname = os.path.join(temp_path, fname) + else: + raise ValueError('Temp path needs to be set if unzipping') else: # Use the pysat data directory. dl_fname = os.path.join(data_path, fname) @@ -845,9 +856,15 @@ def list_remote_files(tag='', inst_id='', start=None, stop=None, if 'year' in search_dir['keys']: url_list = [] if 'month' in search_dir['keys']: + # TODO(#242): remove if/else once support for older pandas is + # dropped. 
+ if pack_ver(pds.__version__) >= pack_ver('2.2.0'): + freq_key = 'ME' + else: + freq_key = 'M' search_times = pds.date_range(start, stop + pds.DateOffset(months=1), - freq='M') + freq=freq_key) for time in search_times: subdir = format_dir.format(year=time.year, month=time.month) url_list.append('/'.join((remote_url, subdir))) @@ -857,9 +874,16 @@ + pds.DateOffset(days=1), freq='D') else: + + # TODO(#242): remove if/else once support for older pandas + # is dropped. + if pack_ver(pds.__version__) >= pack_ver('2.2.0'): + freq_key = 'YE' + else: + freq_key = 'Y' search_times = pds.date_range(start, stop + pds.DateOffset(years=1), - freq='Y') + freq=freq_key) for time in search_times: doy = int(time.strftime('%j')) subdir = format_dir.format(year=time.year, day=doy) @@ -981,7 +1005,9 @@ og_files = cdas.get_original_files(dataset=dataset, start=start, end=stop) - if series_out: + if og_files[1] is None: + file_list = pds.Series(dtype=str) if series_out else [] + elif series_out: name_list = [os.path.basename(f['Name']) for f in og_files[1]] t_stamp = [pds.Timestamp(f['StartTime'][:10]) for f in og_files[1]] file_list = pds.Series(data=name_list, index=t_stamp) diff --git a/pysatNASA/instruments/methods/dmsp.py b/pysatNASA/instruments/methods/dmsp.py index 16b55753..aa406beb 100644 --- a/pysatNASA/instruments/methods/dmsp.py +++ b/pysatNASA/instruments/methods/dmsp.py @@ -9,6 +9,8 @@ # ---------------------------------------------------------------------------- """Provides non-instrument specific routines for the DMSP data.""" +import datetime as dt + ackn_str = "".join(["This Defense Meteorological Satellite Program (DMSP) ", "satellite data is provided through CDAWeb"]) @@ -22,3 +24,54 @@ 'Sensing, and Instrumentation for Atmospheric and ', 'Space Research IV, (30 January 2002); ', 'doi:10.1117/12.454268'))} + +# The DMSP SSUSI filename format in SPDF fully changes on day 81 of 2023 +# (22 March 2023) to have a 6-digit, zero-padded revision number (previously +# a 5-digit revision number) +fmt_swap_time = dt.datetime(2023, 3, 22) + + +def ssusi_fname(ftimes, tag=None, inst_id=None): + """Provide a DMSP SSUSI filename format for the desired time. + + Parameters + ---------- + ftimes : list of dt.datetime + List of dates and times to retrieve the filename format. + tag : str or NoneType + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default=None) + inst_id : str or NoneType + Satellite ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default=None) + + Returns + ------- + file_fmts : list + List of filename formats for the desired times. If either `tag` or + `inst_id` is not supplied (NoneType provided), these will be included + as first-level format options, while the date, version, and cycle are + second-level format options. Otherwise the date, version, and cycle are + first-level format options.
+ + """ + file_fmts = list() + for ftime in ftimes: + if ftime < fmt_swap_time: + file_fmt = ''.join(['dmsp{inst_id:s}_ssusi_{tag:s}_{{year:04d}}', + '{{day:03d}}T{{hour:02d}}{{minute:02d}}', + '{{second:02d}}-???????T??????-REV?????_vA', + '{{version:1d}}.?.?r{{cycle:03d}}.nc']) + else: + file_fmt = ''.join(['dmsp{inst_id:s}_ssusi_{tag:s}_{{year:04d}}', + '{{day:03d}}T{{hour:02d}}{{minute:02d}}', + '{{second:02d}}-???????T??????-REV??????_vA', + '{{version:1d}}.?.?r{{cycle:03d}}.nc']) + + # If desired, format the tag and inst_id + if tag is not None and inst_id is not None: + file_fmt = file_fmt.format(tag=tag, inst_id=inst_id) + + # Save to the list + file_fmts.append(file_fmt) + + return file_fmts diff --git a/pysatNASA/instruments/methods/jhuapl.py b/pysatNASA/instruments/methods/jhuapl.py index 4a3e3bb0..a855c1e2 100644 --- a/pysatNASA/instruments/methods/jhuapl.py +++ b/pysatNASA/instruments/methods/jhuapl.py @@ -282,15 +282,15 @@ def load_sdr_aurora(fnames, name='', tag='', inst_id='', pandas_format=False, # Ensure identical day and night dimensions for GUVI if name == 'guvi': - if sdata.dims['nAlongDay'] != sdata.dims['nAlongNight']: + if sdata.sizes['nAlongDay'] != sdata.sizes['nAlongNight']: raise ValueError('Along-track day and night dimensions differ') if 'nCrossDay' in rename_dims.keys(): - if sdata.dims['nCrossDay'] != sdata.dims['nCrossNight']: + if sdata.sizes['nCrossDay'] != sdata.sizes['nCrossNight']: raise ValueError(''.join([ 'Cross-track day and night dimensions differ ', - '{:} != {:}'.format(sdata.dims['nCrossDay'], - sdata.dims['nCrossNight'])])) + '{:} != {:}'.format(sdata.sizes['nCrossDay'], + sdata.sizes['nCrossNight'])])) # Combine identical dimensions and rename some time dimensions sdata = sdata.rename_dims(rename_dims) diff --git a/pysatNASA/instruments/methods/maven.py b/pysatNASA/instruments/methods/maven.py index 1a00fb74..e935ebd7 100644 --- a/pysatNASA/instruments/methods/maven.py +++ b/pysatNASA/instruments/methods/maven.py @@ -7,12 +7,7 @@ # DISTRIBUTION STATEMENT A: Approved for public release. Distribution is # unlimited. # ---------------------------------------------------------------------------- -"""Provides non-instrument specific routines for MAVEN data. - -Created on Thu Jul 13 11:21:01 2023 - -@author: tesman -""" +"""Provides non-instrument specific routines for MAVEN data.""" ackn_str = ''.join(('Jakosky, B.M., Lin, R.P., Grebowsky, J.M. et al.', ' The Mars Atmosphere and Volatile Evolution', diff --git a/pysatNASA/instruments/methods/omni.py b/pysatNASA/instruments/methods/omni.py index a6c3df46..b456044d 100644 --- a/pysatNASA/instruments/methods/omni.py +++ b/pysatNASA/instruments/methods/omni.py @@ -51,15 +51,15 @@ def time_shift_to_magnetic_poles(inst): # Need to fill in Vx to get an estimate of what is going on. inst['Vx'] = inst['Vx'].interpolate('nearest') - inst['Vx'] = inst['Vx'].fillna(method='backfill') - inst['Vx'] = inst['Vx'].fillna(method='pad') + inst['Vx'] = inst['Vx'].bfill() + inst['Vx'] = inst['Vx'].ffill() inst['BSN_x'] = inst['BSN_x'].interpolate('nearest') - inst['BSN_x'] = inst['BSN_x'].fillna(method='backfill') - inst['BSN_x'] = inst['BSN_x'].fillna(method='pad') + inst['BSN_x'] = inst['BSN_x'].bfill() + inst['BSN_x'] = inst['BSN_x'].ffill() # Make sure there are no gaps larger than a minute. 
- inst.data = inst.data.resample('1T').interpolate('time') + inst.data = inst.data.resample('1min').interpolate('time') time_x = inst['BSN_x'] * 6371.2 / -inst['Vx'] idx, = np.where(np.isnan(time_x)) @@ -164,12 +164,14 @@ def calculate_imf_steadiness(inst, steady_window=15, min_window_frac=0.75, if steady: del_min = int((inst.data.index[i] - inst.data.index[i - 1]).total_seconds() / 60.0) - if np.isnan(cv) or np.isnan(ca_std[i]) or del_min > sample_rate: + if np.any([np.isnan(cv), + np.isnan(ca_std.iloc[i]), + del_min > sample_rate]): # Reset the steadiness flag if fill values are encountered, or # if an entry is missing steady = False - if cv <= max_bmag_cv and ca_std[i] <= max_clock_angle_std: + if cv <= max_bmag_cv and ca_std.iloc[i] <= max_clock_angle_std: # Steadiness conditions have been met if steady: imf_steady[i] = imf_steady[i - 1] diff --git a/pysatNASA/instruments/reach_dosimeter.py b/pysatNASA/instruments/reach_dosimeter.py index d56cc0c5..307ef3f7 100644 --- a/pysatNASA/instruments/reach_dosimeter.py +++ b/pysatNASA/instruments/reach_dosimeter.py @@ -14,7 +14,7 @@ The Responsive Environmental Assessment Commercially Hosted (REACH) constellation is collection of 32 small sensors hosted on six orbital planes of -the Iridium-Next space vehicles in low earth orbit. Each sensor contains two +the Iridium-Next space vehicles in Low Earth Orbit. Each sensor contains two micro-dosimeters sensitive to the passage of charged particles from the Earth's radiation belts. There are six distinct dosimeter types spread among the 64 individual sensors, which are unique in shielding and electronic threshold. @@ -33,8 +33,6 @@ '101', '102', '105', '108', '113', '114', '115', '116', '133', '134', '135', '136', '137', '138', '139', '140', '148', '149', '162', '163', '164', '165', '166', '169', '170', '171', '172', '173', '175', '176', '180', '181' - - """ import datetime as dt @@ -69,8 +67,6 @@ # ---------------------------------------------------------------------------- # Instrument methods - - # Use standard init routine init = functools.partial(mm_nasa.init, module=mm_reach, name=name) @@ -84,13 +80,12 @@ def preprocess(self): self.acknowledgements = self.meta.header.Acknowledgement return - - # ---------------------------------------------------------------------------- # Instrument functions # # Use the default CDAWeb and pysat methods + # Set the list_files routine datestr = '{year:04d}{month:02d}{day:02d}' fname = 'reach-vid-{inst_id}_dosimeter-l1c_{datestr}_v{{version:01d}}.nc' @@ -100,6 +95,16 @@ def preprocess(self): supported_tags=supported_tags) +# Support download routine +download_tags = {iid: {'': 'REACH-VID-{iid}_DOSIMETER-L1C'.format(iid=iid)} + for iid in inst_ids.keys()} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Support listing files currently on CDAWeb +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) + + def load(fnames, tag=None, inst_id=None): """Load REACH data into `pandas.DataFrame` and `pysat.Meta` objects. 
@@ -160,13 +165,3 @@ def load(fnames, tag=None, inst_id=None): meta.header = MetaHeader(new_header) return data, meta - - -# Support download routine -download_tags = {iid: {'': 'REACH-VID-{iid}_DOSIMETER-L1C'.format(iid=iid)} - for iid in inst_ids.keys()} -download = functools.partial(cdw.cdas_download, supported_tags=download_tags) - -# Support listing files currently on CDAWeb -list_remote_files = functools.partial(cdw.cdas_list_remote_files, - supported_tags=download_tags) diff --git a/pysatNASA/instruments/ses14_gold.py b/pysatNASA/instruments/ses14_gold.py index 61bb8caa..3d657f8c 100644 --- a/pysatNASA/instruments/ses14_gold.py +++ b/pysatNASA/instruments/ses14_gold.py @@ -158,21 +158,19 @@ def list_remote_files(tag='', inst_id='', start=None, stop=None, Instrument ID (default='') start : dt.datetime or NoneType Starting time for file list. A None value will start with the first - file found. - (default=None) + file found. (default=None) stop : dt.datetime or NoneType Ending time for the file list. A None value will stop with the last - file found. - (default=None) + file found. (default=None) series_out : bool - boolean to determine output type. True for pandas series of file names, - and False for a list of the full web address. - (default=True) + Boolean to determine output type. True for pandas series of file names, + and False for a list of the full web address. (default=True) Returns ------- - file_list : list - A list containing the verified available files + file_list : pds.Series or list + A Series or list (if tag is not 'tlimb' and `series_out` is False) + containing the verified available files """ diff --git a/pysatNASA/instruments/templates/template_cdaweb_instrument.py b/pysatNASA/instruments/templates/template_cdaweb_instrument.py index 8c145998..c95f8e25 100644 --- a/pysatNASA/instruments/templates/template_cdaweb_instrument.py +++ b/pysatNASA/instruments/templates/template_cdaweb_instrument.py @@ -4,6 +4,8 @@ # Full author list can be found in .zenodo.json file # DOI:10.5281/zenodo.3986131 # +# Review Status for Classified or Controlled Information by NRL +# ------------------------------------------------------------- # DISTRIBUTION STATEMENT A: Approved for public release. Distribution is # unlimited. # ---------------------------------------------------------------------------- @@ -11,7 +13,9 @@ Copy and modify this file as needed when adding a new Instrument to pysat. -DO NOT include the NRL distribution statement in your new file. +DO NOT include the NRL distribution statement in your new file. Contributions +by NRL developers will need to be cleared for classified or controlled +information separately from the pysat pull request process. This is a good area to introduce the instrument, provide background on the mission, operations, instrumenation, and measurements. @@ -63,7 +67,8 @@ # The platform and name strings associated with this instrument # need to be defined at the top level. # These attributes will be copied over to the Instrument object by pysat. 
-# The strings used here should also be used to name this file `platform_name.py` +# The strings used here should also be used to name this file +# `platform_name.py` platform = '' name = '' diff --git a/pysatNASA/instruments/timed_guvi.py b/pysatNASA/instruments/timed_guvi.py index 5e1ef6ac..b97c6e5b 100644 --- a/pysatNASA/instruments/timed_guvi.py +++ b/pysatNASA/instruments/timed_guvi.py @@ -138,12 +138,12 @@ def clean(self): def concat_data(self, new_data, combine_times=False, **kwargs): - """Concatonate data to self.data for TIMED GUVI data. + """Concatenate data to self.data for TIMED GUVI data. Parameters ---------- new_data : xarray.Dataset or list of such objects - New data objects to be concatonated + New data objects to be concatenated combine_times : bool For SDR data, optionally combine the different datetime coordinates into a single time coordinate (default=False) @@ -164,7 +164,7 @@ def concat_data(self, new_data, combine_times=False, **kwargs): elif self.tag == 'sdr-spectrograph': time_dims.extend(['time_gaim_day', 'time_gaim_night']) - # Concatonate using the appropriate method for the number of time + # Concatenate using the appropriate method for the number of time # dimensions jhuapl.concat_data(self, time_dims, new_data, combine_times=combine_times, **kwargs) diff --git a/pysatNASA/tests/test_instruments.py b/pysatNASA/tests/test_instruments.py index 821dfe58..138dec02 100644 --- a/pysatNASA/tests/test_instruments.py +++ b/pysatNASA/tests/test_instruments.py @@ -47,14 +47,17 @@ instruments['cdf'] = [] # Create list of pandas instruments where this is not needed +# In general, this is for xarray instruments that are not supported +# by pysatCDF skip_cdf_list = ['de2_vefimagb'] for inst in instruments['download']: - fname = inst['inst_module'].supported_tags[inst['inst_id']][inst['tag']] - if '.cdf' in fname: - temp_inst, _ = clslib.initialize_test_inst_and_date(inst) - if temp_inst.pandas_format and temp_inst.name not in skip_cdf_list: - instruments['cdf'].append(inst) + if hasattr(inst['inst_module'], 'supported_tags'): + fname = inst['inst_module'].supported_tags[inst['inst_id']][inst['tag']] + if '.cdf' in fname: + temp_inst, _ = clslib.initialize_test_inst_and_date(inst) + if temp_inst.pandas_format and temp_inst.name not in skip_cdf_list: + instruments['cdf'].append(inst) class TestInstruments(clslib.InstLibTests): @@ -89,13 +92,13 @@ def test_load_cdflib(self, inst_dict): target = 'Fake Data to be cleared' test_inst.data = [target] try: - test_inst.load(date=date, use_header=True, use_cdflib=True) + test_inst.load(date=date, use_cdflib=True) except ValueError as verr: # Check if instrument is failing due to strict time flag if str(verr).find('Loaded data') > 0: test_inst.strict_time_flag = False with warnings.catch_warnings(record=True) as war: - test_inst.load(date=date, use_header=True) + test_inst.load(date=date) assert len(war) >= 1 categories = [war[j].category for j in range(0, len(war))] assert UserWarning in categories diff --git a/pysatNASA/tests/test_methods_cdaweb.py b/pysatNASA/tests/test_methods_cdaweb.py index c5184a06..666e0f2c 100644 --- a/pysatNASA/tests/test_methods_cdaweb.py +++ b/pysatNASA/tests/test_methods_cdaweb.py @@ -109,8 +109,8 @@ def test_bad_zip_warning_get_files(self, caplog): temp_dir = tempfile.TemporaryDirectory() with caplog.at_level(logging.WARNING, logger='pysat'): - cdw.get_file(req.content, '.', 'test.txt', temp_path=temp_dir.name, - zip_method='badzip') + cdw._get_file(req.content, '.', 'test.txt', temp_path=temp_dir.name, + 
zip_method='badzip') captured = caplog.text # Check for appropriate warning @@ -119,6 +119,14 @@ return + def test_get_file_unzip_without_temp_path(self): + """Test that unzipping without a temp path raises a ValueError.""" + + with pytest.raises(ValueError) as excinfo: + cdw._get_file('remote_file', 'fake_path', 'fname', zip_method='zip') + assert str(excinfo.value).find('Temp path needs') >= 0 + return + @pytest.mark.parametrize("bad_key,bad_val,err_msg", [("tag", "badval", "inst_id / tag combo unknown."), ("inst_id", "badval", diff --git a/setup.cfg b/setup.cfg index 08661e10..32dc6c9c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ [metadata] name = pysatNASA -version = 0.0.5 +version = 0.0.6 [flake8] max-line-length = 80
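
Notes on selected changes above follow; the code sketches are illustrative companions to the diff, not part of it.

The new DMSP SSUSI download and list_remote_files functions split a request window around mm_dmsp.fmt_swap_time because the SPDF revision field in the filenames widens from five to six digits on that date. A minimal sketch of the date partitioning, using only the swap date taken from the diff (the request window below is hypothetical):

import datetime as dt
import pandas as pds

# Swap date from pysatNASA/instruments/methods/dmsp.py: SPDF revision
# numbers grow from five to six digits on day 81 of 2023
fmt_swap_time = dt.datetime(2023, 3, 22)

# Hypothetical request spanning the swap date
date_array = pds.date_range(dt.datetime(2023, 3, 20), dt.datetime(2023, 3, 24))

# Dates before the swap use the old (five-digit REV) filename format;
# the complement uses the new six-digit format
swap_mask = date_array < fmt_swap_time
print(len(date_array[swap_mask]), len(date_array[~swap_mask]))  # 2 3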
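
The freq_key logic added to the cdaweb listing helpers tracks the pandas 2.2 renaming of the month-end and year-end offset aliases ('M' to 'ME', 'Y' to 'YE'); the OMNI switch from resample('1T') to resample('1min') belongs to the same deprecation cycle. A version-gated sketch of the same pattern (output depends on the installed pandas):

from packaging.version import Version as pack_ver
import pandas as pds

# Gate the offset alias on the installed pandas version, as the diff does
if pack_ver(pds.__version__) >= pack_ver('2.2.0'):
    month_key, year_key = 'ME', 'YE'
else:
    month_key, year_key = 'M', 'Y'

months = pds.date_range('2024-01-01', '2024-06-30', freq=month_key)
years = pds.date_range('2020-01-01', '2024-12-31', freq=year_key)
print(len(months), len(years))  # 6 month-end stamps, 5 year-end stamps

# '1min' is accepted by both old and new pandas, so no gate is needed there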
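
The OMNI change from fillna(method='backfill') and fillna(method='pad') to bfill and ffill is a drop-in replacement for another pandas deprecation. For example:

import numpy as np
import pandas as pds

vx = pds.Series([np.nan, -350.0, np.nan, -410.0, np.nan])

# Equivalent to the deprecated fillna(method='backfill') and
# fillna(method='pad') calls, in that order
vx = vx.bfill()  # fill each NaN from the next valid value
vx = vx.ffill()  # fill the trailing NaN from the previous valid value
print(vx.tolist())  # [-350.0, -350.0, -410.0, -410.0, -410.0]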
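
The jhuapl.py switch from Dataset.dims to Dataset.sizes avoids a deprecation in newer xarray, where dims on a Dataset is slated to return a set of dimension names rather than a name-to-length mapping. A tiny illustration using the GUVI dimension names from the diff (the data values are dummies):

import numpy as np
import xarray as xr

sdata = xr.Dataset(
    {'day_radiance': (('nAlongDay', 'nCrossDay'), np.zeros((3, 4))),
     'night_radiance': (('nAlongNight', 'nCrossNight'), np.zeros((3, 4)))})

# Dataset.sizes is the stable mapping of dimension name to length
if sdata.sizes['nAlongDay'] != sdata.sizes['nAlongNight']:
    raise ValueError('Along-track day and night dimensions differ')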
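
The cdas_list_remote_files fix guards against the CDAS web service returning no file payload for the requested window. A sketch of the guard in isolation; files_to_series is a hypothetical helper, and the og_files tuple shape (status, file list or None) follows the usage shown in the diff:

import os
import pandas as pds

def files_to_series(og_files, series_out=True):
    # og_files[1] is None when the service reports no files in the window
    if og_files[1] is None:
        return pds.Series(dtype=str) if series_out else []
    if series_out:
        names = [os.path.basename(f['Name']) for f in og_files[1]]
        stamps = [pds.Timestamp(f['StartTime'][:10]) for f in og_files[1]]
        return pds.Series(data=names, index=stamps)
    return [f['Name'] for f in og_files[1]]

print(files_to_series((200, None)).empty)  # True
print(files_to_series((200, [{'Name': 'https://cdaweb.gsfc.nasa.gov/x/file_v01.cdf',
                              'StartTime': '2020-01-01T00:00:00Z'}])))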
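
Finally, the "new_tests" entry added to the markers list in pyproject.toml registers a custom pytest marker, so recently added tests can be selected without unknown-marker warnings. A minimal usage sketch (the test name is hypothetical):

import pytest

@pytest.mark.new_tests
def test_recently_added_behavior():
    # Select with `pytest -m new_tests`; exclude with `pytest -m "not new_tests"`
    assert True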