Skip to content

Commit

Permalink
Merge pull request #20 from nasa/implement-pre-commit-hooks
Browse files Browse the repository at this point in the history
  • Loading branch information
owenlittlejohns authored Apr 6, 2024
2 parents a1d7630 + 5a32f16 commit c725567
Show file tree
Hide file tree
Showing 31 changed files with 2,796 additions and 2,243 deletions.
5 changes: 5 additions & 0 deletions .git-blame-ignore-revs
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# For more information, see:
# https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view

# Black code formatting of entire repository
036cf6f687ae8c07f381c1537bdb527219558e10
2 changes: 1 addition & 1 deletion .github/workflows/run_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ jobs:
run: make install

- name: Run tests
run: make test
run: make test

- name: Archive test results
uses: actions/upload-artifact@v3
Expand Down
20 changes: 20 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-json
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.4
hooks:
- id: ruff
args: ["--fix", "--show-fixes"]
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.3.0
hooks:
- id: black-jupyter
args: ["--skip-string-normalization"]
language_version: python3.11
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
## vX.Y.Z
## v2.2.1
### Unreleased

The `requests` package has been added as an explicit dependency of the package.
Additionally, black code formatting has been applied to the entire repository.

## v2.2.0
### 2023-11-30
Expand Down
33 changes: 33 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,39 @@ Run `unittest` suite:
$ make test
```

### pre-commit hooks:

This repository uses [pre-commit](https://pre-commit.com/) to run automated
checks against the repository for some coding standard best practices. These include:

* Removing trailing whitespace.
* Ensuring that each file ends with a single newline.
* Checking that JSON and YAML files are valid.
* [ruff](https://github.com/astral-sh/ruff) Python linting checks.
* [black](https://black.readthedocs.io/en/stable/index.html) Python code
formatting checks.

To enable these checks:

```bash
# Install pre-commit Python package as part of test requirements:
pip install -r dev-requirements.txt

# Install the git hook scripts:
pre-commit install

# (Optional) Run against all files:
pre-commit run --all-files
```

When you try to make a new commit locally, `pre-commit` will automatically run.
If any of the hooks detect non-compliance (e.g., trailing whitespace), that
hook will state it failed, and also try to fix the issue. You will need to
review and `git add` the changes before you can make a commit.

Additional hooks, possibly including tools such as `mypy`, are planned for
future implementation.

## Releasing:

All CI/CD for this repository is defined in the `.github/workflows` directory:
Expand Down
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.2.0
2.2.1
1 change: 1 addition & 0 deletions dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
coverage ~= 5.5
ipython ~= 8.0.1
jsonschema ~= 4.17.3
pre-commit ~= 3.7.0
pycodestyle ~= 2.11.0
pylint >= 2.5.0
unittest-xml-reporting ~= 3.0.4
22 changes: 19 additions & 3 deletions docs/earthdata-varinfo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,11 @@
"metadata": {},
"outputs": [],
"source": [
"gpm_imerg.get_required_variables({'/Grid/precipitationCal', })"
"gpm_imerg.get_required_variables(\n",
" {\n",
" '/Grid/precipitationCal',\n",
" }\n",
")"
]
},
{
Expand Down Expand Up @@ -212,10 +216,22 @@
"outputs": [],
"source": [
"print('Spatial dimensions for /Grid/precipitationCal')\n",
"print(gpm_imerg.get_spatial_dimensions({'/Grid/precipitationCal', }))\n",
"print(\n",
" gpm_imerg.get_spatial_dimensions(\n",
" {\n",
" '/Grid/precipitationCal',\n",
" }\n",
" )\n",
")\n",
"\n",
"print('\\nTemporal dimensions for /Grid/precipationCal')\n",
"print(gpm_imerg.get_temporal_dimensions({'/Grid/precipitationCal', }))"
"print(\n",
" gpm_imerg.get_temporal_dimensions(\n",
" {\n",
" '/Grid/precipitationCal',\n",
" }\n",
" )\n",
")"
]
},
{
Expand Down
27 changes: 9 additions & 18 deletions docs/how_to_generate_umm_var_via_cmr_graphql.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -109,29 +109,27 @@
"urs_urls = {\n",
" 'sit': 'https://sit.urs.earthdata.nasa.gov',\n",
" 'uat': 'https://uat.urs.earthdata.nasa.gov',\n",
" 'production': 'https://urs.earthdata.nasa.gov'\n",
" 'production': 'https://urs.earthdata.nasa.gov',\n",
"}\n",
"\n",
"\n",
"def get_edl_token(environment_name: str) -> str:\n",
" \"\"\" Retrieve an EDL token for use in requests to CMR graph. If\n",
" the user identified by a local .netrc file does not have a\n",
" token then a new one will be generated.\n",
" \"\"\"Retrieve an EDL token for use in requests to CMR graph. If\n",
" the user identified by a local .netrc file does not have a\n",
" token then a new one will be generated.\n",
"\n",
" \"\"\"\n",
" urs_url = urs_urls.get(environment_name)\n",
"\n",
" existing_tokens_response = requests.get(\n",
" f'{urs_url}/api/users/tokens',\n",
" headers={'Content-type': 'application/json'}\n",
" f'{urs_url}/api/users/tokens', headers={'Content-type': 'application/json'}\n",
" )\n",
" existing_tokens_response.raise_for_status()\n",
" existing_tokens_json = existing_tokens_response.json()\n",
"\n",
" if len(existing_tokens_json) == 0:\n",
" new_token_response = requests.post(\n",
" f'{urs_url}/api/users/token',\n",
" headers={'Content-type': 'application/json'}\n",
" f'{urs_url}/api/users/token', headers={'Content-type': 'application/json'}\n",
" )\n",
" new_token_response.raise_for_status()\n",
" new_token_json = new_token_response.json()\n",
Expand Down Expand Up @@ -163,7 +161,7 @@
" 'local': 'http://localhost:3013/dev/api',\n",
" 'sit': 'https://graphql.sit.earthdata.nasa.gov/api',\n",
" 'uat': 'https://graphql.uat.earthdata.nasa.gov/api',\n",
" 'production': 'https://graphql.earthdata.nasa.gov/api'\n",
" 'production': 'https://graphql.earthdata.nasa.gov/api',\n",
"}\n",
"\n",
"graphql_url = graphql_environments[environment_name]"
Expand Down Expand Up @@ -237,11 +235,7 @@
"metadata": {},
"outputs": [],
"source": [
"variables = {\n",
" 'params': {\n",
" 'conceptId': 'C1245618475-EEDTEST'\n",
" }\n",
"}"
"variables = {'params': {'conceptId': 'C1245618475-EEDTEST'}}"
]
},
{
Expand All @@ -261,10 +255,7 @@
"metadata": {},
"outputs": [],
"source": [
"payload = {\n",
" 'query': graphql_query,\n",
" 'variables': variables\n",
"}"
"payload = {'query': graphql_query, 'variables': variables}"
]
},
{
Expand Down
39 changes: 21 additions & 18 deletions docs/how_to_publish_to_cmr_with_earthdata_varinfo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -129,8 +129,11 @@
"auth_header = '<Launchpad token>'\n",
"collection_concept_id_gldas = 'C1256543837-EEDTEST'\n",
"\n",
"generate_collection_umm_var(collection_concept_id=collection_concept_id_gldas,\n",
" auth_header=auth_header, publish=True)"
"generate_collection_umm_var(\n",
" collection_concept_id=collection_concept_id_gldas,\n",
" auth_header=auth_header,\n",
" publish=True,\n",
")"
]
},
{
Expand Down Expand Up @@ -199,9 +202,9 @@
"metadata": {},
"outputs": [],
"source": [
"granule_response = get_granules(concept_id=collection_concept_id_merra,\n",
" cmr_env=CMR_UAT,\n",
" auth_header=auth_header)\n",
"granule_response = get_granules(\n",
" concept_id=collection_concept_id_merra, cmr_env=CMR_UAT, auth_header=auth_header\n",
")\n",
"\n",
"url = get_granule_link(granule_response)\n",
"print(url)"
Expand Down Expand Up @@ -243,8 +246,9 @@
"metadata": {},
"outputs": [],
"source": [
"var_info = VarInfoFromNetCDF4('MERRA2_400.inst1_2d_asm_Nx.20220130.nc4',\n",
" short_name='M2I1NXASM')"
"var_info = VarInfoFromNetCDF4(\n",
" 'MERRA2_400.inst1_2d_asm_Nx.20220130.nc4', short_name='M2I1NXASM'\n",
")"
]
},
{
Expand Down Expand Up @@ -281,10 +285,9 @@
"metadata": {},
"outputs": [],
"source": [
"publish_all_umm_var(collection_concept_id_merra,\n",
" umm_var_dict,\n",
" auth_header=auth_header,\n",
" cmr_env=CMR_UAT)"
"publish_all_umm_var(\n",
" collection_concept_id_merra, umm_var_dict, auth_header=auth_header, cmr_env=CMR_UAT\n",
")"
]
},
{
Expand Down Expand Up @@ -325,8 +328,9 @@
"metadata": {},
"outputs": [],
"source": [
"var_info = VarInfoFromNetCDF4('MERRA2_400.inst1_2d_asm_Nx.20220130.nc4',\n",
" short_name='M2I1NXASM')\n",
"var_info = VarInfoFromNetCDF4(\n",
" 'MERRA2_400.inst1_2d_asm_Nx.20220130.nc4', short_name='M2I1NXASM'\n",
")\n",
"\n",
"variable = var_info.get_variable('/TROPPV')"
]
Expand All @@ -345,7 +349,7 @@
"outputs": [],
"source": [
"if variable is not None:\n",
" umm_var_entry = get_umm_var(var_info, variable)\n",
" umm_var_entry = get_umm_var(var_info, variable)\n",
"else:\n",
" print('Selected variable was not found in granule')\n",
"\n",
Expand All @@ -365,10 +369,9 @@
"metadata": {},
"outputs": [],
"source": [
"publish_umm_var(collection_concept_id_merra,\n",
" umm_var_entry,\n",
" auth_header=auth_header,\n",
" cmr_env=CMR_UAT)"
"publish_umm_var(\n",
" collection_concept_id_merra, umm_var_entry, auth_header=auth_header, cmr_env=CMR_UAT\n",
")"
]
}
],
Expand Down
32 changes: 19 additions & 13 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
https://pypi.org/pypi?%3Aaction=list_classifiers
"""

from typing import List
import io
import pathlib
Expand All @@ -16,28 +17,29 @@


def parse_dependencies(file_path: str) -> List[str]:
""" Parse a Pip requirements file, and extract the dependencies. """
"""Parse a Pip requirements file, and extract the dependencies."""
with open(file_path, 'r') as file_handler:
dependencies = file_handler.read().strip().split('\n')

return dependencies


def get_readme(current_directory: str) -> str:
""" Parse the README.md in the root of the repository, for the long
description of this Python package.
"""Parse the README.md in the root of the repository, for the long
description of this Python package.
"""
with io.open(os.path.join(current_directory, 'README.md'),
'r', encoding='utf-8') as file_handler:
with io.open(
os.path.join(current_directory, 'README.md'), 'r', encoding='utf-8'
) as file_handler:
readme = file_handler.read()

return readme


def get_semantic_version(current_directory: str) -> str:
""" Parse the VERSION file in the root of the repository for the semantic
version number of the version.
"""Parse the VERSION file in the root of the repository for the semantic
version number of the version.
"""
with open(os.path.join(current_directory, 'VERSION'), 'r') as file_handler:
Expand All @@ -51,9 +53,11 @@ def get_semantic_version(current_directory: str) -> str:
version=get_semantic_version(CURRENT_DIRECTORY),
author='NASA EOSDIS SDPS Data Services Team',
author_email='[email protected]',
description=('A package for parsing Earth Observation science granule '
'structure and extracting relations between science variables'
' and their associated metadata, such as coordinates.'),
description=(
'A package for parsing Earth Observation science granule '
'structure and extracting relations between science variables'
' and their associated metadata, such as coordinates.'
),
long_description=get_readme(CURRENT_DIRECTORY),
long_description_content_type='text/markdown',
url='https://github.com/nasa/earthdata-varinfo',
Expand All @@ -64,7 +68,9 @@ def get_semantic_version(current_directory: str) -> str:
test_suite='tests',
python_requires='>=3.7',
license='License :: OSI Approved :: Apache Software License',
classifiers=['Programming Language :: Python',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Operating System :: OS Independent',
],
)
Loading

0 comments on commit c725567

Please sign in to comment.