diff --git a/Dockerfile b/Dockerfile index b23e60786..a8b9f983f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,7 +10,7 @@ LABEL org.opencontainers.image.source="https://github.com/insarlab/MintPy" LABEL org.opencontainers.image.documentation="https://mintpy.readthedocs.io/en/latest/" LABEL org.opencontainers.image.licenses="GPL-3.0-or-later" -# Dynamic lables to define at build time via `docker build --label` +# Dynamic labels to define at build time via `docker build --label` # LABEL org.opencontainers.image.created="" # LABEL org.opencontainers.image.version="" # LABEL org.opencontainers.image.revision="" diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 23871b40b..08927f4c6 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -36,7 +36,7 @@ is a great starting point if you are new to version control. - `origin`, which refers to your personal fork + Setting up [`pre-commit`](https://pre-commit.com/) within `MintPy` directory: - - Run `pre-commit install` to set up the git hook scripts, so that `pre-commit` will run automatically on `git commit`. If the `No .pre-commit-config.yaml file was found` error occurrs, update your local MintPy to the latest upstream version to have this config file. + - Run `pre-commit install` to set up the git hook scripts, so that `pre-commit` will run automatically on `git commit`. If the `No .pre-commit-config.yaml file was found` error occurs, update your local MintPy to the latest upstream version to have this config file. #### 2. Develop your contribution: #### @@ -56,7 +56,7 @@ is a great starting point if you are new to version control. git checkout -b seasonal_fitting ``` -+ Work on your idea, run tests and commit locally (`git add` and `git commit`) and/or to your fork on GitHub as you progress (`git push` in command line or [GitHub Desktop](https://desktop.github.com/) with graphical user interface). 
Use a clear commit message describing the motivation of a change, the nature of a bug for bug fixes or some details on what an enchancement does. ++ Work on your idea, run tests and commit locally (`git add` and `git commit`) and/or to your fork on GitHub as you progress (`git push` in command line or [GitHub Desktop](https://desktop.github.com/) with graphical user interface). Use a clear commit message describing the motivation of a change, the nature of a bug for bug fixes or some details on what an enhancement does. + Run the [overall test](./CONTRIBUTING.md#testing) locally. diff --git a/docs/FAQs.md b/docs/FAQs.md index 6cfb400d8..2451e5723 100644 --- a/docs/FAQs.md +++ b/docs/FAQs.md @@ -6,7 +6,7 @@ For line-of-sight (LOS) phase in the unit of radians, i.e. 'unwrapPhase' dataset For LOS displacement (velocity) in the unit of meters (m/yr), i.e. 'timeseries' dataset in `timeseries.h5` file, positive value represents motion toward the satellite (uplift for pure vertical motion). -### 2. How to prepare the input for MintPy if I am using currently un-supported InSAR softwares? +### 2. How to prepare the input for MintPy if I am using currently un-supported InSAR software? The input of MintPy routine workflow (`smallbaselineApp.py`) is a stack of unwrapped interferograms. For "stack", we mean all the interferograms (unwrapped phase and spatial coherence) and geometries (DEM, incidence angle, etc.) have the same spatial extent and same spatial resolution, either in geo-coordinates or radar (range-doppler) coordinates. The input has 2 components: data and attributes. @@ -39,7 +39,7 @@ For dataset in geo-coordinates [recommended]: For dataset in radar-coordinates, the extra lookup table file(s) is required (_e.g._ lat/lon.rdr for `ISCE-2`, sim_\*.UTM_TO_RDC for `Gamma`, geo_\*.trans for `ROI_PAC`). -All the files above should be in the same spatial extent and same spatial resolution (except for the lookup table in geo-coordinates from Gamma/ROI_PAC). 
If they are not (e.g. different row/column number, different spatial extent in terms of SNWE, different spatial resolution, etc.), the easiest way is to geocode them with the same ouput spatial extent and same output spatial resolution. +All the files above should be in the same spatial extent and same spatial resolution (except for the lookup table in geo-coordinates from Gamma/ROI_PAC). If they are not (e.g. different row/column number, different spatial extent in terms of SNWE, different spatial resolution, etc.), the easiest way is to geocode them with the same output spatial extent and same output spatial resolution. MintPy read data files via `mintpy.utils.readfile.read()`. It supports the following two types of file formats: diff --git a/docs/api/data_structure.md b/docs/api/data_structure.md index 78e348761..0b0f694f4 100644 --- a/docs/api/data_structure.md +++ b/docs/api/data_structure.md @@ -116,7 +116,7 @@ coordinates : RADAR Start Date: 20141213 End Date: 20180619 Number of acquisitions : 98 -Std. of acquisition times : 0.99 yeras +Std. 
of acquisition times : 0.99 years ---------------------- List of dates: ['20141213', '20141225', '20150307', '20150319', '20150331', '20150412', '20150424', '20150506', '20150518', '20150530', '20150611', '20150623', '20150717', '20150729', '20150822', '20150903', '20150915', '20150927', '20151009', '20151021', '20151102', '20151114', '20151126', '20151208', '20151220', '20160101', '20160113', '20160125', '20160206', '20160218', '20160301', '20160406', '20160418', '20160430', '20160512', '20160524', '20160605', '20160629', '20160711', '20160723', '20160804', '20160816', '20160828', '20160909', '20160921', '20161003', '20161015', '20161027', '20161108', '20161120', '20161202', '20161214', '20161226', '20170107', '20170119', '20170131', '20170212', '20170224', '20170308', '20170320', '20170401', '20170413', '20170425', '20170507', '20170519', '20170531', '20170612', '20170624', '20170706', '20170718', '20170730', '20170811', '20170823', '20170904', '20170916', '20170928', '20171010', '20171022', '20171103', '20171115', '20171127', '20171209', '20171221', '20180102', '20180114', '20180126', '20180207', '20180219', '20180303', '20180315', '20180327', '20180408', '20180420', '20180502', '20180514', '20180526', '20180607', '20180619'] diff --git a/docs/api/doc_generation.md b/docs/api/doc_generation.md index 365a01992..1633c63ec 100644 --- a/docs/api/doc_generation.md +++ b/docs/api/doc_generation.md @@ -1,6 +1,6 @@ We use [Doxygen](http://www.doxygen.nl/) to generate the API documentation automatically. -+ Install Doxygen following [link](http://www.doxygen.nl/download.html) if you have not already doen so. ++ Install Doxygen following [link](http://www.doxygen.nl/download.html) if you have not already done so. + Run doxygen command with `MintPy/docs/Doxyfile` to generate the API documentation in html and latex format (to `$MINTPY_HOME/docs/api_docs` by default). 
diff --git a/docs/dask.md b/docs/dask.md index 811ba6664..e2ffef464 100644 --- a/docs/dask.md +++ b/docs/dask.md @@ -106,7 +106,7 @@ smallbaselineApp.py smallbaselineApp.cfg #### 2.3 Configuration parameters in `~/.config/dask/mintpy.yaml` #### -We provide a brief description below for the most commonly used configurations of dask-jobqueue for MintPy. Users are recommended to check [Dask-Jobqueue](https://jobqueue.dask.org/en/latest/configuration-setup.html) for more detailed and comprehensive documentaion. +We provide a brief description below for the most commonly used configurations of dask-jobqueue for MintPy. Users are recommended to check [Dask-Jobqueue](https://jobqueue.dask.org/en/latest/configuration-setup.html) for more detailed and comprehensive documentation. + **name:** Name of the worker job as it will appear to the job scheduler. Any values are perfectly fine. diff --git a/docs/docker.md b/docs/docker.md index 788bd95d0..58f7bb7e6 100644 --- a/docs/docker.md +++ b/docs/docker.md @@ -45,7 +45,7 @@ docker run -it -v :/home/mambauser/data ghcr.io/insarlab/mint docker run -it -v :/home/mambauser/data ghcr.io/insarlab/mintpy:latest smallbaselineApp.py /home/mambauser/data/FernandinaSenDT128/mintpy/FernandinaSenDT128.txt ``` -Or run the following to launch the Jupyter Lab server, then copy and paste the printed `http://localhost:8888/lab?token=` url in a brower. +Or run the following to launch the Jupyter Lab server, then copy and paste the printed `http://localhost:8888/lab?token=` url in a browser. 
```shell # to launch a Jupyter Notebook frontend, replace "lab" with "notebook" in the command below diff --git a/docs/google_earth.md b/docs/google_earth.md index d65dd2923..0727e56c2 100644 --- a/docs/google_earth.md +++ b/docs/google_earth.md @@ -26,7 +26,7 @@ save_kmz_timeseries.py embeds a [dygraphs](http://dygraphs.com) javascript for i The script also use the [regions KML feature](https://developers.google.com/kml/documentation/regions) to support very large datasets without sacrificing resolution. It divides the data matrix into regionalized boxes, nests them using network links so that Google Earth could load them in a "smart" way. -**Alert: for very large datasets, the default settings are not generic due to the various computer memories, data sizes and different prefered details. The user is highly recommended to read the following to understand how the regions feature works and adjust parameters accordingly.** +**Alert: for very large datasets, the default settings are not generic due to the various computer memories, data sizes and different preferred details. The user is highly recommended to read the following to understand how the regions feature works and adjust parameters accordingly.** 1. Level of Detail (LOD) diff --git a/docs/hdfeos5.md b/docs/hdfeos5.md index a24d19a88..be05436c7 100644 --- a/docs/hdfeos5.md +++ b/docs/hdfeos5.md @@ -86,7 +86,7 @@ E.g. S1_IW12_128_0593_0597_20141213_20170928.he5 ### 4. Web Viewer ### -HDF-EOS5 file format is used as the input of the University of Miami's web viewer for InSAR time-series products. Below is a screenshot of the web viewer for the dataset on Kuju volcano from ALOS-1 acending track 422. +HDF-EOS5 file format is used as the input of the University of Miami's web viewer for InSAR time-series products. Below is a screenshot of the web viewer for the dataset on Kuju volcano from ALOS-1 ascending track 422.
http://insarmaps.miami.edu
~/.
```bash
export VRT_SHARED_SOURCE=0 # do not share dataset while using GDAL VRT in a multi-threading environment
-export HDF5_DISABLE_VERSION_CHECK=2 # supress the HDF5 version warning message (0 for abort; 1/2 for printout/suppress warning message)
+export HDF5_DISABLE_VERSION_CHECK=2 # suppress the HDF5 version warning message (0 for abort; 1/2 for printout/suppress warning message)
export HDF5_USE_FILE_LOCKING=FALSE # request that HDF5 file locks should NOT be used
```
diff --git a/scripts/fix_typos.sh b/scripts/fix_typos.sh
new file mode 100755
index 000000000..228c8b0bd
--- /dev/null
+++ b/scripts/fix_typos.sh
@@ -0,0 +1,79 @@
+#!/bin/sh
+# -*- coding: utf-8 -*-
+###############################################################################
+# $Id$
+#
+# Project: GDAL
+# Purpose: (Interactive) script to identify and fix typos
+# Author: Even Rouault
+#
+###############################################################################
+# Copyright (c) 2016, Even Rouault
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the "Software"),
+# to deal in the Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish, distribute, sublicense,
+# and/or sell copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+# DEALINGS IN THE SOFTWARE.
+###############################################################################
+
+set -eu
+
+SCRIPT_DIR=$(dirname "$0")
+case $SCRIPT_DIR in
+ "/"*)
+ ;;
+ ".")
+ SCRIPT_DIR=$(pwd)
+ ;;
+ *)
+ SCRIPT_DIR=$(pwd)"/"$(dirname "$0")
+ ;;
+esac
+GDAL_ROOT=$SCRIPT_DIR/..
+cd "$GDAL_ROOT"
+
+if ! test -d fix_typos; then
+ # Get our fork of codespell that adds --words-white-list and full filename support for -S option
+ mkdir fix_typos
+ (cd fix_typos
+ git clone https://github.com/rouault/codespell
+ (cd codespell && git checkout gdal_improvements)
+ # Aggregate base dictionary + QGIS one + Debian Lintian one
+ curl https://raw.githubusercontent.com/qgis/QGIS/master/scripts/spell_check/spelling.dat | sed "s/:/->/" | sed "s/:%//" | grep -v "colour->" | grep -v "colours->" > qgis.txt
+ curl https://salsa.debian.org/lintian/lintian/-/raw/master/data/spelling/corrections | grep "||" | grep -v "#" | sed "s/||/->/" > debian.txt
+ cat codespell/data/dictionary.txt qgis.txt debian.txt | awk 'NF' > gdal_dict.txt
+ echo "difered->deferred" >> gdal_dict.txt
+ echo "differed->deferred" >> gdal_dict.txt
+ grep -v 404 < gdal_dict.txt > gdal_dict.txt.tmp
+ mv gdal_dict.txt.tmp gdal_dict.txt
+ )
+fi
+
+EXCLUDED_FILES="*/.svn*,*/.git/*,configure,config.log,config.status,config.guess,config.sub,*/autom4te.cache/*,*.ai,*.svg"
+AUTHORIZED_LIST="${AUTHORIZED_LIST:-},te" # gdalwarp switch; default-empty expansion so `set -u` does not abort when AUTHORIZED_LIST is unset
+AUTHORIZED_LIST="$AUTHORIZED_LIST,LaTeX,BibTeX"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,ALOS,Alos"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,lon,Lon,LON"
+# New MintPy ones
+AUTHORIZED_LIST="$AUTHORIZED_LIST,alos,ALOS,alosStack"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,NED"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,waterMask,watermask"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,smallbaselineApp"
+AUTHORIZED_LIST="$AUTHORIZED_LIST,Nealy" # Author in reference
+
+python fix_typos/codespell/codespell.py -w -i 3 -q 2 -S "$EXCLUDED_FILES,./autotest/*,./build*/*" \
+ --words-white-list="$AUTHORIZED_LIST" \
+ -D ./fix_typos/gdal_dict.txt .
diff --git a/src/mintpy/asc_desc2horz_vert.py b/src/mintpy/asc_desc2horz_vert.py
index cd5fabaa1..4585781e4 100644
--- a/src/mintpy/asc_desc2horz_vert.py
+++ b/src/mintpy/asc_desc2horz_vert.py
@@ -78,7 +78,7 @@ def get_design_matrix4horz_vert(los_inc_angle, los_az_angle, horz_az_angle=-90):
+ dV * cos(inc_angle)
with dH_perp = 0.0
This could be easily modified to support multiple view geometry
- (e.g. two adjcent tracks from asc & desc) to resolve 3D
+ (e.g. two adjacent tracks from asc & desc) to resolve 3D
Parameters: los_inc_angle - 1D np.ndarray in size of (num_file), LOS incidence angle in degree.
los_az_angle - 1D np.ndarray in size of (num_file), LOS azimuth angle in degree.
@@ -155,7 +155,7 @@ def run_asc_desc2horz_vert(inps):
Returns: inps.outfile - str(s) output file(s)
"""
- ## 1. calculate the overlaping area in lat/lon
+ ## 1. calculate the overlapping area in lat/lon
atr_list = [readfile.read_attribute(fname, datasetName=inps.ds_name) for fname in inps.file]
S, N, W, E = get_overlap_lalo(atr_list)
lat_step = float(atr_list[0]['Y_STEP'])
diff --git a/src/mintpy/cli/closure_phase_bias.py b/src/mintpy/cli/closure_phase_bias.py
index e63445f7d..4155ef844 100755
--- a/src/mintpy/cli/closure_phase_bias.py
+++ b/src/mintpy/cli/closure_phase_bias.py
@@ -23,7 +23,7 @@
# Notebook tutorial:
# https://nbviewer.org/github/insarlab/MintPy-tutorial/blob/main/applications/closure_phase_bias.ipynb
- # create mask for areas suseptible to biases
+ # create mask for areas susceptible to biases
closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 5 -a mask
closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 20 -a mask --num-sigma 2.5
diff --git a/src/mintpy/cli/dem_error.py b/src/mintpy/cli/dem_error.py
index 3e02c4723..278619282 100755
--- a/src/mintpy/cli/dem_error.py
+++ b/src/mintpy/cli/dem_error.py
@@ -41,7 +41,7 @@ def create_parser(subparsers=None):
parser = arg_utils.create_argument_parser(
name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
- parser.add_argument('ts_file', help='Time-series HDF5 file to be corrrected.')
+ parser.add_argument('ts_file', help='Time-series HDF5 file to be corrected.')
parser.add_argument('-g', '--geometry', dest='geom_file',
help='geometry file including datasets:\n'+
'incidence angle\n'+
diff --git a/src/mintpy/cli/dem_gsi.py b/src/mintpy/cli/dem_gsi.py
index d9561ca95..a6e6de053 100755
--- a/src/mintpy/cli/dem_gsi.py
+++ b/src/mintpy/cli/dem_gsi.py
@@ -20,7 +20,7 @@
"""
NOTE = """DEHM: Digital Ellipsoidal Height Model
- yyxx.dehm with yy and xx indicating the coordinates of the upper left corner of the firt pixel.
+ yyxx.dehm with yy and xx indicating the coordinates of the upper left corner of the first pixel.
where latitude = (yy + 1) / 1.5, longitude = xx + 100
"""
diff --git a/src/mintpy/cli/diff.py b/src/mintpy/cli/diff.py
index 2f04256e5..dd6a0c8ab 100755
--- a/src/mintpy/cli/diff.py
+++ b/src/mintpy/cli/diff.py
@@ -58,7 +58,7 @@ def cmd_line_parse(iargs=None):
ftype = readfile.read_attribute(inps.file1)['FILE_TYPE']
if ftype in ['timeseries', 'ifgramStack', '.unw']:
if len(inps.file2) > 1:
- raise SystemExit(f'ERROR: ONLY ONE file2 is inputed for {ftype} type!')
+ raise SystemExit(f'ERROR: ONLY ONE file2 is inputted for {ftype} type!')
# check: --output (output file is required for number of files >=2)
if not inps.out_file:
diff --git a/src/mintpy/cli/generate_mask.py b/src/mintpy/cli/generate_mask.py
index 1b687fd47..3611d84ec 100755
--- a/src/mintpy/cli/generate_mask.py
+++ b/src/mintpy/cli/generate_mask.py
@@ -40,7 +40,7 @@
# common mask file of pixels in all connected components / with non-zero unwrapped phase
generate_mask.py ifgramStack.h5 --nonzero -o maskConnComp.h5 --update
- # interative polygon selection of region of interest
+ # interactive polygon selection of region of interest
# useful for custom mask generation in unwrap error correction with bridging
generate_mask.py waterMask.h5 -m 0.5 --roipoly
generate_mask.py azOff.h5 --roipoly --view-cmd "-v -0.1 0.1"
diff --git a/src/mintpy/cli/geocode.py b/src/mintpy/cli/geocode.py
index ff0403910..80c6003f4 100755
--- a/src/mintpy/cli/geocode.py
+++ b/src/mintpy/cli/geocode.py
@@ -70,7 +70,7 @@ def create_parser(subparsers=None):
out = parser.add_argument_group('grid in geo-coordinates')
out.add_argument('-b', '--bbox', dest='SNWE', type=float, nargs=4, metavar=('S', 'N', 'W', 'E'),
help='Bounding box for the area of interest.\n'
- 'using coordinates of the uppler left corner of the first pixel\n'
+ 'using coordinates of the upper left corner of the first pixel\n'
' and the lower right corner of the last pixel\n'
"for radar2geo, it's the output spatial extent\n"
"for geo2radar, it's the input spatial extent")
diff --git a/src/mintpy/cli/ifgram_inversion.py b/src/mintpy/cli/ifgram_inversion.py
index b9e5a6ec2..7b7ebbd53 100755
--- a/src/mintpy/cli/ifgram_inversion.py
+++ b/src/mintpy/cli/ifgram_inversion.py
@@ -80,7 +80,7 @@ def create_parser(subparsers=None):
help=('Enable inversion with minimum-norm deformation phase,'
' instead of the default minimum-norm deformation velocity.'))
#solver.add_argument('--norm', dest='residualNorm', default='L2', choices=['L1', 'L2'],
- # help='Optimization mehtod, L1 or L2 norm. (default: %(default)s).')
+ # help='Optimization method, L1 or L2 norm. (default: %(default)s).')
# uncertainty propagation
parser.add_argument('--calc-cov', dest='calcCov', action='store_true',
@@ -97,9 +97,9 @@ def create_parser(subparsers=None):
help='minimum redundancy of interferograms for every SAR acquisition. (default: %(default)s).')
# for offset ONLY
#mask.add_argument('--mask-min-snr', dest='maskMinSNR', type=float, default=10.0,
- # help='minimum SNR to diable/ignore the threshold-based masking [for offset only].')
+ # help='minimum SNR to disable/ignore the threshold-based masking [for offset only].')
#mask.add_argument('--mask-min-area-size', dest='maskMinAreaSize', type=float, default=16.0,
- # help='minimum area size to diable/ignore the threshold-based masking [for offset only]')
+ # help='minimum area size to disable/ignore the threshold-based masking [for offset only]')
# computing
parser = arg_utils.add_memory_argument(parser)
diff --git a/src/mintpy/cli/local_oscilator_drift.py b/src/mintpy/cli/local_oscilator_drift.py
index b91ba6054..f265203fe 100755
--- a/src/mintpy/cli/local_oscilator_drift.py
+++ b/src/mintpy/cli/local_oscilator_drift.py
@@ -26,7 +26,7 @@
"""
def create_parser(subparsers=None):
- synopsis = 'Local Oscilator Drift (LOD) correction of Envisat'
+ synopsis = 'Local Oscillator Drift (LOD) correction of Envisat'
epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
name = __name__.split('.')[-1]
parser = create_argument_parser(
diff --git a/src/mintpy/cli/modify_network.py b/src/mintpy/cli/modify_network.py
index 7bf543866..f3409efa3 100755
--- a/src/mintpy/cli/modify_network.py
+++ b/src/mintpy/cli/modify_network.py
@@ -115,7 +115,7 @@ def cmd_line_parse(iargs=None):
if not os.path.isfile(inps.maskFile):
inps.maskFile = None
- # check: --exclude-ifg-index option (convert input index to continous index list)
+ # check: --exclude-ifg-index option (convert input index to continuous index list)
inps.excludeIfgIndex = read_input_index_list(inps.excludeIfgIndex, stackFile=inps.file)
# check: -t / --template option
diff --git a/src/mintpy/cli/plate_motion.py b/src/mintpy/cli/plate_motion.py
index 28eb4ce9d..519a0bf16 100755
--- a/src/mintpy/cli/plate_motion.py
+++ b/src/mintpy/cli/plate_motion.py
@@ -37,7 +37,7 @@
"""
EXAMPLE = """example:
- # Use build-in plate motion model of Table 1 from Altamimi et al. (2017)
+ # Use built-in plate motion model of Table 1 from Altamimi et al. (2017)
plate_motion.py -g inputs/geometryGeo.h5 --plate Arabia
plate_motion.py -g inputs/geometryRadar.h5 --plate Eurasia
diff --git a/src/mintpy/cli/s1ab_range_bias.py b/src/mintpy/cli/s1ab_range_bias.py
index cc3c3ef2b..cb9b55253 100755
--- a/src/mintpy/cli/s1ab_range_bias.py
+++ b/src/mintpy/cli/s1ab_range_bias.py
@@ -42,7 +42,7 @@ def create_parser(subparsers=None):
name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
# input/output files
- parser.add_argument('ts_file', help='Range offset timeseries file to be corrrected, e.g. timeseriesRg_SET_ERA5.h5.')
+ parser.add_argument('ts_file', help='Range offset timeseries file to be corrected, e.g. timeseriesRg_SET_ERA5.h5.')
parser.add_argument('-g', '--geom', '--geometry', dest='geom_file', help='geometry file including datasets:\nheight')
parser.add_argument('-m', '--mask', dest='mask_file', help='mask file')
diff --git a/src/mintpy/cli/save_hdfeos5.py b/src/mintpy/cli/save_hdfeos5.py
index 1559336e5..1f1e530a1 100755
--- a/src/mintpy/cli/save_hdfeos5.py
+++ b/src/mintpy/cli/save_hdfeos5.py
@@ -13,7 +13,7 @@
from mintpy.utils.arg_utils import create_argument_parser
################################################################
-TEMPALTE = TEMPLATE = get_template_content('hdfeos5')
+TEMPLATE = get_template_content('hdfeos5')
EXAMPLE = """example:
save_hdfeos5.py geo/geo_timeseries_ERA5_ramp_demErr.h5
@@ -29,7 +29,7 @@
def create_parser(subparsers=None):
synopsis = 'Convert MintPy timeseries product into HDF-EOS5 format'
- epilog = TEMPALTE + '\n' + EXAMPLE
+ epilog = TEMPLATE + '\n' + EXAMPLE
name = __name__.split('.')[-1]
parser = create_argument_parser(
name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
diff --git a/src/mintpy/cli/save_kmz_timeseries.py b/src/mintpy/cli/save_kmz_timeseries.py
index 76e9b7102..c0100508a 100755
--- a/src/mintpy/cli/save_kmz_timeseries.py
+++ b/src/mintpy/cli/save_kmz_timeseries.py
@@ -73,7 +73,7 @@ def cmd_line_parse(iargs=None):
# import
from mintpy.utils import readfile
- # check: intput file coordinates system (required in geo)
+ # check: input file coordinates system (required in geo)
atr = readfile.read_attribute(inps.ts_file)
if "Y_FIRST" not in atr.keys():
raise ValueError(f"input file {inps.ts_file} is NOT geocoded")
diff --git a/src/mintpy/cli/tsview.py b/src/mintpy/cli/tsview.py
index abe76a2a7..b93c64d88 100755
--- a/src/mintpy/cli/tsview.py
+++ b/src/mintpy/cli/tsview.py
@@ -78,7 +78,7 @@ def create_parser(subparsers=None):
dest='plot_model_conf_int', action='store_true',
help='Plot the time function prediction confidence intervals.\n'
'[!-- Preliminary feature alert! --!]\n'
- '[!-- This feature is NOT throughly checked. '
+ '[!-- This feature is NOT thoroughly checked. '
'Read the code before use. Interpret at your own risk! --!]')
parser = arg_utils.add_timefunc_argument(parser)
diff --git a/src/mintpy/cli/unwrap_error_phase_closure.py b/src/mintpy/cli/unwrap_error_phase_closure.py
index ffd6c88eb..07c5b8600 100755
--- a/src/mintpy/cli/unwrap_error_phase_closure.py
+++ b/src/mintpy/cli/unwrap_error_phase_closure.py
@@ -116,7 +116,7 @@ def cmd_line_parse(iargs=None):
def read_template2inps(template_file, inps):
"""Read input template options into Namespace inps"""
- print('read options from tempalte file: '+os.path.basename(inps.template_file))
+ print('read options from template file: '+os.path.basename(inps.template_file))
from mintpy.unwrap_error_phase_closure import key_prefix
from mintpy.utils import readfile, utils1 as ut
diff --git a/src/mintpy/closure_phase_bias.py b/src/mintpy/closure_phase_bias.py
index 7124bfadb..cebb0aba3 100644
--- a/src/mintpy/closure_phase_bias.py
+++ b/src/mintpy/closure_phase_bias.py
@@ -22,7 +22,7 @@
################################# Mask #######################################
def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_amp=0.3,
outdir='./', max_memory=4.0):
- """Calculate a mask for areas suseptible to biases, based on the average closure phase tau.
+ """Calculate a mask for areas susceptible to biases, based on the average closure phase tau.
Equation: tau = 1 / K * Sigma_{k=1}^K (np.exp(j * Phi_k^{nl}))
where K is the number of closure phase for connection nl, Phi_k^{nl} is the k-th sequential
@@ -35,7 +35,7 @@ def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_a
threshold_amp - float, threshold of ampliutde of the cumulative sequential closure phase
outdir - str, directory of output files
max_mermory - float, maximum memory in GB for each patch processed
- Returns: mask - 2D np.ndarray of size (length, width) in boolean, 0 for areas suseptible to biases.
+ Returns: mask - 2D np.ndarray of size (length, width) in boolean, 0 for areas susceptible to biases.
Saved to file: maskClosurePhase.h5
avg_cp - 2D np.ndarray of size (length, width) in complex64, average cum. seq. closure phase
Saved to file: avgCpxClosurePhase.h5
@@ -61,7 +61,7 @@ def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_a
# key info
print('\n'+'-'*80)
- print('calculating the mask to flag areas suseptible to non-closure-phase related biases (as zero) ...')
+ print('calculating the mask to flag areas susceptible to non-closure-phase related biases (as zero) ...')
print(f'number of valid acquisitions: {len(date_list)} ({date_list[0]} - {date_list[-1]})')
print(f'average complex closure phase threshold in amplitude/correlation: {threshold_amp}')
print(f'average complex closure phase threshold in phase: {num_sigma} sigma ({threshold_pha:.1f} rad)')
@@ -98,7 +98,7 @@ def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_a
avg_cp[no_data_mask] = np.nan
# create mask
- print('\ncreate mask for areas suseptible to non-closure phase biases')
+ print('\ncreate mask for areas susceptible to non-closure phase biases')
mask = np.ones([length,width], dtype=bool)
# mask areas with potential bias
@@ -106,7 +106,7 @@ def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_a
mask[np.abs(np.angle(avg_cp)) > threshold_pha] = 0
# unmask areas with low correlation
- # where it's hard to know wheter there is bias or not
+ # where it's hard to know whether there is bias or not
print(f'set pixels with average complex closure phase amplitude (correlation) < {threshold_amp} to 1.')
mask[np.abs(np.abs(avg_cp) < threshold_amp)] = 1
@@ -349,7 +349,7 @@ def compute_unwrap_closure_phase(stack_file, conn, num_worker=1, outdir='./', ma
## calc the cumulativev unwrapped closure phase time-series
print('-'*60)
print('step 3/3: calculate the unwrapped cumulative sequential closure phase time-series ...')
- print(' Note that a referece point in the ifgramStack.h5 (as attributes "REF_Y/X") is needed to continue. ')
+ print(' Note that a reference point in the ifgramStack.h5 (as attributes "REF_Y/X") is needed to continue. ')
print(' A good reference point should be a pixel that has good temporal coherence and no bias.')
cum_seq_unw_closure_phase_timeseries(conn, conn_dir, date_list, meta)
@@ -423,7 +423,7 @@ def estimate_wratio(tbase, conn, bias_free_conn, wvl, box, outdir='./', mask=Fal
vel_bias_connN = np.multiply(wratio_connN, vel_bias_connF)
if mask:
# if average velocity smaller than 1 mm/year (hardcoded here), mask out for better visual
- # this option is only turned on while outputing wratio.h5 file.
+ # this option is only turned on while outputting wratio.h5 file.
wratio_connN[abs(vel_bias_connF) < 0.001] = np.nan
# debug mode
@@ -531,7 +531,7 @@ def estimate_bias_timeseries_approx_patch(bias_free_conn, bw, tbase, date_ordina
date_ordinal - list of size (num_date,) in integer, time in days
wvl - float, wavelength of the SAR system
box - list in size of (4,) in integer, coordinates of bounding box
- outdir - string, directory for outputing files
+ outdir - string, directory for outputting files
Returns: bias_ts - 3D array in size of (num_date, box_len, box_wid) in float, bias timeseries
'''
print('\n'+'-'*60)
@@ -719,7 +719,7 @@ def bandwidth2num_ifgram(bw, num_date):
Reference: Equation (15) in Zheng et al. (2022)
- Parameters: bw - int, bandwith
+ Parameters: bw - int, bandwidth
num_date - int, number of acquisitions
Returns: num_ifgram - int, number of interferograms
'''
@@ -745,7 +745,7 @@ def get_design_matrix_Wr(date12_list, bw, box, bias_free_conn, outdir='./'):
# get w(delta_t) * phi^x - section VI-A
wratio_all = estimate_wratio_all(bw, bias_free_conn, outdir, box)
- # intial output value
+ # initial output value
num_pix = (box[2] - box[0]) * (box[3] - box[1])
Wr = np.zeros((num_ifgram, num_pix), dtype=np.float32)
for i in range(num_ifgram):
@@ -915,7 +915,7 @@ def estimate_bias_timeseries(stack_file, bias_free_conn, bw, cluster_kwargs, wat
Parameters: stack_file - string, path for ifgramStack.h5
bias_free_conn - integer, connection level at which we assume is bias-free
bw - integer, bandwidth of the given time-series.
- cluster_kwargs - dictonary containing settings of parallel computing. To turn off, set parallel['clustertype']=''
+ cluster_kwargs - dictionary containing settings of parallel computing. To turn off, set parallel['clustertype']=''
outdir - string, directory for output files
max_memory - float, maximum memory in GB for each patch processed
Returns: bias_ts_file - str, path to the bias time series file: timeseriesBias.h5
diff --git a/src/mintpy/defaults/auto_path.py b/src/mintpy/defaults/auto_path.py
index 54bccadb4..f983dcbab 100644
--- a/src/mintpy/defaults/auto_path.py
+++ b/src/mintpy/defaults/auto_path.py
@@ -1,4 +1,4 @@
-"""Utilities for automatic configuration fo input file pathes"""
+"""Utilities for automatic configuration for input file paths"""
############################################################
# Program is part of MintPy #
# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi #
@@ -114,7 +114,7 @@
prefix = 'mintpy.load.'
-##----------------- Functions from mintpy.utils.readfile to be independnt module ---------##
+##----------------- Functions from mintpy.utils.readfile to be independent module ---------##
def read_str2dict(inString, delimiter='=', print_msg=False):
'''Read multiple lines of string into dict
Based on mintpy.utils.readfile.read_template()
diff --git a/src/mintpy/defaults/smallbaselineApp.cfg b/src/mintpy/defaults/smallbaselineApp.cfg
index e5db723c7..0ed5ccb6e 100644
--- a/src/mintpy/defaults/smallbaselineApp.cfg
+++ b/src/mintpy/defaults/smallbaselineApp.cfg
@@ -163,7 +163,7 @@ mintpy.unwrapError.bridgePtsRadius = auto #[1-inf], auto for 50, half size of t
########## 5. invert_network
-## Invert network of interferograms into time-series using weighted least sqaure (WLS) estimator.
+## Invert network of interferograms into time-series using weighted least squares (WLS) estimator.
## weighting options for least square inversion [fast option available but not best]:
## a. var - use inverse of covariance as weight (Tough et al., 1995; Guarnieri & Tebaldini, 2008) [recommended]
## b. fim - use Fisher Information Matrix as weight (Seymour & Cumming, 1994; Samiei-Esfahany et al., 2016).
diff --git a/src/mintpy/dem_error.py b/src/mintpy/dem_error.py
index 4dfb27ba3..f38b06c30 100644
--- a/src/mintpy/dem_error.py
+++ b/src/mintpy/dem_error.py
@@ -577,7 +577,7 @@ def correct_dem_error(inps):
datasetName='timeseries',
block=block)
- # roll back to the origial number of threads
+ # roll back to the original number of threads
cluster.roll_back_num_threads(num_threads_dict)
# time info
diff --git a/src/mintpy/ifgram_inversion.py b/src/mintpy/ifgram_inversion.py
index 558cd9387..3cd7c4a6f 100644
--- a/src/mintpy/ifgram_inversion.py
+++ b/src/mintpy/ifgram_inversion.py
@@ -134,7 +134,7 @@ def estimate_timeseries(A, B, y, tbase_diff, weight_sqrt=None, min_norm_velocity
inv_quality_name - str, inversion quality type/name
temporalCoherence for phase
residual for offset
- no to turn OFF the calcualtion
+ no to turn OFF the calculation
Returns: ts - 2D np.ndarray in size of (num_date, num_pixel), phase time-series
inv_quality - 1D np.ndarray in size of (num_pixel), temporal coherence (for phase) or residual (for offset)
num_inv_obs - 1D np.ndarray in size of (num_pixel), number of observations (ifgrams / offsets)
@@ -681,7 +681,7 @@ def run_ifgram_inversion_patch(ifgram_file, box=None, ref_phase=None, obs_ds_nam
weight_sqrt[weight_sqrt < 0.005] = 0.005
print('convert std. dev. to the inverse of variance')
- weight_sqrt = 1. / weight_sqrt # use squre root of weight, to faciliate WLS, same as for phase.
+ weight_sqrt = 1. / weight_sqrt # use square root of weight, to facilitate WLS, same as for phase.
# prepare for Std time-series
if calc_cov:
@@ -705,7 +705,7 @@ def run_ifgram_inversion_patch(ifgram_file, box=None, ref_phase=None, obs_ds_nam
dropIfgram=True)
# translate zero phase value to nan (no-data value)
- # becuase it's the common filled value used in phase masking
+ # because it's the common filled value used in phase masking
if 'phase' in obs_ds_name.lower():
stack_obs[stack_obs == 0.] = np.nan
print(f'convert zero value in {obs_ds_name} to NaN (no-data value)')
@@ -937,7 +937,7 @@ def run_ifgram_inversion(inps):
## limit the number of threads in numpy/scipy to 1
# and save the original value for roll back afterwards
- # becuase it does not increase the speed much but does increase the CPU usage significantly
+ # because it does not increase the speed much but does increase the CPU usage significantly
# as shown in the test note below.
# Dataset: SanFranSenDT42 version 1.x, patch 1 (505 x 510 x 1021) only
# Machine 1: Mac (6 Intel i7 CPUs/cores in 2.6 GHz)
diff --git a/src/mintpy/image_stitch.py b/src/mintpy/image_stitch.py
index 19659ec5b..b4762c0ac 100644
--- a/src/mintpy/image_stitch.py
+++ b/src/mintpy/image_stitch.py
@@ -133,13 +133,13 @@ def stitch_two_matrices(mat1, atr1, mat2, atr2, apply_offset=True, print_msg=Tru
length = int(np.ceil((S - N) / lat_step))
# index of input matrices in output matrix
- vprint('estimate difference in the overlaping area')
+ vprint('estimate difference in the overlapping area')
lon_seq = np.arange(W, W + width * lon_step, lon_step)
lat_seq = np.arange(N, N + length * lat_step, lat_step)
x1, y1 = np.argmin(np.square(lon_seq - W1)), np.argmin(np.square(lat_seq - N1))
x2, y2 = np.argmin(np.square(lon_seq - W2)), np.argmin(np.square(lat_seq - N2))
- # estimate offset of the overlaping area
+ # estimate offset of the overlapping area
mat11 = np.zeros([length, width]) * np.nan;
mat22 = np.zeros([length, width]) * np.nan;
mat11[y1:y1+length1, x1:x1+width1] = mat1
diff --git a/src/mintpy/legacy/insar_vs_gps_legacy.py b/src/mintpy/legacy/insar_vs_gps_legacy.py
index cc79a8cfd..49c0071e4 100755
--- a/src/mintpy/legacy/insar_vs_gps_legacy.py
+++ b/src/mintpy/legacy/insar_vs_gps_legacy.py
@@ -135,10 +135,10 @@ def usage():
-m min value of the x and y axis of the plot
-M max value of the x and y axis of the plot
- -r refernce GPS station
+ -r reference GPS station
-s second velocity map
-S source of the GPS data: (usgs,cmm4,mintpy)
- see documentation for more infromation
+ see documentation for more information
-I incidence angle (if not given average look angle is used instead)
-H Heading angle (if not given then the program reads it from the attributes of the velocity file)
@@ -299,7 +299,7 @@ def main(argv):
else:
print("""
%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
- WARNING: nan value for InSAR data at the refernce pixel!
+ WARNING: nan value for InSAR data at the reference pixel!
reference station should be a pixel with valid value in InSAR data.
please select another GPS station as the reference station.
diff --git a/src/mintpy/legacy/select_network.py b/src/mintpy/legacy/select_network.py
index 33857b67c..89f516159 100755
--- a/src/mintpy/legacy/select_network.py
+++ b/src/mintpy/legacy/select_network.py
@@ -130,7 +130,7 @@ def create_parser():
help='max temporal baseline in days')
threshold.add_argument('--keep-seasonal', dest='keepSeasonal', action='store_true',
help='keep seasonal pairs, even they are out of temporal baseline limit\n' +
- 'i.e. pairs in same/adjcent month within 3 years.')
+ 'i.e. pairs in same/adjacent month within 3 years.')
parser.add_argument('--inc-angle', dest='inc_angle',
type=float, help='Center incidence angle in degrees.')
@@ -154,7 +154,7 @@ def cmd_line_parse(iargs=None):
#########################################################################
def log(msg):
- """Log function writen by Falk"""
+ """Log function written by Falk"""
f = open('log', 'a')
callingFunction = os.path.basename(inspect.stack()[1][1])
dateStr = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%dT%H:%M:%S')
@@ -186,7 +186,7 @@ def read_template2inps(templateFile, inps=None):
msg += f'Use {prefix} instead'
raise Exception(msg)
if all(prefix not in key for key in template.keys()):
- msg = 'no valid input option deteced in template file!\n'
+ msg = 'no valid input option detected in template file!\n'
msg += 'Check the template below for supported options:\n'
msg += TEMPLATE
raise Exception(msg)
diff --git a/src/mintpy/legacy/transect_legacy.py b/src/mintpy/legacy/transect_legacy.py
index d0b23d1b1..2c069044d 100755
--- a/src/mintpy/legacy/transect_legacy.py
+++ b/src/mintpy/legacy/transect_legacy.py
@@ -307,7 +307,7 @@ def get_transect(z, x0, y0, x1, y1, interpolation='nearest'):
transect.py -f velocity.h5 -s 'y1,x1' -e 'y2,x2 -n number_of_transects -d distace_between_profiles(pixel)
-g gps velocity file -r reference station -L List of stations
- -s : strat point of the profile
+ -s : start point of the profile
-e : end point of the profile
-F : Fault coordinates (lat_first, lon_first, lat_end, lon_end)
-n : number of transections [default: 1]
@@ -638,7 +638,7 @@ def onclick(event):
print("""
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
- WARNING: nan value for InSAR data at the refernce pixel!
+ WARNING: nan value for InSAR data at the reference pixel!
reference station should be a pixel with valid value in InSAR data.
please select another GPS station as the reference station.
@@ -940,7 +940,7 @@ def onclick(event):
# print df0_km/1000.0
###################################################################
- # lower and higher bounds for diplaying the profile
+ # lower and higher bounds for displaying the profile
try:
lbound
diff --git a/src/mintpy/legacy/tropo_pyaps.py b/src/mintpy/legacy/tropo_pyaps.py
index 4a32ad926..75ee5e9ab 100755
--- a/src/mintpy/legacy/tropo_pyaps.py
+++ b/src/mintpy/legacy/tropo_pyaps.py
@@ -381,7 +381,7 @@ def dload_grib_pyaps(grib_file_list):
def get_delay(grib_file, inps):
"""Get delay matrix using PyAPS for one acquisition
Inputs:
- grib_file - strng, grib file path
+ grib_file - string, grib file path
atr - dict, including the following attributes:
dem_file - string, DEM file path
trop_model - string, Weather re-analysis data source
diff --git a/src/mintpy/load_data.py b/src/mintpy/load_data.py
index 972e01228..2a995099e 100644
--- a/src/mintpy/load_data.py
+++ b/src/mintpy/load_data.py
@@ -180,7 +180,7 @@ def read_subset_box(iDict):
geo_box = None
print('WARNING: mintpy.subset.lalo is not supported'
' if 1) no lookup file AND'
- ' 2) radar/unkonwn coded dataset')
+ ' 2) radar/unknown coded dataset')
print('\tignore it and continue.')
if not geo_box and not pix_box:
@@ -537,7 +537,7 @@ def run_or_skip(outFile, inObj, box, updateMode=True, xstep=1, ystep=1, geom_obj
Do not write HDF5 file if ALL the following meet:
1. HDF5 file exists and is readable,
- 2. HDF5 file constains all the datasets and in the same size
+ 2. HDF5 file contains all the datasets and in the same size
3. For ifgramStackDict, HDF5 file contains all date12.
Parameters: outFile - str, path to the output HDF5 file
diff --git a/src/mintpy/local_oscilator_drift.py b/src/mintpy/local_oscilator_drift.py
index 726538140..16d89b5f8 100644
--- a/src/mintpy/local_oscilator_drift.py
+++ b/src/mintpy/local_oscilator_drift.py
@@ -28,7 +28,7 @@ def get_relative_range_distance(meta):
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
print('-'*50)
- print('correct Local Oscilator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
+ print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
print('-'*50)
atr = readfile.read_attribute(fname)
diff --git a/src/mintpy/multi_transect.py b/src/mintpy/multi_transect.py
index 1ed77b2ec..6ead65740 100755
--- a/src/mintpy/multi_transect.py
+++ b/src/mintpy/multi_transect.py
@@ -997,7 +997,7 @@ def onclick(event):
ax.set_xlabel('Distance along profile [km]', fontsize=26)
###################################################################
- # lower and higher bounds for diplaying the profile
+ # lower and higher bounds for displaying the profile
try:
lbound
diff --git a/src/mintpy/objects/cluster.py b/src/mintpy/objects/cluster.py
index 846a6943f..fca965317 100644
--- a/src/mintpy/objects/cluster.py
+++ b/src/mintpy/objects/cluster.py
@@ -33,7 +33,7 @@ def split_box2sub_boxes(box, num_split, dimension='x', print_msg=False):
:param num_split: int, the initial number of sub_boxes to split a box into
:param dimension: str = 'y' or 'x', the dimension along which to split the boxes
:return: sub_boxes: list(list(4 int)), the splited sub boxes
- :return: num_split: int, the final number of splitted sub_boxes
+ :return: num_split: int, the final number of split sub_boxes
"""
import numpy as np
@@ -143,7 +143,7 @@ class DaskCluster:
3. all matrices will be in 2D in size of (len, wid) or 3D in size of (n, len, wid),
thus, the last two dimension (in space) will be the same.
This charateristics allows the automatic result collection without prior knowledge
- of the computing funciton, thus being a generic wrapper.
+ of the computing function, thus being a generic wrapper.
Check ifgram_inversion.py as an example.
@@ -176,7 +176,7 @@ def __init__(self, cluster_type, num_worker, config_name=None, **kwargs):
if self.config_name is not None:
print(f"input Dask config name: {self.config_name}")
- ## intitial value
+ ## initial value
self.cluster = None
self.client = None
@@ -317,7 +317,7 @@ def collect_result(self, futures, results, box, submission_time):
print(f"\nFUTURE #{num_future} complete. Time used: {sub_t:.0f} seconds")
# catch result - sub_box
- # and convert the abosulte sub_box into local col/row start/end relative to the primary box
+ # and convert the absolute sub_box into local col/row start/end relative to the primary box
# to assemble the result from each worker
sub_box = sub_results[-1]
x0, y0, x1, y1 = sub_box
diff --git a/src/mintpy/objects/colors.py b/src/mintpy/objects/colors.py
index 6406bce57..d356f356a 100644
--- a/src/mintpy/objects/colors.py
+++ b/src/mintpy/objects/colors.py
@@ -387,7 +387,7 @@ def cmap_map(function, cmap):
cdict = cmap._segmentdata
step_dict = {}
- # Firt get the list of points where the segments start or end
+ # First get the list of points where the segments start or end
for key in ('red', 'green', 'blue'):
step_dict[key] = list(map(lambda x: x[0], cdict[key]))
step_list = sum(step_dict.values(), [])
diff --git a/src/mintpy/objects/coord.py b/src/mintpy/objects/coord.py
index 3bdfe0785..837a5e430 100644
--- a/src/mintpy/objects/coord.py
+++ b/src/mintpy/objects/coord.py
@@ -185,7 +185,7 @@ def _get_lookup_row_col(self, y, x, y_factor=10, x_factor=10, geo_coord=False, d
row, col = np.nanmean(np.where(mask_yx), axis=1)
if any(np.isnan(i) for i in [row, col]):
- raise RuntimeError(f'No coresponding coordinate found for y/x: {y}/{x}')
+ raise RuntimeError(f'No corresponding coordinate found for y/x: {y}/{x}')
return row, col
diff --git a/src/mintpy/objects/euler_pole.py b/src/mintpy/objects/euler_pole.py
index beacffa94..92294c617 100644
--- a/src/mintpy/objects/euler_pole.py
+++ b/src/mintpy/objects/euler_pole.py
@@ -398,7 +398,7 @@ def get_velocity_enu(self, lat, lon, alt=0.0, ellps=True, print_msg=True):
ve, vn, vu = transform_xyz_enu(lat, lon, x=vx, y=vy, z=vz)
# enforce zero vertical velocitpy when ellps=False
- # to avoid artefacts due to numerical precision
+ # to avoid artifacts due to numerical precision
if not ellps:
if isinstance(lat, np.ndarray):
vu[:] = 0
@@ -545,10 +545,10 @@ def transform_xyz_enu(lat, lon, x=None, y=None, z=None, e=None, n=None, u=None):
def read_plate_outline(pmm_name='GSRM', plate_name=None):
"""Read the plate boundaries for the given plate motion model.
- Paramters: pmm_name - str, plate motion (model) name
- plate_name - str, plate name of interest, return all plates if None
- Returns: outline - dict, a dictionary that contains lists of vertices in lat/lon for all plates
- OR shapely.geometry.polygon.Polygon object, boundary of the given "plate".
+ Parameters: pmm_name - str, plate motion (model) name
+ plate_name - str, plate name of interest, return all plates if None
+ Returns: outline - dict, a dictionary that contains lists of vertices in lat/lon for all plates
+ OR shapely.geometry.polygon.Polygon object, boundary of the given "plate".
"""
# check input
@@ -631,7 +631,7 @@ def plot_plate_motion(plate_boundary, epole_obj, center_lalo=None, qscale=200, q
Parameters: plate_boundary - shapely.geometry.Polygon object
epole_obj - mintpy.objects.euler_pole.EulerPole object
- center_lalo - list of 2 float, center the map at this latitute, longitude
+ center_lalo - list of 2 float, center the map at this latitude, longitude
qscale - float, scaling factor of the quiver
qunit - float, length of the quiver legend in mm/yr
satellite_height - height of the perspective view looking in meters
@@ -658,8 +658,8 @@ def _sample_coords_within_polygon(polygon_obj, ny=10, nx=10):
"""Make a set of points inside the defined sphericalpolygon object.
Parameters: polygon_obj - shapely.geometry.Polygon, a polygon object in lat/lon.
- ny - int, number of intial sample points in the y (lat) direction.
- nx - int, number of intial sample points in the x (lon) direction.
+ ny - int, number of initial sample points in the y (lat) direction.
+ nx - int, number of initial sample points in the x (lon) direction.
Returns: sample_lats - 1D np.ndarray, sample coordinates in the y (lat) direction.
sample_lons - 1D np.ndarray, sample coordinates in the x (lon) direction.
"""
diff --git a/src/mintpy/objects/gps.py b/src/mintpy/objects/gps.py
index 76e97d413..354e5a3e8 100644
--- a/src/mintpy/objects/gps.py
+++ b/src/mintpy/objects/gps.py
@@ -508,7 +508,7 @@ def get_los_geometry(self, geom_obj, print_msg=False):
az_angle = ut.heading2azimuth_angle(float(geom_obj['HEADING']))
else:
- raise ValueError(f'input geom_obj is neight str nor dict: {geom_obj}')
+ raise ValueError(f'input geom_obj is neither str nor dict: {geom_obj}')
return inc_angle, az_angle
diff --git a/src/mintpy/objects/resample.py b/src/mintpy/objects/resample.py
index ef1d25a18..ce7b503af 100644
--- a/src/mintpy/objects/resample.py
+++ b/src/mintpy/objects/resample.py
@@ -206,7 +206,7 @@ def run_resample(self, src_data, box_ind=0, print_msg=True):
if self.software == 'pyresample':
# move 1st/time dimension to the last
- # so that rows/cols axis are the frist, as required by pyresample
+ # so that rows/cols axes are the first, as required by pyresample
if len(src_data.shape) == 3:
src_data = np.moveaxis(src_data, 0, -1)
@@ -246,7 +246,7 @@ def get_num_box(src_file=None, max_memory=0, scale_fac=3.0):
max_memory - float, memory size in GB
scale_fac - float, scale factor from data size to memory used
empirically estimated.
- Returns: num_box - int, number of boxes to be splitted
+ Returns: num_box - int, number of boxes to be split
"""
num_box = 1
@@ -393,7 +393,7 @@ def find_valid_lat_lon(lat, lon):
bin_value, bin_edge = np.histogram(data[mask], bins=10)
# if there is anomaly, histogram won't be evenly distributed
while np.max(bin_value) > np.sum(zero_mask) * 0.3:
- # find the continous bins where the largest bin is --> normal data range
+ # find the continuous bins where the largest bin is --> normal data range
bin_value_thres = ut.median_abs_deviation_threshold(bin_value, cutoff=3)
bin_label = ndimage.label(bin_value > bin_value_thres)[0]
idx = np.where(bin_label == bin_label[np.argmax(bin_value)])[0]
diff --git a/src/mintpy/objects/stack.py b/src/mintpy/objects/stack.py
index 655bf6813..59d7ce6ab 100644
--- a/src/mintpy/objects/stack.py
+++ b/src/mintpy/objects/stack.py
@@ -1144,7 +1144,7 @@ def get_closure_phase_index(self, conn, dropIfgram=True):
def get_sequential_closure_phase(self, box, conn, post_proc=None):
- """Computes wrapped sequential closure phases for a given conneciton level.
+ """Computes wrapped sequential closure phases for a given connection level.
Reference: Equation (21) in Zheng et al. (2022, TGRS)
For conn = 5, seq_closure_phase = p12 + p23 + p34 + p45 + p56 - p16.
diff --git a/src/mintpy/objects/stackDict.py b/src/mintpy/objects/stackDict.py
index 996a22d4e..7cf963cde 100644
--- a/src/mintpy/objects/stackDict.py
+++ b/src/mintpy/objects/stackDict.py
@@ -414,7 +414,7 @@ def get_metadata(self, family=IFGRAM_DSET_NAMES[0]):
########################################################################################
class geometryDict:
"""
- Geometry object for Lat, Lon, Heigt, Incidence, Heading, Bperp, ... from the same platform and track.
+ Geometry object for Lat, Lon, Height, Incidence, Heading, Bperp, ... from the same platform and track.
Example:
from mintpy.utils import readfile
@@ -476,7 +476,7 @@ def get_slant_range_distance(self, box=None, xstep=1, ystep=1):
if 'Y_FIRST' in self.extraMetadata.keys():
# for dataset in geo-coordinates, use:
# 1) incidenceAngle matrix if available OR
- # 2) contant value from SLANT_RANGE_DISTANCE.
+ # 2) constant value from SLANT_RANGE_DISTANCE.
ds_name = 'incidenceAngle'
key = 'SLANT_RANGE_DISTANCE'
if ds_name in self.dsNames:
@@ -491,7 +491,7 @@ def get_slant_range_distance(self, box=None, xstep=1, ystep=1):
data = ut.incidence_angle2slant_range_distance(self.extraMetadata, inc_angle)
elif key in self.extraMetadata.keys():
- print(f'geocoded input, use contant value from metadata {key}')
+ print(f'geocoded input, use constant value from metadata {key}')
length = int(self.extraMetadata['LENGTH'])
width = int(self.extraMetadata['WIDTH'])
range_dist = float(self.extraMetadata[key])
@@ -529,9 +529,9 @@ def get_incidence_angle(self, box=None, xstep=1, ystep=1):
return None
if 'Y_FIRST' in self.extraMetadata.keys():
- # for dataset in geo-coordinates, use contant value from INCIDENCE_ANGLE.
+ # for dataset in geo-coordinates, use constant value from INCIDENCE_ANGLE.
key = 'INCIDENCE_ANGLE'
- print(f'geocoded input, use contant value from metadata {key}')
+ print(f'geocoded input, use constant value from metadata {key}')
if key in self.extraMetadata.keys():
length = int(self.extraMetadata['LENGTH'])
width = int(self.extraMetadata['WIDTH'])
@@ -913,7 +913,7 @@ def getDatasetNames(self):
##################
# Despite the observation and quality files, the geometry may not exist
- # for all pairs. Therfore we need to look at all pairs and get possible
+ # for all pairs. Therefore we need to look at all pairs and get possible
# dataset names.
self.dsetGeometryNames = []
for pair in pairs:
diff --git a/src/mintpy/plot_network.py b/src/mintpy/plot_network.py
index 817f4d971..aca43ce49 100644
--- a/src/mintpy/plot_network.py
+++ b/src/mintpy/plot_network.py
@@ -68,7 +68,7 @@ def read_network_info(inps):
# cohList
inps.cohList = np.loadtxt(inps.file, dtype=bytes).astype(float)[:,1]
else:
- raise ValueError('un-recognized input file extention:', ext)
+ raise ValueError('un-recognized input file extension:', ext)
print(f'number of acquisitions: {len(inps.dateList)}')
print(f'number of interferograms: {len(inps.date12List)}')
diff --git a/src/mintpy/plot_transection.py b/src/mintpy/plot_transection.py
index 3de8cc624..f9f9d796c 100644
--- a/src/mintpy/plot_transection.py
+++ b/src/mintpy/plot_transection.py
@@ -221,7 +221,7 @@ def draw_transection(self, start_yx, end_yx, start_lalo=None, end_lalo=None):
dist_unit = 'km'
# plot
- # update distance values by excluding the commonly masked out pixels in the begining
+ # update distance values by excluding the commonly masked out pixels in the beginning
self.ax_txn.scatter(
x=(txn['distance'] - min_dist) * dist_scale,
y=txn['value'] - self.offset[i],
diff --git a/src/mintpy/reference_point.py b/src/mintpy/reference_point.py
index d63d47fb6..2f8450225 100644
--- a/src/mintpy/reference_point.py
+++ b/src/mintpy/reference_point.py
@@ -213,7 +213,7 @@ def onclick(event):
# plt.close(fig)
else:
print('\nWARNING:')
- print('The selectd pixel has NaN value in data.')
+ print('The selected pixel has NaN value in data.')
print('Try a difference location please.')
fig.canvas.mpl_connect('button_press_event', onclick)
diff --git a/src/mintpy/save_gbis.py b/src/mintpy/save_gbis.py
index b222012bc..7fed2c1c8 100644
--- a/src/mintpy/save_gbis.py
+++ b/src/mintpy/save_gbis.py
@@ -42,7 +42,7 @@ def read_data(inps):
if k == 'velocity':
if not inps.dset:
inps.dset = 'velocity'
- print('No selected datset, assuming "velocity" and continue.')
+ print('No selected dataset, assuming "velocity" and continue.')
inps.phase, atr = readfile.read(inps.file, datasetName=inps.dset)
# velocity to displacement
diff --git a/src/mintpy/save_hdfeos5.py b/src/mintpy/save_hdfeos5.py
index 8c05b88c2..369cf0cdb 100644
--- a/src/mintpy/save_hdfeos5.py
+++ b/src/mintpy/save_hdfeos5.py
@@ -214,7 +214,7 @@ def get_output_filename(metadata, suffix=None, update_mode=False, subset_mode=Fa
SW += str(metadata['beam_swath'])
RELORB = "{:03d}".format(int(metadata['relative_orbit']))
- # Frist and/or Last Frame
+ # First and/or Last Frame
frame1 = metadata['first_frame']
frame2 = metadata['last_frame']
FRAME = f"{int(frame1):04d}"
diff --git a/src/mintpy/save_roipac.py b/src/mintpy/save_roipac.py
index edc27b023..381fba05f 100644
--- a/src/mintpy/save_roipac.py
+++ b/src/mintpy/save_roipac.py
@@ -48,7 +48,7 @@ def read_data(inps):
# read/prepare data
if not inps.dset:
inps.dset = 'velocity'
- print('No selected datset, assuming "velocity" and continue.')
+ print('No selected dataset, assuming "velocity" and continue.')
data, atr = readfile.read(inps.file, datasetName=inps.dset)
# convert velocity to cumulative displacement
diff --git a/src/mintpy/simulation/fractal.py b/src/mintpy/simulation/fractal.py
index c9f890395..de6b4f7b0 100644
--- a/src/mintpy/simulation/fractal.py
+++ b/src/mintpy/simulation/fractal.py
@@ -33,7 +33,7 @@
def fractal_surface_atmos(shape=(128, 128), resolution=60., p0=1., freq0=1e-3,
regime=(0.001, 0.999, 1.00), beta=(5./3., 8./3., 2./3.)):
- """Simulate an isotropic 2D fractal surface with a power law behavior, which cooresponds with the
+ """Simulate an isotropic 2D fractal surface with a power law behavior, which corresponds with the
[-5/3, -8/3, -2/3] power law.
E.g. equation (4.7.28) from Hanssen (2001):
@@ -137,13 +137,13 @@ def fractal_surface_atmos(shape=(128, 128), resolution=60., p0=1., freq0=1e-3,
def get_power_spectral_density(data, resolution=60., freq0=1e-3, display=False, outfig=None):
"""Get the radially averaged 1D spectrum (power density) of input 2D matrix
- Check Table 4.5 in Hanssen, 2001 (Page 143) for explaination of outputs.
+ Check Table 4.5 in Hanssen, 2001 (Page 143) for explanation of outputs.
Python translation of checkfr.m (Ramon Hanssen, 2000)
Parameters: data : 2D np.array (free from NaN value), displacement in m.
resolution : float, spatial resolution of input data in meters
- freq0 : float, reference spatial freqency in cycle / m.
+ freq0 : float, reference spatial frequency in cycle / m.
display : bool, display input data and its calculated 1D power spectrum
Returns: p0 : float, power spectral density at reference frequency in m^2
beta : float, slope of power profile in loglog scale
@@ -238,7 +238,7 @@ def power_slope(freq, psd, freq0=1e-3):
Parameters: freq : 1D / 2D np.array in cycle / m.
psd : 1D / 2D np.array for the power spectral density
- freq0 : reference freqency in cycle / m.
+ freq0 : reference frequency in cycle / m.
Returns: p0 : float, power spectral density at reference frequency
in the same unit as the input psd.
beta : float, slope of power profile in loglog scale
diff --git a/src/mintpy/simulation/simulation.py b/src/mintpy/simulation/simulation.py
index acfa9fc81..824421a0b 100644
--- a/src/mintpy/simulation/simulation.py
+++ b/src/mintpy/simulation/simulation.py
@@ -1,4 +1,4 @@
-"""Miscellaneous utlities for simulation."""
+"""Miscellaneous utilities for simulation."""
############################################################
# Program is part of MintPy #
# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi #
diff --git a/src/mintpy/smallbaselineApp.py b/src/mintpy/smallbaselineApp.py
index 2e062c10f..2276a8a33 100644
--- a/src/mintpy/smallbaselineApp.py
+++ b/src/mintpy/smallbaselineApp.py
@@ -57,7 +57,7 @@ def __init__(self, customTemplateFile=None, workDir=None):
self.cwd = os.path.abspath(os.getcwd())
def open(self):
- """The starting point of the workflow. It runs everytime.
+ """The starting point of the workflow. It runs every time.
It 1) grab project name if given
2) go to work directory
3) get and read template(s) options
@@ -117,7 +117,7 @@ def _read_template(self):
print('update default template based on input custom template')
self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)
- # 2) backup custome/default template file in inputs/pic folder
+ # 2) backup custom/default template file in inputs/pic folder
flen = len(os.path.basename(self.templateFile))
if self.customTemplateFile:
flen = max(flen, len(os.path.basename(self.customTemplateFile)))
diff --git a/src/mintpy/solid_earth_tides.py b/src/mintpy/solid_earth_tides.py
index be1320cb4..ca2faa44a 100644
--- a/src/mintpy/solid_earth_tides.py
+++ b/src/mintpy/solid_earth_tides.py
@@ -3,7 +3,7 @@
# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi #
# Author: Zhang Yunjun, Sep 2020 #
############################################################
-# Recomend import:
+# Recommend import:
# from mintpy import solid_earth_tides as SET
diff --git a/src/mintpy/spatial_filter.py b/src/mintpy/spatial_filter.py
index 338f26b03..cce901fc7 100644
--- a/src/mintpy/spatial_filter.py
+++ b/src/mintpy/spatial_filter.py
@@ -50,7 +50,7 @@ def filter_data(data, filter_type, filter_par=None):
data_filt = data - lp_data
elif filter_type == "lowpass_gaussian":
- # ORIGNAL: data_filt = filters.gaussian(data, sigma=filter_par)
+ # ORIGINAL: data_filt = filters.gaussian(data, sigma=filter_par)
# nan pixels can enlarge to big holes depending on the size of your gaussian kernel
# we can do normalized convolution (https://stackoverflow.com/a/36307291/7128154) as below:
V=np.array(data)
diff --git a/src/mintpy/tropo_pyaps3.py b/src/mintpy/tropo_pyaps3.py
index 9a91162fc..49867cd0d 100644
--- a/src/mintpy/tropo_pyaps3.py
+++ b/src/mintpy/tropo_pyaps3.py
@@ -446,7 +446,7 @@ def dload_grib_files(grib_files, tropo_model='ERA5', snwe=None):
pa.NARRdload(date_list2dload, hour, grib_dir)
except:
if i < 3:
- print(f'WARNING: the {i} attampt to download failed, retry it.\n')
+ print(f'WARNING: the {i} attempt to download failed, retry it.\n')
else:
print('\n\n'+'*'*50)
print('WARNING: downloading failed for 3 times, stop trying and continue.')
diff --git a/src/mintpy/tsview.py b/src/mintpy/tsview.py
index 1dbca6c17..61163dfec 100644
--- a/src/mintpy/tsview.py
+++ b/src/mintpy/tsview.py
@@ -99,7 +99,7 @@ def read_init_info(inps):
error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
inps.error_ts = error_fc[:, 1].astype(np.float32)*inps.unit_fac
- # update error file with exlcude date
+ # update error file with exclude date
if inps.ex_date_list:
e_ts = inps.error_ts[:]
inps.ex_error_ts = e_ts[inps.ex_flag == 0]
@@ -243,7 +243,7 @@ def subset_and_multilook_yx(yx, pix_box=None, multilook_num=1):
def read_exclude_date(input_ex_date, dateListAll):
- """Read exlcude list of dates
+ """Read exclude list of dates
Parameters: input_ex_date : list of string in YYYYMMDD or filenames for excluded dates
dateListAll : list of string in YYYYMMDD for all dates
Returns: ex_date_list : list of string in YYYYMMDD for excluded dates
@@ -507,7 +507,7 @@ def get_model_param_str(model, ds_dict, disp_unit='cm'):
def fit_time_func(model, date_list, ts_dis, disp_unit='cm', G_fit=None, conf_level=0.95, seconds=0):
- """Fit a suite of fime functions to the time series.
+ """Fit a suite of time functions to the time series.
Equations: Gm = d
Parameters: model - dict of time functions, check utils.time_func.estimate_time_func() for details.
date_list - list of dates in YYYYMMDD format
diff --git a/src/mintpy/utils/attribute.py b/src/mintpy/utils/attribute.py
index c56964506..0d8336c2e 100644
--- a/src/mintpy/utils/attribute.py
+++ b/src/mintpy/utils/attribute.py
@@ -231,7 +231,7 @@ def update_attribute4subset(atr_in, subset_box, print_msg=True):
atr['XMAX'] = str(sub_x[1]-sub_x[0] - 1)
vprint('update LENGTH, WIDTH, Y/XMAX')
- # Subset atribute
+ # Subset attribute
atr['SUBSET_YMAX'] = str(sub_y[1] + int(atr_in.get('SUBSET_YMIN', '0')))
atr['SUBSET_YMIN'] = str(sub_y[0] + int(atr_in.get('SUBSET_YMIN', '0')))
atr['SUBSET_XMAX'] = str(sub_x[1] + int(atr_in.get('SUBSET_XMIN', '0')))
diff --git a/src/mintpy/utils/isce_utils.py b/src/mintpy/utils/isce_utils.py
index 345218a90..9ec357663 100644
--- a/src/mintpy/utils/isce_utils.py
+++ b/src/mintpy/utils/isce_utils.py
@@ -724,7 +724,7 @@ def multilook_number2resolution(meta_file, az_looks, rg_looks):
def resolution2multilook_number(meta_file, resolution):
"""
- Calculate multilook number for InSAR processing given a disired output resolution on the ground
+ Calculate multilook number for InSAR processing given a desired output resolution on the ground
Parameters: meta_file : str, path of ISCE metadata file, i.e. IW1.xml, data.dat
resolution : float, target output resolution on the ground in meters
@@ -817,7 +817,7 @@ def get_IPF(proj_dir, ts_file):
# reference date
m_date = [i for i in date_list if not os.path.isdir(os.path.join(s_dir, i))][0]
- # grab IPF numver
+ # grab IPF number
IPF_IW1, IPF_IW2, IPF_IW3 = [], [], []
prog_bar = ptime.progressBar(maxValue=num_date)
for i in range(num_date):
@@ -911,10 +911,10 @@ def get_sensing_datetime_list(proj_dir, date_list=None):
############################## Standard Processing ###########################################
def gaussian_kernel(sx, sy, sig_x, sig_y):
- '''Generate a guassian kernal (with all elements sum to 1).
+ '''Generate a Gaussian kernel (with all elements summing to 1).
- Parameters: sx/y - int, dimensions of kernal
- sig_x/y - float, standard deviation of the guassian distribution
+ Parameters: sx/y - int, dimensions of kernel
+ sig_x/y - float, standard deviation of the Gaussian distribution
'''
# ensure sx/y are odd number
sx += 1 if np.mod(sx, 2) == 0 else 0
@@ -1203,7 +1203,7 @@ def unwrap_icu(int_file, unw_file):
# run ICU
icu_obj = Icu()
icu_obj.filteringFlag = False
- icu_obj.useAmplitudeFalg = False
+ icu_obj.useAmplitudeFlag = False
icu_obj.singlePatch = True
icu_obj.initCorrThresdhold = 0.1
icu_obj.icu(intImage=int_img, unwImage=unw_img)
diff --git a/src/mintpy/utils/plot.py b/src/mintpy/utils/plot.py
index 87c2af725..59cdbfd00 100644
--- a/src/mintpy/utils/plot.py
+++ b/src/mintpy/utils/plot.py
@@ -111,7 +111,7 @@ def auto_figure_size(ds_shape, scale=1.0, disp_cbar=False, disp_slider=False,
scale - floag, scale the final figure size
disp_cbar/slider - bool, plot colorbar on the right / slider on the bottom
cbar/slider_ratio - float, size ratio of the additional colobar / slider
- Returns: figsize - list of 2 float for the figure size in [width, lenght] in inches
+ Returns: figsize - list of 2 float for the figure size in [width, length] in inches
"""
# figure shape
fig_shape = list(ds_shape)[::-1]
@@ -262,7 +262,7 @@ def auto_flip_direction(metadata, ax=None, print_msg=True):
def auto_multilook_num(box, num_time, max_memory=4.0, print_msg=True):
- """Calcualte the default/auto multilook number based on the input 3D shape.
+ """Calculate the default/auto multilook number based on the input 3D shape.
Parameters: box - tuple of 4 int in (x0, y0, x1, y1) for the spatial bounding box
num_time - int, the 3rd / time dimension size
max_memory - float, max memory in GB
@@ -483,7 +483,7 @@ def auto_adjust_xaxis_date(ax, datevector, fontsize=12, every_year=None, buffer_
# Label font size
ax.tick_params(labelsize=fontsize)
- # fig2.autofmt_xdate() #adjust x overlap by rorating, may enble again
+ # fig2.autofmt_xdate() #adjust x overlap by rotating, may enable again
return ax, xmin, xmax
@@ -856,7 +856,7 @@ def plot_coherence_matrix(ax, date12List, cohList, date12List_drop=[], p_dict={}
date12List : list of date12 in YYYYMMDD_YYYYMMDD format
cohList : list of float, coherence value
date12List_drop : list of date12 for date12 marked as dropped
- p_dict : dict of plot settting
+ p_dict : dict of plot setting
Returns: ax : matplotlib.pyplot.Axes
coh_mat : 2D np.array in size of [num_date, num_date]
im : mappable
@@ -1744,7 +1744,7 @@ def read_mask(fname, mask_file=None, datasetName=None, box=None, xstep=1, ystep=
else:
mask_file = None
- # read mask_data from file if inputed
+ # read mask_data from file if provided
mask_data = None
if os.path.isfile(str(mask_file)):
try:
diff --git a/src/mintpy/utils/readfile.py b/src/mintpy/utils/readfile.py
index 6406a0b49..205df4c35 100644
--- a/src/mintpy/utils/readfile.py
+++ b/src/mintpy/utils/readfile.py
@@ -1019,7 +1019,7 @@ def read_attribute(fname, datasetName=None, metafile_ext=None):
atr = giantIfgramStack(fname).get_metadata()
elif len(atr) > 0 and 'WIDTH' in atr.keys():
- # use the attribute at root level, which is already read from the begining
+ # use the attribute at root level, which is already read from the beginning
# grab attribute of dataset if specified, e.g. UNIT, no-data value, etc.
if datasetName and datasetName in d1_list:
@@ -1155,7 +1155,7 @@ def get_hdf5_dataset(name, obj):
raise FileNotFoundError('No UAVSAR *.ann file found!')
else:
- # grab all existed potential metadata file given the data file in prefered order/priority
+ # grab all existing potential metadata files given the data file in preferred order/priority
# .aux.xml file does not have geo-coordinates info
# .vrt file (e.g. incLocal.rdr.vrt from isce) does not have band interleavee info
metafiles = [
@@ -1359,7 +1359,7 @@ def read_template(fname, delimiter='=', skip_chars=None):
Parameters: fname - str, full path to the template file
delimiter - str, string to separate the key and value
- skip_chars - list of str, skip certain charaters in values
+ skip_chars - list of str, skip certain characters in values
Returns: template - dict, file content
Examples: template = read_template('KyushuAlosAT424.txt')
template = read_template('smallbaselineApp.cfg')
@@ -2192,7 +2192,7 @@ def read_complex_float32(fname, box=None, byte_order='l', band='phase'):
ROI_PAC file: .slc, .int, .amp
- Data is sotred as:
+ Data is stored as:
real, imaginary, real, imaginary, ...
real, imaginary, real, imaginary, ...
...
diff --git a/src/mintpy/utils/s1_utils.py b/src/mintpy/utils/s1_utils.py
index b0c3da37c..22ec99057 100644
--- a/src/mintpy/utils/s1_utils.py
+++ b/src/mintpy/utils/s1_utils.py
@@ -4,7 +4,7 @@
# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi #
# Author: Zhang Yunjun, Aug 2021 #
############################################################
-# Recommand import:
+# Recommended import:
# from mintpy.utils import s1_utils
diff --git a/src/mintpy/utils/solvers/lstl1.py b/src/mintpy/utils/solvers/lstl1.py
index abdc96c8b..9ccd97acf 100644
--- a/src/mintpy/utils/solvers/lstl1.py
+++ b/src/mintpy/utils/solvers/lstl1.py
@@ -2,7 +2,7 @@
# Author: Zhang Yunjun, 10 Jan 2019
#
# The least-norm problem is interesting only when m < n, i.e. when the equation Ax = b is underdetermined.
-# It uses prior information and guess that x is more likly to be small (as measured by ||*||) than large.
+# It uses prior information and guesses that x is more likely to be small (as measured by ||*||) than large.
# Tge least-norm problem chooses the estimate of x the one that is the smallest among all solutions that
# are consistent with measurements Ax = b.
#
diff --git a/src/mintpy/utils/time_func.py b/src/mintpy/utils/time_func.py
index fecd61efe..b877b48d5 100644
--- a/src/mintpy/utils/time_func.py
+++ b/src/mintpy/utils/time_func.py
@@ -179,7 +179,7 @@ def inps2model(inps, date_list=None, print_msg=True):
def get_num_param(model):
- """Get the number of unknown paramters from the given time function configuration.
+ """Get the number of unknown parameters from the given time function configuration.
Parameters: model - dict, time functions config, e.g. {cfg}
Returns: num_param - int, number of unknown parameters
diff --git a/src/mintpy/utils/utils0.py b/src/mintpy/utils/utils0.py
index d00b334fe..89e41b92b 100644
--- a/src/mintpy/utils/utils0.py
+++ b/src/mintpy/utils/utils0.py
@@ -567,7 +567,7 @@ def xyz_to_local_radius(xyz):
# Definition of angles:
# (los_)inc_angle - the incidence angle of the LOS vector (from the ground to the SAR platform)
# measured from vertical. Used in isce2.
-# (los_)az_angle - the azimuth angle of the LOS vecotr (from the ground to the SAR platform)
+# (los_)az_angle - the azimuth angle of the LOS vector (from the ground to the SAR platform)
# measured from the north, with anti-clockwise as positive. Used in isce2.
# orb_az_angle - the azimuth angle of the SAR platform's orbit (along-track direction)
# measured from the north, with anti-clockwise as positive
@@ -696,7 +696,7 @@ def get_unit_vector4component_of_interest(los_inc_angle, los_az_angle, comp='enu
comps = [
'enu2los', 'en2los', 'hz2los', 'horz2los', 'u2los', 'vert2los', # radar LOS / cross-track
'en2az', 'hz2az', 'orb_az', 'orbit_az', # radar azimuth / along-track
- 'vert', 'vertical', 'horz', 'horizontal', # vertical / arbitraty horizontal
+ 'vert', 'vertical', 'horz', 'horizontal', # vertical / arbitrary horizontal
]
if comp not in comps:
diff --git a/src/mintpy/utils/writefile.py b/src/mintpy/utils/writefile.py
index 3862d3c7c..d2db160b8 100644
--- a/src/mintpy/utils/writefile.py
+++ b/src/mintpy/utils/writefile.py
@@ -347,7 +347,7 @@ def layout_hdf5(fname, ds_name_dict=None, metadata=None, ds_unit_dict=None, ref_
if key in ['connectComponent']:
ds_comp = 'lzf'
- # changable dataset shape
+ # changeable dataset shape
if len(data_shape) == 3:
max_shape = (None, data_shape[1], data_shape[2])
else:
@@ -508,7 +508,7 @@ def write_roipac_rsc(metadata, out_file, update_mode=False, print_msg=False):
"""Write attribute dict into ROI_PAC .rsc file
Inputs:
metadata : dict, attributes dictionary
- out_file : rsc file name, to which attribute is writen
+ out_file : rsc file name, to which attribute is written
update_mode : bool, skip writing if
1) output file existed AND
2) no new metadata key/value
@@ -758,7 +758,7 @@ def write_float32(*args):
Format of the binary file is same as roi_pac unw, cor, or hgt data.
should rename to write_rmg_float32()
- Exmaple:
+ Example:
write_float32(phase, out_file)
write_float32(amp, phase, out_file)
"""
diff --git a/src/mintpy/view.py b/src/mintpy/view.py
index d851c953f..f9e3fabf0 100644
--- a/src/mintpy/view.py
+++ b/src/mintpy/view.py
@@ -1260,7 +1260,7 @@ def format_coord(x, y):
# ignore dataset family info if there is only one type
if len(inps.dsetFamilyList) == 1 and '-' in title_str:
title_str = title_str.split('-')[1]
- # for ifgramStack, show index in the date12 list to facilitate the network modfication
+ # for ifgramStack, show index in the date12 list to facilitate the network modification
if inps.atr['FILE_TYPE'] == 'ifgramStack':
title_ind = inps.date12List.index(title_str)
diff --git a/tests/smallbaselineApp.py b/tests/smallbaselineApp.py
index 2d71dfe46..78e809f5f 100755
--- a/tests/smallbaselineApp.py
+++ b/tests/smallbaselineApp.py
@@ -149,7 +149,7 @@ def test_smallbaselineApp(dset_name, test_dir, fresh_start=True, test_pyaps=Fals
print('remove existing metadata file:', meta_file)
os.remove(meta_file)
- # runing smallbaselineApp
+ # running smallbaselineApp
# Note: execute script in command line instead of call main() for a clean run
# to avoid strange error from prep_aria: not recognized as a supported file format.
# which only occurs if all datasets are tested in one run
@@ -189,14 +189,14 @@ def main(iargs=None):
for i in range(num_dset):
dset_name = inps.dset_name[i]
print('#'*100)
- print(f'Start testing smallbaselineApp workflow on exmaple dataset {i+1}/{num_dset}: {dset_name}')
+ print(f'Start testing smallbaselineApp workflow on example dataset {i+1}/{num_dset}: {dset_name}')
test_smallbaselineApp(dset_name,
test_dir=inps.test_dir,
fresh_start=inps.fresh_start,
test_pyaps=inps.test_pyaps,
test_isce=inps.test_isce)
print('#'*100)
- print(f' PASS testing of smallbaselineApp workflow on exmaple dataset {i+1}/{num_dset}: {dset_name}')
+ print(f' PASS testing of smallbaselineApp workflow on example dataset {i+1}/{num_dset}: {dset_name}')
print('#'*100+'\n'*3)
# print message