From e07e0d8aee83844fb71496be64c076a5701d25fc Mon Sep 17 00:00:00 2001 From: Tyler Sutterley Date: Sat, 3 Sep 2022 15:34:18 -0700 Subject: [PATCH] refactor: move all ICESat, ICESat-2 and OIB code to separate repo to address #110 (#111) update version.txt --- .github/workflows/Dockerfile | 6 +- .github/workflows/python-request.yml | 1 + README.rst | 2 - .../compute_LPET_ICESat2_ATL03.rst | 19 - .../compute_LPET_ICESat2_ATL06.rst | 17 - .../compute_LPET_ICESat2_ATL07.rst | 17 - .../compute_LPET_ICESat2_ATL10.rst | 17 - .../compute_LPET_ICESat2_ATL11.rst | 17 - .../compute_LPET_ICESat2_ATL12.rst | 17 - .../compute_LPET_ICESat_GLA12.rst | 17 - .../compute_LPET_icebridge_data.rst | 17 - .../compute_LPT_ICESat_GLA12.rst | 18 - .../compute_LPT_icebridge_data.rst | 18 - .../compute_OPT_ICESat_GLA12.rst | 19 - .../compute_OPT_icebridge_data.rst | 19 - .../compute_tides_ICESat2_ATL03.rst | 27 - .../compute_tides_ICESat2_ATL06.rst | 25 - .../compute_tides_ICESat2_ATL07.rst | 22 - .../compute_tides_ICESat2_ATL10.rst | 22 - .../compute_tides_ICESat2_ATL11.rst | 26 - .../compute_tides_ICESat2_ATL12.rst | 22 - .../compute_tides_ICESat_GLA12.rst | 25 - .../compute_tides_icebridge_data.rst | 25 - doc/source/getting_started/Citations.rst | 2 - .../getting_started/Getting-Started.rst | 12 - doc/source/index.rst | 20 - environment.yml | 3 - pyTMD/time.py | 122 +-- requirements.txt | 2 - scripts/compute_LPET_ICESat2_ATL03.py | 465 ---------- scripts/compute_LPET_ICESat2_ATL06.py | 468 ---------- scripts/compute_LPET_ICESat2_ATL07.py | 498 ----------- scripts/compute_LPET_ICESat2_ATL10.py | 460 ---------- scripts/compute_LPET_ICESat2_ATL11.py | 649 -------------- scripts/compute_LPET_ICESat2_ATL12.py | 460 ---------- scripts/compute_LPET_ICESat_GLA12.py | 360 -------- scripts/compute_LPET_icebridge_data.py | 592 ------------- scripts/compute_LPT_ICESat_GLA12.py | 438 ---------- scripts/compute_LPT_icebridge_data.py | 675 --------------- scripts/compute_OPT_ICESat_GLA12.py | 458 ---------- scripts/compute_OPT_icebridge_data.py | 698 --------------- scripts/compute_tides_ICESat2_ATL03.py | 639 -------------- scripts/compute_tides_ICESat2_ATL06.py | 641 -------------- scripts/compute_tides_ICESat2_ATL07.py | 649 -------------- scripts/compute_tides_ICESat2_ATL10.py | 612 ------------- scripts/compute_tides_ICESat2_ATL11.py | 802 ------------------ scripts/compute_tides_ICESat2_ATL12.py | 612 ------------- scripts/compute_tides_ICESat_GLA12.py | 540 ------------ scripts/compute_tides_icebridge_data.py | 770 ----------------- test/test_equilibrium_tides.py | 110 --- version.txt | 2 +- 51 files changed, 69 insertions(+), 12105 deletions(-) delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL03.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL06.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL07.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL10.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL11.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat2_ATL12.rst delete mode 100644 doc/source/api_reference/compute_LPET_ICESat_GLA12.rst delete mode 100644 doc/source/api_reference/compute_LPET_icebridge_data.rst delete mode 100644 doc/source/api_reference/compute_LPT_ICESat_GLA12.rst delete mode 100644 doc/source/api_reference/compute_LPT_icebridge_data.rst delete mode 100644 doc/source/api_reference/compute_OPT_ICESat_GLA12.rst delete mode 100644 
doc/source/api_reference/compute_OPT_icebridge_data.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL03.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL06.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL07.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL10.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL11.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat2_ATL12.rst delete mode 100644 doc/source/api_reference/compute_tides_ICESat_GLA12.rst delete mode 100644 doc/source/api_reference/compute_tides_icebridge_data.rst delete mode 100644 scripts/compute_LPET_ICESat2_ATL03.py delete mode 100644 scripts/compute_LPET_ICESat2_ATL06.py delete mode 100644 scripts/compute_LPET_ICESat2_ATL07.py delete mode 100644 scripts/compute_LPET_ICESat2_ATL10.py delete mode 100644 scripts/compute_LPET_ICESat2_ATL11.py delete mode 100644 scripts/compute_LPET_ICESat2_ATL12.py delete mode 100644 scripts/compute_LPET_ICESat_GLA12.py delete mode 100644 scripts/compute_LPET_icebridge_data.py delete mode 100644 scripts/compute_LPT_ICESat_GLA12.py delete mode 100644 scripts/compute_LPT_icebridge_data.py delete mode 100644 scripts/compute_OPT_ICESat_GLA12.py delete mode 100644 scripts/compute_OPT_icebridge_data.py delete mode 100644 scripts/compute_tides_ICESat2_ATL03.py delete mode 100644 scripts/compute_tides_ICESat2_ATL06.py delete mode 100644 scripts/compute_tides_ICESat2_ATL07.py delete mode 100644 scripts/compute_tides_ICESat2_ATL10.py delete mode 100644 scripts/compute_tides_ICESat2_ATL11.py delete mode 100644 scripts/compute_tides_ICESat2_ATL12.py delete mode 100644 scripts/compute_tides_ICESat_GLA12.py delete mode 100644 scripts/compute_tides_icebridge_data.py delete mode 100644 test/test_equilibrium_tides.py diff --git a/.github/workflows/Dockerfile b/.github/workflows/Dockerfile index bbe8b4f2..e126ffed 100644 --- a/.github/workflows/Dockerfile +++ b/.github/workflows/Dockerfile @@ -293,7 +293,6 @@ RUN pip3 install --no-cache-dir --no-binary=h5py,cartopy \ h5py \ lxml \ matplotlib \ - mpi4py \ netCDF4 \ numpy \ pandas \ @@ -302,10 +301,7 @@ RUN pip3 install --no-cache-dir --no-binary=h5py,cartopy \ pyyaml \ scikit-learn \ scipy \ - setuptools_scm \ - zarr && \ - pip3 install --no-cache-dir --no-deps git+https://github.com/tsutterley/read-ICESat-2.git && \ - pip3 install --no-cache-dir --no-deps git+https://github.com/tsutterley/read-ATM1b-QFIT-binary.git + setuptools_scm COPY . . diff --git a/.github/workflows/python-request.yml b/.github/workflows/python-request.yml index 03b1e2cf..29b0b183 100644 --- a/.github/workflows/python-request.yml +++ b/.github/workflows/python-request.yml @@ -51,6 +51,7 @@ jobs: - name: Test with pytest run: | pip install --no-deps . 
+ if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi pytest --verbose --capture=no --cov=./ --cov-report=xml \ --username=${{ secrets.EARTHDATA_USERNAME }} \ --password=${{ secrets.EARTHDATA_PASSWORD }} \ diff --git a/README.rst b/README.rst index d63702ef..a2f2aa24 100644 --- a/README.rst +++ b/README.rst @@ -74,8 +74,6 @@ Dependencies - `numpy: Scientific Computing Tools For Python `_ - `pyproj: Python interface to PROJ library `_ - `PyYAML: YAML parser and emitter for Python `_ -- `read-ICESat-2: Python tools to read data from the NASA ICESat-2 mission `_ -- `read-ATM1b-QFIT-binary: Python reader for Airborne Topographic Mapper (ATM) QFIT data products `_ - `scipy: Scientific Tools for Python `_ - `setuptools_scm: manager for python package versions using scm metadata `_ diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL03.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL03.rst deleted file mode 100644 index d3469541..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL03.rst +++ /dev/null @@ -1,19 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL03.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 geolocated photon height data -- Calculated at ATL03 segment level using reference photon geolocation and time -- Segment level corrections can be applied to the individual photon events (PEs) -- Will calculate the long-period tides for all ATL03 segments and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL03.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL03.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL03.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL06.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL06.rst deleted file mode 100644 index 43311931..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL06.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL06.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 land ice elevation data -- Will calculate the long-period tides for all ATL06 segments and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL06.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL06.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL06.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL07.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL07.rst deleted file mode 100644 index fff209f0..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL07.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL07.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 sea ice height data -- Will calculate the long-period tides for all ATL07 segments and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL07.py - -.. 
argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL07.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL07.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL10.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL10.rst deleted file mode 100644 index c0246cbc..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL10.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL10.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 sea ice freeboard data -- Will calculate the long-period tides for all ATL10 segments and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL10.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL10.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL10.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL11.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL11.rst deleted file mode 100644 index 89216541..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL11.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL11.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 annual land ice height data over both along-track and across-track locations -- Will calculate the long-period tides for all ATL11 segments and crossovers and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL11.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL11.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL11.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat2_ATL12.rst b/doc/source/api_reference/compute_LPET_ICESat2_ATL12.rst deleted file mode 100644 index 86bd2391..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat2_ATL12.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================= -compute_LPET_ICESat2_ATL12.py -============================= - -- Calculates long-period equilibrium tidal elevations for correcting ICESat-2 ocean surface height data -- Will calculate the long-period tides for all ATL12 segments and not just ocean segments defined by the ocean tide mask - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat2_ATL12.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat2_ATL12.py - :func: arguments - :prog: compute_LPET_ICESat2_ATL12.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_ICESat_GLA12.rst b/doc/source/api_reference/compute_LPET_ICESat_GLA12.rst deleted file mode 100644 index 4c98ebd5..00000000 --- a/doc/source/api_reference/compute_LPET_ICESat_GLA12.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================ -compute_LPET_ICESat_GLA12.py -============================ - -- Calculates long-period equilibrium tidal elevations for correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data -- Will calculate the long-period tides for all GLAS elevations and not just ocean elevations defined by the ocean tide mask - -`Source code`__ - -.. 
__: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_ICESat_GLA12.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_ICESat_GLA12.py - :func: arguments - :prog: compute_LPET_ICESat_GLA12.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPET_icebridge_data.rst b/doc/source/api_reference/compute_LPET_icebridge_data.rst deleted file mode 100644 index a8ae8368..00000000 --- a/doc/source/api_reference/compute_LPET_icebridge_data.rst +++ /dev/null @@ -1,17 +0,0 @@ -============================== -compute_LPET_icebridge_data.py -============================== - -- Calculates long-period equilibrium tides for correcting Operation IceBridge elevation data -- Uses the summation of fifteen tidal spectral lines from `Cartwright and Edden, (1973) `_ - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPET_icebridge_data.py - -.. argparse:: - :filename: ../../scripts/compute_LPET_icebridge_data.py - :func: arguments - :prog: compute_LPET_icebridge_data.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPT_ICESat_GLA12.rst b/doc/source/api_reference/compute_LPT_ICESat_GLA12.rst deleted file mode 100644 index 88b6b9df..00000000 --- a/doc/source/api_reference/compute_LPT_ICESat_GLA12.rst +++ /dev/null @@ -1,18 +0,0 @@ -=========================== -compute_LPT_ICESat_GLA12.py -=========================== - -- Calculates radial load pole tide displacements for correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data following IERS Convention (2010) guidelines -- `http://maia.usno.navy.mil/conventions/2010officialinfo.php `_ - `http://maia.usno.navy.mil/conventions/chapter7.php `_ - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPT_ICESat_GLA12.py - -.. argparse:: - :filename: ../../scripts/compute_LPT_ICESat_GLA12.py - :func: arguments - :prog: compute_LPT_ICESat_GLA12.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_LPT_icebridge_data.rst b/doc/source/api_reference/compute_LPT_icebridge_data.rst deleted file mode 100644 index c5d9697a..00000000 --- a/doc/source/api_reference/compute_LPT_icebridge_data.rst +++ /dev/null @@ -1,18 +0,0 @@ -============================= -compute_LPT_icebridge_data.py -============================= - -- Calculates radial pole load tide displacements for correcting Operation IceBridge elevation data following IERS Convention (2010) guidelines -- `http://maia.usno.navy.mil/conventions/2010officialinfo.php `_ - `http://maia.usno.navy.mil/conventions/chapter7.php `_ - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_LPT_icebridge_data.py - -..
argparse:: - :filename: ../../scripts/compute_LPT_icebridge_data.py - :func: arguments - :prog: compute_LPT_icebridge_data.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_OPT_ICESat_GLA12.rst b/doc/source/api_reference/compute_OPT_ICESat_GLA12.rst deleted file mode 100644 index 5e03c9fc..00000000 --- a/doc/source/api_reference/compute_OPT_ICESat_GLA12.rst +++ /dev/null @@ -1,19 +0,0 @@ -=========================== -compute_OPT_ICESat_GLA12.py -=========================== - -- Calculates radial ocean pole tide displacements for correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data following IERS Convention (2010) guidelines -- `http://maia.usno.navy.mil/conventions/2010officialinfo.php `_ -- `http://maia.usno.navy.mil/conventions/chapter7.php `_ -- `ftp://tai.bipm.org/iers/conv2010/chapter7/opoleloadcoefcmcor.txt.gz `_ - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_OPT_ICESat_GLA12.py - -.. argparse:: - :filename: ../../scripts/compute_OPT_ICESat_GLA12.py - :func: arguments - :prog: compute_OPT_ICESat_GLA12.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_OPT_icebridge_data.rst b/doc/source/api_reference/compute_OPT_icebridge_data.rst deleted file mode 100644 index c23711d3..00000000 --- a/doc/source/api_reference/compute_OPT_icebridge_data.rst +++ /dev/null @@ -1,19 +0,0 @@ -============================= -compute_OPT_icebridge_data.py -============================= - -- Calculates radial ocean pole tide displacements for correcting Operation IceBridge elevation data following IERS Convention (2010) guidelines -- `http://maia.usno.navy.mil/conventions/2010officialinfo.php `_ -- `http://maia.usno.navy.mil/conventions/chapter7.php `_ -- `ftp://tai.bipm.org/iers/conv2010/chapter7/opoleloadcoefcmcor.txt.gz `_ - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_OPT_icebridge_data.py - -.. argparse:: - :filename: ../../scripts/compute_OPT_icebridge_data.py - :func: arguments - :prog: compute_OPT_icebridge_data.py - :nodescription: - :nodefault: diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL03.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL03.rst deleted file mode 100644 index 81406c9f..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL03.rst +++ /dev/null @@ -1,27 +0,0 @@ -============================== -compute_tides_ICESat2_ATL03.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 photon height data -- Calculated at ATL03 segment level using reference photon geolocation and time -- Segment level corrections can be applied to the individual photon events (PEs) -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL03.py - -.. 
argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL03.py - :func: arguments - :prog: compute_tides_ICESat2_ATL03.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points - - --apply-flexure : @after - Only valid for models containing flexure fields diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL06.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL06.rst deleted file mode 100644 index 76f01b30..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL06.rst +++ /dev/null @@ -1,25 +0,0 @@ -============================== -compute_tides_ICESat2_ATL06.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 land ice elevation data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL06.py - -.. argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL06.py - :func: arguments - :prog: compute_tides_ICESat2_ATL06.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points - - --apply-flexure : @after - Only valid for models containing flexure fields diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL07.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL07.rst deleted file mode 100644 index eaccdbba..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL07.rst +++ /dev/null @@ -1,22 +0,0 @@ -============================== -compute_tides_ICESat2_ATL07.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 sea ice height data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL07.py - -.. argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL07.py - :func: arguments - :prog: compute_tides_ICESat2_ATL07.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL10.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL10.rst deleted file mode 100644 index 018c66fa..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL10.rst +++ /dev/null @@ -1,22 +0,0 @@ -============================== -compute_tides_ICESat2_ATL10.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 sea ice freeboard data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL10.py - -.. 
argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL10.py - :func: arguments - :prog: compute_tides_ICESat2_ATL10.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL11.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL11.rst deleted file mode 100644 index f1c3eb2f..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL11.rst +++ /dev/null @@ -1,26 +0,0 @@ -============================== -compute_tides_ICESat2_ATL11.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 annual land ice height data -- Calculates tidal elevations for both along-track and across-track locations -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL11.py - -.. argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL11.py - :func: arguments - :prog: compute_tides_ICESat2_ATL11.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points - - --apply-flexure : @after - Only valid for models containing flexure fields diff --git a/doc/source/api_reference/compute_tides_ICESat2_ATL12.rst b/doc/source/api_reference/compute_tides_ICESat2_ATL12.rst deleted file mode 100644 index 7153c8ab..00000000 --- a/doc/source/api_reference/compute_tides_ICESat2_ATL12.rst +++ /dev/null @@ -1,22 +0,0 @@ -============================== -compute_tides_ICESat2_ATL12.py -============================== - -- Calculates tidal elevations for correcting ICESat-2 ocean surface height data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat2_ATL12.py - -.. argparse:: - :filename: ../../scripts/compute_tides_ICESat2_ATL12.py - :func: arguments - :prog: compute_tides_ICESat2_ATL12.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points diff --git a/doc/source/api_reference/compute_tides_ICESat_GLA12.rst b/doc/source/api_reference/compute_tides_ICESat_GLA12.rst deleted file mode 100644 index 4dda250c..00000000 --- a/doc/source/api_reference/compute_tides_ICESat_GLA12.rst +++ /dev/null @@ -1,25 +0,0 @@ -============================= -compute_tides_ICESat_GLA12.py -============================= - -- Calculates tidal elevations for correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_ICESat_GLA12.py - -..
argparse:: - :filename: ../../scripts/compute_tides_ICESat_GLA12.py - :func: arguments - :prog: compute_tides_ICESat_GLA12.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points - - --apply-flexure : @after - Only valid for models containing flexure fields diff --git a/doc/source/api_reference/compute_tides_icebridge_data.rst b/doc/source/api_reference/compute_tides_icebridge_data.rst deleted file mode 100644 index 76f68fcb..00000000 --- a/doc/source/api_reference/compute_tides_icebridge_data.rst +++ /dev/null @@ -1,25 +0,0 @@ -=============================== -compute_tides_icebridge_data.py -=============================== - -- Calculates tidal elevations for correcting Operation IceBridge elevation data -- Can use OTIS format tidal solutions provided by Ohio State University and ESR -- Can use Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -- Can use Finite Element Solution (FES) models provided by AVISO - -`Source code`__ - -.. __: https://github.com/tsutterley/pyTMD/blob/main/scripts/compute_tides_icebridge_data.py - -.. argparse:: - :filename: ../../scripts/compute_tides_icebridge_data.py - :func: arguments - :prog: compute_tides_icebridge_data.py - :nodescription: - :nodefault: - - --cutoff -c : @after - * set to ``'inf'`` to extrapolate for all points - - --apply-flexure : @after - Only valid for models containing flexure fields diff --git a/doc/source/getting_started/Citations.rst b/doc/source/getting_started/Citations.rst index ba07a526..033e1b01 100644 --- a/doc/source/getting_started/Citations.rst +++ b/doc/source/getting_started/Citations.rst @@ -49,8 +49,6 @@ This software is also dependent on other commonly used Python packages: - `numpy: Scientific Computing Tools For Python `_ - `pyproj: Python interface to PROJ library `_ - `PyYAML: YAML parser and emitter for Python `_ -- `read-ICESat-2: Python tools to read data from the NASA ICESat-2 mission `_ -- `read-ATM1b-QFIT-binary: Python reader for Airborne Topographic Mapper (ATM) QFIT data products `_ - `scipy: Scientific Tools for Python `_ - `setuptools_scm: manager for python package versions using scm metadata `_ diff --git a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index 8e850d40..65f165f3 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -124,18 +124,6 @@ elevations or currents (zonal and meridional) for each point. --epoch 'seconds since 1970-01-01T00:00:00' --verbose --mode 0o775 \ input_file.H5 output_file.H5 - -There are specific programs for correcting some publicly available elevation datasets: - -- `NASA Operation IceBridge data `_ -- `ICESat GLA12 ice sheet altimetry data `_ -- `ICESat-2 ATL03 geolocated photon data `_ -- `ICESat-2 ATL06 land ice height data `_ -- `ICESat-2 ATL07 sea ice height data `_ -- `ICESat-2 ATL10 sea ice freeboard data `_ -- `ICESat-2 ATL11 annual land ice height data `_ -- `ICESat-2 ATL12 ocean surface height data `_ - Definition Files ################ diff --git a/doc/source/index.rst b/doc/source/index.rst index af934475..780990e5 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -74,27 +74,7 @@ conventions for calculating radial pole tide displacements. 
:caption: Use Cases api_reference/compute_LPET_elevations.rst - api_reference/compute_LPET_icebridge_data.rst - api_reference/compute_LPET_ICESat_GLA12.rst - api_reference/compute_LPET_ICESat2_ATL03.rst - api_reference/compute_LPET_ICESat2_ATL06.rst - api_reference/compute_LPET_ICESat2_ATL07.rst - api_reference/compute_LPET_ICESat2_ATL10.rst - api_reference/compute_LPET_ICESat2_ATL11.rst - api_reference/compute_LPET_ICESat2_ATL12.rst api_reference/compute_LPT_displacements.rst - api_reference/compute_LPT_icebridge_data.rst - api_reference/compute_LPT_ICESat_GLA12.rst api_reference/compute_OPT_displacements.rst - api_reference/compute_OPT_icebridge_data.rst - api_reference/compute_OPT_ICESat_GLA12.rst api_reference/compute_tidal_currents.rst api_reference/compute_tidal_elevations.rst - api_reference/compute_tides_icebridge_data.rst - api_reference/compute_tides_ICESat_GLA12.rst - api_reference/compute_tides_ICESat2_ATL03.rst - api_reference/compute_tides_ICESat2_ATL06.rst - api_reference/compute_tides_ICESat2_ATL07.rst - api_reference/compute_tides_ICESat2_ATL10.rst - api_reference/compute_tides_ICESat2_ATL11.rst - api_reference/compute_tides_ICESat2_ATL12.rst diff --git a/environment.yml b/environment.yml index e9d42834..8e013cc0 100644 --- a/environment.yml +++ b/environment.yml @@ -20,6 +20,3 @@ dependencies: - pyyaml - scipy - setuptools_scm - - pip: - - icesat2-toolkit - - ATM1b-QFIT diff --git a/pyTMD/time.py b/pyTMD/time.py index 9183b50b..a1e71210 100644 --- a/pyTMD/time.py +++ b/pyTMD/time.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" time.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (08/2022) Utilities for calculating time operations PYTHON DEPENDENCIES: @@ -16,6 +16,8 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 08/2022: added variables for unit conversion to seconds + and the number of days per month for both leap and standard years Updated 05/2022: changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 04/2021: updated NIST ftp server url for leap-seconds.list @@ -40,6 +42,20 @@ import dateutil.parser import pyTMD.utilities +# conversion factors between time units and seconds +_to_sec = {'microseconds': 1e-6, 'microsecond': 1e-6, + 'microsec': 1e-6, 'microsecs': 1e-6, + 'milliseconds': 1e-3, 'millisecond': 1e-3, + 'millisec': 1e-3, 'millisecs': 1e-3, + 'msec': 1e-3, 'msecs': 1e-3, 'ms': 1e-3, + 'seconds': 1.0, 'second': 1.0, 'sec': 1.0, + 'secs': 1.0, 's': 1.0, + 'minutes': 60.0, 'minute': 60.0, + 'min': 60.0, 'mins': 60.0, + 'hours': 3600.0, 'hour': 3600.0, + 'hr': 3600.0, 'hrs': 3600.0, 'h': 3600.0, + 'day': 86400.0, 'days': 86400.0, 'd': 86400.0} + # PURPOSE: parse a date string into epoch and units scale def parse_date_string(date_string): """ @@ -70,19 +86,10 @@ def parse_date_string(date_string): return (datetime_to_list(epoch),0.0) # split the date string into units and epoch units,epoch = split_date_string(date_string) - conversion_factors = {'microseconds': 1e-6,'microsecond': 1e-6, - 'microsec': 1e-6,'microsecs': 1e-6, - 'milliseconds': 1e-3,'millisecond': 1e-3,'millisec': 1e-3, - 'millisecs': 1e-3,'msec': 1e-3,'msecs': 1e-3,'ms': 1e-3, - 'seconds': 1.0,'second': 1.0,'sec': 1.0,'secs': 1.0,'s': 1.0, - 'minutes': 60.0,'minute': 60.0,'min': 60.0,'mins': 60.0, - 'hours': 3600.0,'hour': 3600.0,'hr': 3600.0, - 'hrs': 3600.0,'h': 3600.0, - 'day': 86400.0,'days': 86400.0,'d': 86400.0} - if units not in conversion_factors.keys(): + if units not
in _to_sec.keys(): raise ValueError('Invalid units: {0}'.format(units)) # return the epoch (as list) and the time unit conversion factors - return (datetime_to_list(epoch),conversion_factors[units]) + return (datetime_to_list(epoch), _to_sec[units]) # PURPOSE: split a date string into units and epoch def split_date_string(date_string): @@ -95,11 +102,11 @@ def split_date_string(date_string): time-units since yyyy-mm-dd hh:mm:ss """ try: - units,_,epoch = date_string.split(None,2) + units,_,epoch = date_string.split(None, 2) except ValueError: raise ValueError('Invalid format: {0}'.format(date_string)) else: - return (units.lower(),dateutil.parser.parse(epoch)) + return (units.lower(), dateutil.parser.parse(epoch)) # PURPOSE: convert a datetime object into a list def datetime_to_list(date): @@ -115,7 +122,13 @@ def datetime_to_list(date): date: list [year,month,day,hour,minute,second] """ - return [date.year,date.month,date.day,date.hour,date.minute,date.second] + return [date.year, date.month, date.day, + date.hour, date.minute, date.second] + +# days per month in a leap and a standard year +# only difference is February (29 vs. 28) +_dpm_leap = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] +_dpm_stnd = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] # PURPOSE: gets the number of days per month for a given year def calendar_days(year): @@ -132,10 +145,6 @@ def calendar_days(year): dpm: list number of days for each month """ - # days per month in a leap and a standard year - # only difference is February (29 vs. 28) - dpm_leap = np.array([31,29,31,30,31,30,31,31,30,31,30,31],dtype=np.float64) - dpm_stnd = np.array([31,28,31,30,31,30,31,31,30,31,30,31],dtype=np.float64) # Rules in the Gregorian calendar for a year to be a leap year: # divisible by 4, but not by 100 unless divisible by 400 # True length of the year is about 365.2422 days @@ -149,12 +158,12 @@ def calendar_days(year): m4000 = (year % 4000) # find indices for standard years and leap years using criteria if ((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0)): - return dpm_leap + return np.array(_dpm_leap, dtype=np.float64) elif ((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0)): - return dpm_stnd + return np.array(_dpm_stnd, dtype=np.float64) # PURPOSE: convert a numpy datetime array to delta times from the UNIX epoch -def convert_datetime(date, epoch=(1970,1,1,0,0,0)): +def convert_datetime(date, epoch=(1970, 1, 1, 0, 0, 0)): """ Convert a numpy datetime array to seconds since ``epoch`` @@ -198,7 +207,7 @@ def convert_delta_time(delta_time, epoch1=None, epoch2=None, scale=1.0): # PURPOSE: calculate the delta time from calendar date # http://scienceworld.wolfram.com/astronomy/JulianDate.html def convert_calendar_dates(year, month, day, hour=0.0, minute=0.0, second=0.0, - epoch=(1992,1,1,0,0,0), scale=1.0): + epoch=(1992, 1, 1, 0, 0, 0), scale=1.0): """ Calculate the time in time units since ``epoch`` from calendar dates @@ -232,11 +241,11 @@ def convert_calendar_dates(year, month, day, hour=0.0, minute=0.0, second=0.0, np.floor(3.0*(np.floor((year + (month - 9.0)/7.0)/100.0) + 1.0)/4.0) + \ np.floor(275.0*month/9.0) + day + hour/24.0 + minute/1440.0 + \ second/86400.0 + 1721028.5 - 2400000.5 - epoch1 = datetime.datetime(1858,11,17,0,0,0) + epoch1 = datetime.datetime(1858, 11, 17, 0, 0, 0) epoch2 = datetime.datetime(*epoch) delta_time_epochs = (epoch2 - epoch1).total_seconds() # return the date in days since epoch - return scale*np.array(MJD - delta_time_epochs/86400.0,dtype=np.float64) + return scale*np.array(MJD - 
delta_time_epochs/86400.0, dtype=np.float64) # PURPOSE: Converts from calendar dates into decimal years def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, @@ -297,8 +306,8 @@ def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, # days per month in a leap and a standard year # only difference is February (29 vs. 28) - dpm_leap=np.array([31,29,31,30,31,30,31,31,30,31,30,31], dtype=np.float64) - dpm_stnd=np.array([31,28,31,30,31,30,31,31,30,31,30,31], dtype=np.float64) + dpm_leap = np.array(_dpm_leap, dtype=np.float64) + dpm_stnd = np.array(_dpm_stnd, dtype=np.float64) # Rules in the Gregorian calendar for a year to be a leap year: # divisible by 4, but not by 100 unless divisible by 400 @@ -433,7 +442,7 @@ def convert_julian(JD, **kwargs): kwargs.setdefault('astype', None) kwargs.setdefault('format', 'dict') # raise warnings for deprecated keyword arguments - deprecated_keywords = dict(ASTYPE='astype',FORMAT='format') + deprecated_keywords = dict(ASTYPE='astype', FORMAT='format') for old,new in deprecated_keywords.items(): if old in kwargs.keys(): warnings.warn("""Deprecated keyword argument {0}. @@ -445,10 +454,11 @@ def convert_julian(JD, **kwargs): # convert to array if only a single value was imported if (np.ndim(JD) == 0): JD = np.atleast_1d(JD) - SINGLE_VALUE = True + single_value = True else: - SINGLE_VALUE = False + single_value = False + # verify julian day JDO = np.floor(JD + 0.5) C = np.zeros_like(JD) # calculate C for dates before and after the switch to Gregorian @@ -463,41 +473,41 @@ def convert_julian(JD, **kwargs): E = np.floor((365.0 * D) + np.floor(D/4.0)) F = np.floor((C - E)/30.6001) # calculate day, month, year and hour - DAY = np.floor(C - E + 0.5) - np.floor(30.6001*F) - MONTH = F - 1.0 - 12.0*np.floor(F/14.0) - YEAR = D - 4715.0 - np.floor((7.0+MONTH)/10.0) - HOUR = np.floor(24.0*(JD + 0.5 - JDO)) + day = np.floor(C - E + 0.5) - np.floor(30.6001*F) + month = F - 1.0 - 12.0*np.floor(F/14.0) + year = D - 4715.0 - np.floor((7.0 + month)/10.0) + hour = np.floor(24.0*(JD + 0.5 - JDO)) # calculate minute and second - G = (JD + 0.5 - JDO) - HOUR/24.0 - MINUTE = np.floor(G*1440.0) - SECOND = (G - MINUTE/1440.0) * 86400.0 + G = (JD + 0.5 - JDO) - hour/24.0 + minute = np.floor(G*1440.0) + second = (G - minute/1440.0) * 86400.0 # convert all variables to output type (from float) if kwargs['astype'] is not None: - YEAR = YEAR.astype(kwargs['astype']) - MONTH = MONTH.astype(kwargs['astype']) - DAY = DAY.astype(kwargs['astype']) - HOUR = HOUR.astype(kwargs['astype']) - MINUTE = MINUTE.astype(kwargs['astype']) - SECOND = SECOND.astype(kwargs['astype']) + year = year.astype(kwargs['astype']) + month = month.astype(kwargs['astype']) + day = day.astype(kwargs['astype']) + hour = hour.astype(kwargs['astype']) + minute = minute.astype(kwargs['astype']) + second = second.astype(kwargs['astype']) # if only a single value was imported initially: remove singleton dims - if SINGLE_VALUE: - YEAR = YEAR.item(0) - MONTH = MONTH.item(0) - DAY = DAY.item(0) - HOUR = HOUR.item(0) - MINUTE = MINUTE.item(0) - SECOND = SECOND.item(0) - - # return date variables in output format (default python dictionary) + if single_value: + year = year.item(0) + month = month.item(0) + day = day.item(0) + hour = hour.item(0) + minute = minute.item(0) + second = second.item(0) + + # return date variables in output format if (kwargs['format'] == 'dict'): - return dict(year=YEAR, month=MONTH, day=DAY, - hour=HOUR, minute=MINUTE, second=SECOND) + return dict(year=year, 
month=month, day=day, + hour=hour, minute=minute, second=second) elif (kwargs['format'] == 'tuple'): - return (YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) + return (year, month, day, hour, minute, second) elif (kwargs['format'] == 'zip'): - return zip(YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) + return zip(year, month, day, hour, minute, second) # PURPOSE: Count number of leap seconds that have passed for each GPS time def count_leap_seconds(GPS_Time, truncate=True): diff --git a/requirements.txt b/requirements.txt index 942fd56d..ce79d41a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,5 @@ -ATM1b-QFIT gdal h5py -icesat2-toolkit lxml netCDF4 numpy diff --git a/scripts/compute_LPET_ICESat2_ATL03.py b/scripts/compute_LPET_ICESat2_ATL03.py deleted file mode 100644 index 8d375571..00000000 --- a/scripts/compute_LPET_ICESat2_ATL03.py +++ /dev/null @@ -1,465 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL03.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - geolocated photon height data -Will calculate the long-period tides for all ATL03 segments and not just ocean - segments defined by the ocean tide mask -Calculated at ATL03 segment level using reference photon geolocation and time -Segment level corrections can be applied to the individual photon events (PEs) - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL03.py: reads ICESat-2 global geolocated photon data files - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL03 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 11/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL03 import read_HDF5_ATL03_main, \ - read_HDF5_ATL03_beam -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") -
warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 geolocated photon data (ATL03) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams = read_HDF5_ATL03_main(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_LPET_{1}{2}{3}{4}{5}{6}_{7}{8}{9}_{10}_{11}{12}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl03_mds['ancillary_data']['atlas_sdp_gps_epoch'] - - # copy variables for outputting to HDF5 file - IS2_atl03_tide = {} - IS2_atl03_fill = {} - IS2_atl03_dims = {} - IS2_atl03_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl03_tide['ancillary_data'] = {} - IS2_atl03_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl03_tide['ancillary_data'][key] = IS2_atl03_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl03_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl03_attrs['ancillary_data'][key].items(): - IS2_atl03_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl03_beams): - # output data dictionaries for beam - IS2_atl03_tide[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_fill[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_dims[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_tide_attrs[gtx] = dict(geolocation={}, geophys_corr={}) - - # read data and attributes for beam - val,attrs = read_HDF5_ATL03_beam(INPUT_FILE,gtx,ATTRIBUTES=True) - # number of segments - n_seg = len(val['geolocation']['segment_id']) - # extract variables for computing equilibrium tides - segment_id = val['geolocation']['segment_id'].copy() - delta_time = val['geolocation']['delta_time'].copy() - lon = val['geolocation']['reference_photon_lon'].copy() - lat = val['geolocation']['reference_photon_lat'].copy() - # invalid value - fv = attrs['geolocation']['sigma_h']['_FillValue'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + delta_time - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = 
pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = compute_equilibrium_tide(tide_time + deltat, lat) - - # group attributes for beam - IS2_atl03_tide_attrs[gtx]['Description'] = attrs['Description'] - IS2_atl03_tide_attrs[gtx]['atlas_pce'] = attrs['atlas_pce'] - IS2_atl03_tide_attrs[gtx]['atlas_beam_type'] = attrs['atlas_beam_type'] - IS2_atl03_tide_attrs[gtx]['groundtrack_id'] = attrs['groundtrack_id'] - IS2_atl03_tide_attrs[gtx]['atmosphere_profile'] = attrs['atmosphere_profile'] - IS2_atl03_tide_attrs[gtx]['atlas_spot_number'] = attrs['atlas_spot_number'] - IS2_atl03_tide_attrs[gtx]['sc_orientation'] = attrs['sc_orientation'] - - # group attributes for geolocation - IS2_atl03_tide_attrs[gtx]['geolocation']['Description'] = ("Contains parameters related to " - "geolocation. The rate of all of these parameters is at the rate corresponding to the " - "ICESat-2 Geolocation Along Track Segment interval (nominally 20 m along-track).") - IS2_atl03_tide_attrs[gtx]['geolocation']['data_rate'] = ("Data within this group are " - "stored at the ICESat-2 20m segment rate.") - # group attributes for geophys_corr - IS2_atl03_tide_attrs[gtx]['geophys_corr']['Description'] = ("Contains parameters used to " - "correct photon heights for geophysical effects, such as tides. These parameters are " - "posted at the same interval as the ICESat-2 Geolocation Along-Track Segment interval " - "(nominally 20m along-track).") - IS2_atl03_tide_attrs[gtx]['geophys_corr']['data_rate'] = ("These parameters are stored at " - "the ICESat-2 Geolocation Along Track Segment rate (nominally every 20 m along-track).") - - # geolocation, time and segment ID - # delta time in geolocation group - IS2_atl03_tide[gtx]['geolocation']['delta_time'] = delta_time - IS2_atl03_fill[gtx]['geolocation']['delta_time'] = None - IS2_atl03_dims[gtx]['geolocation']['delta_time'] = None - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['standard_name'] = "time" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['calendar'] = "standard" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['description'] = ("Elapsed seconds " - "from the ATLAS SDP GPS Epoch, corresponding to the transmit time of the reference " - "photon. The ATLAS Standard Data Products (SDP) epoch offset is defined within " - "/ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds between the GPS epoch " - "(1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. 
By adding the offset " - "contained within atlas_sdp_gps_epoch to delta time parameters, the time in gps_seconds " - "relative to the GPS epoch can be computed.") - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['coordinates'] = \ - "segment_id reference_photon_lat reference_photon_lon" - # delta time in geophys_corr group - IS2_atl03_tide[gtx]['geophys_corr']['delta_time'] = delta_time - IS2_atl03_fill[gtx]['geophys_corr']['delta_time'] = None - IS2_atl03_dims[gtx]['geophys_corr']['delta_time'] = None - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time'] = {} - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['standard_name'] = "time" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['calendar'] = "standard" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['description'] = ("Elapsed seconds " - "from the ATLAS SDP GPS Epoch, corresponding to the transmit time of the reference " - "photon. The ATLAS Standard Data Products (SDP) epoch offset is defined within " - "/ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds between the GPS epoch " - "(1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By adding the offset " - "contained within atlas_sdp_gps_epoch to delta time parameters, the time in gps_seconds " - "relative to the GPS epoch can be computed.") - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['coordinates'] = ("../geolocation/segment_id " - "../geolocation/reference_photon_lat ../geolocation/reference_photon_lon") - - # latitude - IS2_atl03_tide[gtx]['geolocation']['reference_photon_lat'] = lat - IS2_atl03_fill[gtx]['geolocation']['reference_photon_lat'] = None - IS2_atl03_dims[gtx]['geolocation']['reference_photon_lat'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['units'] = "degrees_north" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['contentType'] = "physicalMeasurement" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['long_name'] = "Latitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['standard_name'] = "latitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['description'] = ("Latitude of each " - "reference photon. 
Computed from the ECF Cartesian coordinates of the bounce point.") - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['valid_min'] = -90.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['valid_max'] = 90.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['coordinates'] = \ - "segment_id delta_time reference_photon_lon" - # longitude - IS2_atl03_tide[gtx]['geolocation']['reference_photon_lon'] = lon - IS2_atl03_fill[gtx]['geolocation']['reference_photon_lon'] = None - IS2_atl03_dims[gtx]['geolocation']['reference_photon_lon'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['units'] = "degrees_east" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['contentType'] = "physicalMeasurement" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['long_name'] = "Longitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['standard_name'] = "longitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['description'] = ("Longitude of each " - "reference photon. Computed from the ECF Cartesian coordinates of the bounce point.") - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['valid_min'] = -180.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['valid_max'] = 180.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['coordinates'] = \ - "segment_id delta_time reference_photon_lat" - # segment ID - IS2_atl03_tide[gtx]['geolocation']['segment_id'] = segment_id - IS2_atl03_fill[gtx]['geolocation']['segment_id'] = None - IS2_atl03_dims[gtx]['geolocation']['segment_id'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['units'] = "1" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['contentType'] = "referenceInformation" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['long_name'] = "Along-track segment ID number" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['description'] = ("A 7 digit number " - "identifying the along-track geolocation segment number. These are sequential, starting with " - "1 for the first segment after an ascending equatorial crossing node. 
Equal to the segment_id for " - "the second of the two 20m ATL03 segments included in the 40m ATL03 segment") - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['coordinates'] = \ - "delta_time reference_photon_lat reference_photon_lon" - - # computed long-period equilibrium tide - IS2_atl03_tide[gtx]['geophys_corr']['tide_equilibrium'] = tide_lpe - IS2_atl03_fill[gtx]['geophys_corr']['tide_equilibrium'] = None - IS2_atl03_dims[gtx]['geophys_corr']['tide_equilibrium'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium'] = {} - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['units'] = "meters" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['contentType'] = "referenceInformation" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['tide_equilibrium']['coordinates'] = \ - ("../geolocation/segment_id ../geolocation/delta_time " - "../geolocation/reference_photon_lat ../geolocation/reference_photon_lon") - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl03_fill, DIMENSIONS=IS2_atl03_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl03_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl03_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl03_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - h5[gtx] = {} - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl03_attrs[gtx][att_name] - # create geolocation and geophys_corr groups - for key in ['geolocation','geophys_corr']: - fileID[gtx].create_group(key) - h5[gtx][key] = {} - for att_name in ['Description','data_rate']: - att_val = IS2_atl03_attrs[gtx][key][att_name] - fileID[gtx][key].attrs[att_name] = att_val - - # all variables for group - groupkeys = 
set(IS2_atl03_tide[gtx][key].keys())-set(['delta_time']) - for k in ['delta_time',*sorted(groupkeys)]: - # values and attributes - v = IS2_atl03_tide[gtx][key][k] - attrs = IS2_atl03_attrs[gtx][key][k] - fillvalue = FILL_VALUE[gtx][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,key,k) - if fillvalue: - h5[gtx][key][k] = fileID.create_dataset(val, np.shape(v), - data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx][key][k] = fileID.create_dataset(val, np.shape(v), - data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx][key][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][key][k]): - h5[gtx][key][k].dims[i].attach_scale(h5[gtx][key][dim]) - else: - # make dimension - h5[gtx][key][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L2A Global Geolocated Photon Data' - fileID.attrs['summary'] = ('The purpose of ATL03 is to provide along-track ' - 'photon data for all 6 ATLAS beams and associated statistics') - fileID.attrs['description'] = ('Photon heights determined by ATBD ' - 'Algorithm using POD and PPD. All photon events per transmit pulse ' - 'per beam. Includes POD and PPD vectors. Classification of each ' - 'photon by several ATBD Algorithms.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL03 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl03_tide[gtx]['geolocation']['reference_photon_lon'] - lat = IS2_atl03_tide[gtx]['geolocation']['reference_photon_lat'] - delta_time = IS2_atl03_tide[gtx]['geolocation']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds
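# A minimal standalone sketch of the same conversion performed below,
# assuming this version of pyTMD: 1198800018.0 is the nominal ATLAS SDP
# epoch in GPS seconds, and the delta time of 0.0 is illustrative only,
# not taken from a real granule.
import numpy as np
import pyTMD.time
gps_seconds = 1198800018.0 + np.array([0.0])
leaps = pyTMD.time.count_leap_seconds(gps_seconds)
MJD = pyTMD.time.convert_delta_time(gps_seconds - leaps,
    epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(2400000.5 + MJD, format='tuple')
# expected: 2018-01-01T00:00:00, i.e. the ATLAS SDP epoch in UTC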
- atlas_sdp_gps_epoch=IS2_atl03_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL03 geolocated photon height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL03 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL03 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat2_ATL06.py b/scripts/compute_LPET_ICESat2_ATL06.py deleted file mode 100644 index 9e1aaffb..00000000 --- a/scripts/compute_LPET_ICESat2_ATL06.py +++ /dev/null @@ -1,468 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL06.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - land ice elevation data -Will calculate the long-period tides for all ATL06 segments and not just ocean - segments defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL06.py: reads
ICESat-2 land ice along-track height data files - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL06 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 11/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL06 import read_HDF5_ATL06 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 land ice data (ATL06) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl06_mds,IS2_atl06_attrs,IS2_atl06_beams = read_HDF5_ATL06(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_LPET_{1}{2}{3}{4}{5}{6}_{7}{8}{9}_{10}_{11}{12}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl06_mds['ancillary_data']['atlas_sdp_gps_epoch'] - - # copy variables for outputting to HDF5 file - IS2_atl06_tide = {} - IS2_atl06_fill = {} - IS2_atl06_dims = {} - IS2_atl06_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl06_tide['ancillary_data'] = {} - 
IS2_atl06_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl06_tide['ancillary_data'][key] = IS2_atl06_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl06_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl06_attrs['ancillary_data'][key].items(): - IS2_atl06_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl06_beams): - # output data dictionaries for beam - IS2_atl06_tide[gtx] = dict(land_ice_segments={}) - IS2_atl06_fill[gtx] = dict(land_ice_segments={}) - IS2_atl06_dims[gtx] = dict(land_ice_segments={}) - IS2_atl06_tide_attrs[gtx] = dict(land_ice_segments={}) - - # number of segments - val = IS2_atl06_mds[gtx]['land_ice_segments'] - n_seg = len(val['segment_id']) - # find valid segments for beam - fv = IS2_atl06_attrs[gtx]['land_ice_segments']['h_li']['_FillValue'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = np.ma.zeros((n_seg), fill_value=fv) - tide_lpe.data[:] = compute_equilibrium_tide(tide_time + deltat, val['latitude']) - tide_lpe.mask = (val['latitude'] == fv) | (val['delta_time'] == fv) - - # group attributes for beam - IS2_atl06_tide_attrs[gtx]['Description'] = IS2_atl06_attrs[gtx]['Description'] - IS2_atl06_tide_attrs[gtx]['atlas_pce'] = IS2_atl06_attrs[gtx]['atlas_pce'] - IS2_atl06_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl06_attrs[gtx]['atlas_beam_type'] - IS2_atl06_tide_attrs[gtx]['groundtrack_id'] = IS2_atl06_attrs[gtx]['groundtrack_id'] - IS2_atl06_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl06_attrs[gtx]['atmosphere_profile'] - IS2_atl06_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl06_attrs[gtx]['atlas_spot_number'] - IS2_atl06_tide_attrs[gtx]['sc_orientation'] = IS2_atl06_attrs[gtx]['sc_orientation'] - # group attributes for land_ice_segments - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['Description'] = ("The land_ice_segments group " - "contains the primary set of derived products. This includes geolocation, height, and " - "standard error and quality measures for each segment. This group is sparse, meaning " - "that parameters are provided only for pairs of segments for which at least one beam " - "has a valid surface-height measurement.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['data_rate'] = ("Data within this group are " - "sparse. 
Data values are provided only for those ICESat-2 20m segments where at " - "least one beam has a valid land ice height measurement.") - - # geolocation, time and segment ID - # delta time - IS2_atl06_tide[gtx]['land_ice_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['delta_time'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['delta_time'] = None - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['standard_name'] = "time" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['calendar'] = "standard" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['coordinates'] = \ - "segment_id latitude longitude" - # latitude - IS2_atl06_tide[gtx]['land_ice_segments']['latitude'] = val['latitude'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['latitude'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['latitude'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['units'] = "degrees_north" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['long_name'] = "Latitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['standard_name'] = "latitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['valid_min'] = -90.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['valid_max'] = 90.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['coordinates'] = \ - "segment_id delta_time longitude" - # longitude - IS2_atl06_tide[gtx]['land_ice_segments']['longitude'] = val['longitude'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['longitude'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['longitude'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['units'] = "degrees_east" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['long_name'] = "Longitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['standard_name'] = "longitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['valid_min'] = -180.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['valid_max'] = 180.0 - 
IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['coordinates'] = \ - "segment_id delta_time latitude" - # segment ID - IS2_atl06_tide[gtx]['land_ice_segments']['segment_id'] = val['segment_id'] - IS2_atl06_fill[gtx]['land_ice_segments']['segment_id'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['segment_id'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['units'] = "1" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['contentType'] = "referenceInformation" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['long_name'] = "Along-track segment ID number" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['description'] = ("A 7 digit number " - "identifying the along-track geolocation segment number. These are sequential, starting with " - "1 for the first segment after an ascending equatorial crossing node. Equal to the segment_id for " - "the second of the two 20m ATL03 segments included in the 40m ATL06 segment") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['coordinates'] = \ - "delta_time latitude longitude" - - # geophysical variables - IS2_atl06_tide[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_fill[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_dims[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['Description'] = ("The geophysical group " - "contains parameters used to correct segment heights for geophysical effects, parameters " - "related to solar background and parameters indicative of the presence or absence of clouds.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['data_rate'] = ("Data within this group " - "are stored at the land_ice_segments segment rate.") - # computed long-period equilibrium tide - IS2_atl06_tide[gtx]['land_ice_segments']['geophysical']['tide_equilibrium'] = tide_lpe - IS2_atl06_fill[gtx]['land_ice_segments']['geophysical']['tide_equilibrium'] = tide_lpe.fill_value - IS2_atl06_dims[gtx]['land_ice_segments']['geophysical']['tide_equilibrium'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['units'] = "meters" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['contentType'] = "referenceInformation" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['tide_equilibrium']['coordinates'] = \ - "../segment_id ../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl06_fill, DIMENSIONS=IS2_atl06_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions 
mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl06_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl06_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl06_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl06_attrs[gtx][att_name] - # create land_ice_segments group - fileID[gtx].create_group('land_ice_segments') - h5[gtx] = dict(land_ice_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][att_name] - fileID[gtx]['land_ice_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment_id variables - for k in ['delta_time','latitude','longitude','segment_id']: - # values and attributes - v = IS2_atl06_tide[gtx]['land_ice_segments'][k] - attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][k] - fillvalue = FILL_VALUE[gtx]['land_ice_segments'][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,'land_ice_segments',k) - if fillvalue: - h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx]['land_ice_segments'][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['land_ice_segments'][k]): - h5[gtx]['land_ice_segments'][k].dims[i].attach_scale( - h5[gtx]['land_ice_segments'][dim]) - else: - # make dimension - h5[gtx]['land_ice_segments'][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['land_ice_segments'][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx]['land_ice_segments'].create_group(key) - h5[gtx]['land_ice_segments'][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl06_attrs[gtx]['land_ice_segments'][key][att_name] - fileID[gtx]['land_ice_segments'][key].attrs[att_name] = att_val - for k,v in IS2_atl06_tide[gtx]['land_ice_segments'][key].items(): - # attributes - attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][key][k] - fillvalue = FILL_VALUE[gtx]['land_ice_segments'][key][k] - # Defining the HDF5 dataset variables - val = 
'{0}/{1}/{2}/{3}'.format(gtx,'land_ice_segments',key,k) - if fillvalue: - h5[gtx]['land_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx]['land_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['land_ice_segments'][key][k]): - h5[gtx]['land_ice_segments'][key][k].dims[i].attach_scale( - h5[gtx]['land_ice_segments'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['land_ice_segments'][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Land Ice Height' - fileID.attrs['summary'] = ('Estimates of the ice-sheet tidal parameters ' - 'needed to interpret and assess the quality of the height estimates.') - fileID.attrs['description'] = ('Land ice parameters for each beam. All ' - 'parameters are calculated for the same along-track increments for ' - 'each beam and repeat.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL06 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl06_tide[gtx]['land_ice_segments']['longitude'] - lat = IS2_atl06_tide[gtx]['land_ice_segments']['latitude'] - delta_time = IS2_atl06_tide[gtx]['land_ice_segments']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl06_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0),
scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL06 land ice elevation data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL06 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL06 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat2_ATL07.py b/scripts/compute_LPET_ICESat2_ATL07.py deleted file mode 100644 index 7c306131..00000000 --- a/scripts/compute_LPET_ICESat2_ATL07.py +++ /dev/null @@ -1,498 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL07.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - sea ice height data -Will calculate the long-period tides for all ATL07 segments and not just ocean - segments defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL07.py: reads ICESat-2 sea ice height data files - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use
argparse descriptions within documentation - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL07 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 11/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 sea ice height (ATL07) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 sea ice file name - rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})' - r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX) - file_format = '{0}-{1}_LPET_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch'] - - # copy variables for outputting to HDF5 file - IS2_atl07_tide = {} - IS2_atl07_fill = {} - IS2_atl07_dims = {} - IS2_atl07_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl07_tide['ancillary_data'] = {} - IS2_atl07_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl07_tide['ancillary_data'][key] = IS2_atl07_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl07_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl07_attrs['ancillary_data'][key].items(): - 
IS2_atl07_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl07_beams): - # output data dictionaries for beam - IS2_atl07_tide[gtx] = dict(sea_ice_segments={}) - IS2_atl07_fill[gtx] = dict(sea_ice_segments={}) - IS2_atl07_dims[gtx] = dict(sea_ice_segments={}) - IS2_atl07_tide_attrs[gtx] = dict(sea_ice_segments={}) - - # number of segments - val = IS2_atl07_mds[gtx]['sea_ice_segments'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = compute_equilibrium_tide(tide_time + deltat, val['latitude']) - - # group attributes for beam - IS2_atl07_tide_attrs[gtx]['Description'] = IS2_atl07_attrs[gtx]['Description'] - IS2_atl07_tide_attrs[gtx]['atlas_pce'] = IS2_atl07_attrs[gtx]['atlas_pce'] - IS2_atl07_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl07_attrs[gtx]['atlas_beam_type'] - IS2_atl07_tide_attrs[gtx]['groundtrack_id'] = IS2_atl07_attrs[gtx]['groundtrack_id'] - IS2_atl07_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl07_attrs[gtx]['atmosphere_profile'] - IS2_atl07_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl07_attrs[gtx]['atlas_spot_number'] - IS2_atl07_tide_attrs[gtx]['sc_orientation'] = IS2_atl07_attrs[gtx]['sc_orientation'] - # group attributes for sea_ice_segments - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['Description'] = ("Top group for sea " - "ice segments as computed by the ATBD algorithm.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['data_rate'] = ("Data within this " - "group are stored at the variable segment rate.") - - # geolocation, time and segment ID - # delta time - IS2_atl07_tide[gtx]['sea_ice_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['delta_time'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['delta_time'] = None - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['standard_name'] = "time" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['source'] = "telemetry" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['calendar'] = "standard" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['coordinates'] = \ - "height_segment_id latitude longitude" - # latitude - IS2_atl07_tide[gtx]['sea_ice_segments']['latitude'] = val['latitude'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['latitude'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['latitude'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['units'] = "degrees_north" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['long_name'] = "Latitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['standard_name'] = "latitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['valid_min'] = -90.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['valid_max'] = 90.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['coordinates'] = \ - "height_segment_id delta_time longitude" - # longitude - IS2_atl07_tide[gtx]['sea_ice_segments']['longitude'] = val['longitude'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['longitude'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['longitude'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['units'] = "degrees_east" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['long_name'] = "Longitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['standard_name'] = "longitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['valid_min'] = -180.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['valid_max'] = 180.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['coordinates'] = \ - "height_segment_id delta_time latitude" - # segment ID - IS2_atl07_tide[gtx]['sea_ice_segments']['height_segment_id'] = val['height_segment_id'] - IS2_atl07_fill[gtx]['sea_ice_segments']['height_segment_id'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['height_segment_id'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['long_name'] = \ - "Identifier of each height segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['description'] = \ - "Identifier of each height segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['coordinates'] = \ - "delta_time latitude longitude" - # geolocation segment beginning - IS2_atl07_tide[gtx]['sea_ice_segments']['geoseg_beg'] = val['geoseg_beg'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_beg'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_beg'] = ['delta_time'] - 
IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['long_name'] = "Beginning GEOSEG" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['description'] = \ - "Geolocation segment (geoseg) ID associated with the first photon used in this sea ice segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - # geolocation segment ending - IS2_atl07_tide[gtx]['sea_ice_segments']['geoseg_end'] = val['geoseg_end'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_end'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_end'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['long_name'] = "Ending GEOSEG" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['description'] = \ - "Geolocation segment (geoseg) ID associated with the last photon used in this sea ice segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - # along track distance - IS2_atl07_tide[gtx]['sea_ice_segments']['seg_dist_x'] = val['seg_dist_x'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['seg_dist_x'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['seg_dist_x'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['units'] = "meters" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['long_name'] = "Along track distance" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['description'] = \ - "Along-track distance from the equator crossing to the segment center." 
- IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - - # geophysical variables - IS2_atl07_tide[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['Description'] = ("Contains geophysical " - "parameters and corrections used to correct photon heights for geophysical effects, such as tides.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['data_rate'] = ("Data within this group " - "are stored at the sea_ice_height segment rate.") - - # computed long-period equilibrium tide - IS2_atl07_tide[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe'] = tide_lpe - IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['units'] = "meters" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['height_segment_lpe']['coordinates'] = \ - "../height_segment_id ../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl07_fill, DIMENSIONS=IS2_atl07_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl07_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl07_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl07_tide.keys() if 
bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl07_attrs[gtx][att_name] - # create sea_ice_segments group - fileID[gtx].create_group('sea_ice_segments') - h5[gtx] = dict(sea_ice_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl07_attrs[gtx]['sea_ice_segments'][att_name] - fileID[gtx]['sea_ice_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment identification variables - for k in ['delta_time','latitude','longitude','height_segment_id', - 'geoseg_beg','geoseg_end','seg_dist_x']: - # values and attributes - v = IS2_atl07_tide[gtx]['sea_ice_segments'][k] - attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][k] - fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,'sea_ice_segments',k) - if fillvalue: - h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx]['sea_ice_segments'][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][k]): - h5[gtx]['sea_ice_segments'][k].dims[i].attach_scale( - h5[gtx]['sea_ice_segments'][dim]) - else: - # make dimension - h5[gtx]['sea_ice_segments'][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['sea_ice_segments'][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx]['sea_ice_segments'].create_group(key) - h5[gtx]['sea_ice_segments'][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl07_attrs[gtx]['sea_ice_segments'][key][att_name] - fileID[gtx]['sea_ice_segments'][key].attrs[att_name] = att_val - for k,v in IS2_atl07_tide[gtx]['sea_ice_segments'][key].items(): - # attributes - attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][key][k] - fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,'sea_ice_segments',key,k) - if fillvalue: - h5[gtx]['sea_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx]['sea_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][key][k]): - h5[gtx]['sea_ice_segments'][key][k].dims[i].attach_scale( - h5[gtx]['sea_ice_segments'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['sea_ice_segments'][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Height' - fileID.attrs['summary'] = ('Estimates of the sea ice tidal parameters ' - 'needed to interpret and assess the quality of the height estimates.') - fileID.attrs['description'] = ('The data set (ATL07) contains along-track ' - 'heights for sea ice and open water leads (at varying length scales) ' - 'relative to the WGS84 
ellipsoid (ITRF2014 reference frame) after ' - 'adjustment for geoidal and tidal variations, and inverted barometer ' - 'effects.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL07 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl07_tide[gtx]['sea_ice_segments']['longitude'] - lat = IS2_atl07_tide[gtx]['sea_ice_segments']['latitude'] - delta_time = IS2_atl07_tide[gtx]['sea_ice_segments']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl07_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL07 sea ice height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters
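# Because the parser sets fromfile_prefix_chars="@" and uses
# pyTMD.utilities.convert_arg_line_to_args, the options defined below can
# also be read from a plain-text file, one argument or granule per line.
# A hypothetical invocation (file names are illustrative only):
#
#     python compute_LPET_ICESat2_ATL07.py @arguments.txt
#
# where arguments.txt could contain:
#
#     --verbose
#     ATL07-01_20190223232535_08780201_005_01.h5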
- parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL07 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL07 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat2_ATL10.py b/scripts/compute_LPET_ICESat2_ATL10.py deleted file mode 100644 index e72ce113..00000000 --- a/scripts/compute_LPET_ICESat2_ATL10.py +++ /dev/null @@ -1,460 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL10.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - sea ice freeboard data -Will calculate the long-period tides for all ATL10 segments and not just ocean - segments defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL10.py: reads ICESat-2 sea ice freeboard data files - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Forked 12/2021 from compute_LPET_ICESat2_ATL07.py - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL07 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 11/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from
icesat2_toolkit.read_ICESat2_ATL10 import read_HDF5_ATL10 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 sea ice freeboard (ATL10) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl10_mds,IS2_atl10_attrs,IS2_atl10_beams = read_HDF5_ATL10(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 sea ice file name - rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})' - r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX) - file_format = '{0}-{1}_LPET_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl10_mds['ancillary_data']['atlas_sdp_gps_epoch'] - - # copy variables for outputting to HDF5 file - IS2_atl10_tide = {} - IS2_atl10_fill = {} - IS2_atl10_dims = {} - IS2_atl10_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl10_tide['ancillary_data'] = {} - IS2_atl10_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl10_tide['ancillary_data'][key] = IS2_atl10_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl10_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl10_attrs['ancillary_data'][key].items(): - IS2_atl10_tide_attrs['ancillary_data'][key][att_name] = att_val - - for gtx in sorted(IS2_atl10_beams): - # output data dictionaries for beam - IS2_atl10_tide[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_fill[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_dims[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_tide_attrs[gtx] = dict(freeboard_beam_segment={},leads={}) - - # group attributes for beam - IS2_atl10_tide_attrs[gtx]['Description'] = IS2_atl10_attrs[gtx]['Description'] - IS2_atl10_tide_attrs[gtx]['atlas_pce'] = IS2_atl10_attrs[gtx]['atlas_pce'] - IS2_atl10_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl10_attrs[gtx]['atlas_beam_type'] - IS2_atl10_tide_attrs[gtx]['groundtrack_id'] = IS2_atl10_attrs[gtx]['groundtrack_id'] - IS2_atl10_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl10_attrs[gtx]['atmosphere_profile'] - IS2_atl10_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl10_attrs[gtx]['atlas_spot_number'] - IS2_atl10_tide_attrs[gtx]['sc_orientation'] = 
IS2_atl10_attrs[gtx]['sc_orientation'] - - # group attributes for freeboard_beam_segment - IS2_atl10_tide_attrs[gtx]['freeboard_beam_segment']['Description'] = ("Contains freeboard " - "estimate and associated height segment parameters for only the sea ice segments by beam.") - IS2_atl10_tide_attrs[gtx]['freeboard_beam_segment']['data_rate'] = ("Data within this " - "group are stored at the freeboard swath segment rate.") - # group attributes for leads - IS2_atl10_tide_attrs[gtx]['leads']['Description'] = ("Contains parameters relating " - "to the freeboard values.") - IS2_atl10_tide_attrs[gtx]['leads']['data_rate'] = ("Data within this " - "group are stored at the lead index rate.") - - # for each ATL10 group - for group in ['freeboard_beam_segment','leads']: - # number of segments - val = IS2_atl10_mds[gtx][group] - n_seg = len(val['delta_time']) - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = compute_equilibrium_tide(tide_time + deltat, val['latitude']) - - # delta time - IS2_atl10_tide[gtx][group]['delta_time'] = val['delta_time'].copy() - IS2_atl10_fill[gtx][group]['delta_time'] = None - IS2_atl10_dims[gtx][group]['delta_time'] = None - IS2_atl10_tide_attrs[gtx][group]['delta_time'] = {} - IS2_atl10_tide_attrs[gtx][group]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['standard_name'] = "time" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['source'] = "telemetry" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['calendar'] = "standard" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl10_tide_attrs[gtx][group]['delta_time']['coordinates'] = \ - "latitude longitude" - # latitude - IS2_atl10_tide[gtx][group]['latitude'] = val['latitude'].copy() - IS2_atl10_fill[gtx][group]['latitude'] = None - IS2_atl10_dims[gtx][group]['latitude'] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['latitude'] = {} - IS2_atl10_tide_attrs[gtx][group]['latitude']['units'] = "degrees_north" - IS2_atl10_tide_attrs[gtx][group]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl10_tide_attrs[gtx][group]['latitude']['long_name'] = "Latitude" - IS2_atl10_tide_attrs[gtx][group]['latitude']['standard_name'] = "latitude" - IS2_atl10_tide_attrs[gtx][group]['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl10_tide_attrs[gtx][group]['latitude']['valid_min'] = -90.0 - IS2_atl10_tide_attrs[gtx][group]['latitude']['valid_max'] = 90.0 - IS2_atl10_tide_attrs[gtx][group]['latitude']['coordinates'] = \ - "delta_time longitude" - # longitude - IS2_atl10_tide[gtx][group]['longitude'] = val['longitude'].copy() - IS2_atl10_fill[gtx][group]['longitude'] = None - IS2_atl10_dims[gtx][group]['longitude'] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['longitude'] = {} - IS2_atl10_tide_attrs[gtx][group]['longitude']['units'] = "degrees_east" - IS2_atl10_tide_attrs[gtx][group]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl10_tide_attrs[gtx][group]['longitude']['long_name'] = "Longitude" - IS2_atl10_tide_attrs[gtx][group]['longitude']['standard_name'] = "longitude" - IS2_atl10_tide_attrs[gtx][group]['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl10_tide_attrs[gtx][group]['longitude']['valid_min'] = -180.0 - IS2_atl10_tide_attrs[gtx][group]['longitude']['valid_max'] = 180.0 - IS2_atl10_tide_attrs[gtx][group]['longitude']['coordinates'] = \ - "delta_time latitude" - - # geophysical variables - IS2_atl10_tide[gtx][group]['geophysical'] = {} - IS2_atl10_fill[gtx][group]['geophysical'] = {} - IS2_atl10_dims[gtx][group]['geophysical'] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical'] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical']['Description'] = ("Contains geophysical " - "parameters and corrections used to correct photon heights for geophysical " - "effects, such as tides.") - IS2_atl10_tide_attrs[gtx][group]['geophysical']['data_rate'] = ("Data within this group " - "are stored at the variable segment rate.") - - # computed long-period equilibrium tide - IS2_atl10_tide[gtx][group]['geophysical']['height_segment_lpe'] = tide_lpe - IS2_atl10_fill[gtx][group]['geophysical']['height_segment_lpe'] = None - IS2_atl10_dims[gtx][group]['geophysical']['height_segment_lpe'] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe'] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['units'] = "meters" - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['contentType'] = \ - "referenceInformation" - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['reference'] = \ - 
"https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl10_tide_attrs[gtx][group]['geophysical']['height_segment_lpe']['coordinates'] = \ - "../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl10_fill, DIMENSIONS=IS2_atl10_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl10_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl10_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl10_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl10_attrs[gtx][att_name] - # create freeboard_beam_segment and leads groups - h5[gtx] = dict(freeboard_beam_segment={},leads={}) - for group in ['freeboard_beam_segment','leads']: - fileID[gtx].create_group(group) - for att_name in ['Description','data_rate']: - att_val = IS2_atl10_attrs[gtx][group][att_name] - fileID[gtx][group].attrs[att_name] = att_val - - # delta_time and geolocation variables - for k in ['delta_time','latitude','longitude']: - # values and attributes - v = IS2_atl10_tide[gtx][group][k] - attrs = IS2_atl10_attrs[gtx][group][k] - fillvalue = FILL_VALUE[gtx][group][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,group,k) - if fillvalue: - h5[gtx][group][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx][group][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx][group][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][group][k]): - h5[gtx][group][k].dims[i].attach_scale( - h5[gtx][group][dim]) - else: - # make dimension - h5[gtx][group][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][group][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx][group].create_group(key) - h5[gtx][group][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl10_attrs[gtx][group][key][att_name] - fileID[gtx][group][key].attrs[att_name] = 
att_val - for k,v in IS2_atl10_tide[gtx][group][key].items(): - # attributes - attrs = IS2_atl10_attrs[gtx][group][key][k] - fillvalue = FILL_VALUE[gtx][group][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,group,key,k) - if fillvalue: - h5[gtx][group][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx][group][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][group][key][k]): - h5[gtx][group][key][k].dims[i].attach_scale( - h5[gtx][group][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][group][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Freeboard' - fileID.attrs['summary'] = ('Estimates of the sea ice tidal parameters ' - 'needed to interpret and assess the quality of the freeboard estimates.') - fileID.attrs['description'] = ('The data set (ATL10) contains estimates ' - 'of sea ice freeboard, calculated using three different approaches. ' - 'Sea ice leads used to establish the reference sea surface and ' - 'descriptive statistics used in the height estimates are also provided') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL10 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - # for each ATL10 group - for group in ['freeboard_beam_segment','leads']: - lon = IS2_atl10_tide[gtx][group]['longitude'] - lat = IS2_atl10_tide[gtx][group]['latitude'] - delta_time = IS2_atl10_tide[gtx][group]['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl10_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap
seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL10 sea ice freeboard data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL10 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL10 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat2_ATL11.py b/scripts/compute_LPET_ICESat2_ATL11.py deleted file mode 100644 index 634a1040..00000000 --- a/scripts/compute_LPET_ICESat2_ATL11.py +++ /dev/null @@ -1,649 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL11.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - annual land ice height data -Will calculate the long-period tides for all ATL11 segments and not just ocean - segments defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL11.py: reads ICESat-2 annual land ice height data files - time.py: utilities for calculating time operations - utilities.py:
download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL11 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 02/2021: additionally calculate tides for crossing track data - Updated 01/2021: using standalone ATL11 reader - Updated 12/2020: merged time conversion routines into module - Written 12/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import collections -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL11 import read_HDF5_ATL11 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 annual land ice height data (ATL11) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl11_mds,IS2_atl11_attrs,IS2_atl11_pairs = read_HDF5_ATL11(INPUT_FILE, - ATTRIBUTES=True, CROSSOVERS=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})_(\d{2})(\d{2})_' - r'(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX) - file_format = '{0}_LPET_{1}{2}_{3}{4}_{5}_{6}{7}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl11_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl11_tide = {} - IS2_atl11_fill = {} - IS2_atl11_dims = {} - IS2_atl11_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - 
IS2_atl11_tide['ancillary_data'] = {} - IS2_atl11_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl11_tide['ancillary_data'][key] = IS2_atl11_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl11_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl11_attrs['ancillary_data'][key].items(): - IS2_atl11_tide_attrs['ancillary_data'][key][att_name] = att_val - # HDF5 group name for across-track data - XT = 'crossing_track_data' - - # for each input beam pair within the file - for ptx in sorted(IS2_atl11_pairs): - # output data dictionaries for beam - IS2_atl11_tide[ptx] = dict(cycle_stats=collections.OrderedDict(), - crossing_track_data=collections.OrderedDict()) - IS2_atl11_fill[ptx] = dict(cycle_stats={},crossing_track_data={}) - IS2_atl11_dims[ptx] = dict(cycle_stats={},crossing_track_data={}) - IS2_atl11_tide_attrs[ptx] = dict(cycle_stats={},crossing_track_data={}) - - # extract along-track and across-track variables - ref_pt = {} - latitude = {} - longitude = {} - delta_time = {} - # along-track (AT) reference point, latitude, longitude and time - ref_pt['AT'] = IS2_atl11_mds[ptx]['ref_pt'].copy() - latitude['AT'] = np.ma.array(IS2_atl11_mds[ptx]['latitude'], - fill_value=IS2_atl11_attrs[ptx]['latitude']['_FillValue']) - longitude['AT'] = np.ma.array(IS2_atl11_mds[ptx]['longitude'], - fill_value=IS2_atl11_attrs[ptx]['longitude']['_FillValue']) - delta_time['AT'] = np.ma.array(IS2_atl11_mds[ptx]['delta_time'], - fill_value=IS2_atl11_attrs[ptx]['delta_time']['_FillValue']) - # across-track (XT) reference point, latitude, longitude and time - ref_pt['XT'] = IS2_atl11_mds[ptx][XT]['ref_pt'].copy() - latitude['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['latitude'], - fill_value=IS2_atl11_attrs[ptx][XT]['latitude']['_FillValue']) - longitude['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['longitude'], - fill_value=IS2_atl11_attrs[ptx][XT]['longitude']['_FillValue']) - delta_time['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['delta_time'], - fill_value=IS2_atl11_attrs[ptx][XT]['delta_time']['_FillValue']) - - # number of average segments and number of included cycles - # fill_value for invalid heights and corrections - fv = IS2_atl11_attrs[ptx]['h_corr']['_FillValue'] - # shape of along-track and across-track data - n_points,n_cycles = delta_time['AT'].shape - n_cross, = delta_time['XT'].shape - # allocate for output long-period equilibrium tide variables - tide_lpe = {} - # along-track (AT) tides - tide_lpe['AT'] = np.ma.empty((n_points,n_cycles),fill_value=fv) - tide_lpe['AT'].mask = (delta_time['AT'] == delta_time['AT'].fill_value) - # across-track (XT) tides - tide_lpe['XT'] = np.ma.empty((n_cross),fill_value=fv) - tide_lpe['XT'].mask = (delta_time['XT'] == delta_time['XT'].fill_value) - - # calculate tides for along-track and across-track data - for track in ['AT','XT']: - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + delta_time[track] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # calculate long-period equilibrium tides for track type - if (track == 'AT'): - # calculate LPET for each cycle 
if along-track - for cycle in range(n_cycles): - # find valid time and spatial points for cycle - valid, = np.nonzero(~tide_lpe[track].mask[:,cycle]) - # predict long-period equilibrium tides at latitudes and time - t = tide_time[valid,cycle] + deltat[valid,cycle] - tide_lpe[track].data[valid,cycle] = compute_equilibrium_tide(t, - latitude[track][valid]) - elif (track == 'XT'): - # find valid time and spatial points for cycle - valid, = np.nonzero(~tide_lpe[track].mask[:]) - # predict long-period equilibrium tides at latitudes and time - t = tide_time[valid] + deltat[valid] - tide_lpe[track].data[valid] = compute_equilibrium_tide(t, - latitude[track][valid]) - - # replace masked and nan values with fill value - invalid = np.nonzero(np.isnan(tide_lpe[track].data) | tide_lpe[track].mask) - tide_lpe[track].data[invalid] = tide_lpe[track].fill_value - tide_lpe[track].mask[invalid] = True - - # group attributes for beam - IS2_atl11_tide_attrs[ptx]['description'] = ('Contains the primary science parameters ' - 'for this data set') - IS2_atl11_tide_attrs[ptx]['beam_pair'] = IS2_atl11_attrs[ptx]['beam_pair'] - IS2_atl11_tide_attrs[ptx]['ReferenceGroundTrack'] = IS2_atl11_attrs[ptx]['ReferenceGroundTrack'] - IS2_atl11_tide_attrs[ptx]['first_cycle'] = IS2_atl11_attrs[ptx]['first_cycle'] - IS2_atl11_tide_attrs[ptx]['last_cycle'] = IS2_atl11_attrs[ptx]['last_cycle'] - IS2_atl11_tide_attrs[ptx]['equatorial_radius'] = IS2_atl11_attrs[ptx]['equatorial_radius'] - IS2_atl11_tide_attrs[ptx]['polar_radius'] = IS2_atl11_attrs[ptx]['polar_radius'] - - # geolocation, time and reference point - # reference point - IS2_atl11_tide[ptx]['ref_pt'] = ref_pt['AT'].copy() - IS2_atl11_fill[ptx]['ref_pt'] = None - IS2_atl11_dims[ptx]['ref_pt'] = None - IS2_atl11_tide_attrs[ptx]['ref_pt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['ref_pt']['units'] = "1" - IS2_atl11_tide_attrs[ptx]['ref_pt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx]['ref_pt']['long_name'] = "Reference point number" - IS2_atl11_tide_attrs[ptx]['ref_pt']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['ref_pt']['description'] = ("The reference point is the " - "7 digit segment_id number corresponding to the center of the ATL06 data used " - "for each ATL11 point. These are sequential, starting with 1 for the first " - "segment after an ascending equatorial crossing node.") - IS2_atl11_tide_attrs[ptx]['ref_pt']['coordinates'] = \ - "delta_time latitude longitude" - # cycle_number - IS2_atl11_tide[ptx]['cycle_number'] = IS2_atl11_mds[ptx]['cycle_number'].copy() - IS2_atl11_fill[ptx]['cycle_number'] = None - IS2_atl11_dims[ptx]['cycle_number'] = None - IS2_atl11_tide_attrs[ptx]['cycle_number'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['cycle_number']['units'] = "1" - IS2_atl11_tide_attrs[ptx]['cycle_number']['long_name'] = "Orbital cycle number" - IS2_atl11_tide_attrs[ptx]['cycle_number']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['cycle_number']['description'] = ("Number of 91-day periods " - "that have elapsed since ICESat-2 entered the science orbit. 
Each of the 1,387 " - "reference ground tracks (RGTs) is targeted in the polar regions once " - "every 91 days.") - # delta time - IS2_atl11_tide[ptx]['delta_time'] = delta_time['AT'].copy() - IS2_atl11_fill[ptx]['delta_time'] = delta_time['AT'].fill_value - IS2_atl11_dims[ptx]['delta_time'] = ['ref_pt','cycle_number'] - IS2_atl11_tide_attrs[ptx]['delta_time'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl11_tide_attrs[ptx]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl11_tide_attrs[ptx]['delta_time']['standard_name'] = "time" - IS2_atl11_tide_attrs[ptx]['delta_time']['calendar'] = "standard" - IS2_atl11_tide_attrs[ptx]['delta_time']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl11_tide_attrs[ptx]['delta_time']['coordinates'] = \ - "ref_pt cycle_number latitude longitude" - # latitude - IS2_atl11_tide[ptx]['latitude'] = latitude['AT'].copy() - IS2_atl11_fill[ptx]['latitude'] = latitude['AT'].fill_value - IS2_atl11_dims[ptx]['latitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx]['latitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['latitude']['units'] = "degrees_north" - IS2_atl11_tide_attrs[ptx]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx]['latitude']['long_name'] = "Latitude" - IS2_atl11_tide_attrs[ptx]['latitude']['standard_name'] = "latitude" - IS2_atl11_tide_attrs[ptx]['latitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['latitude']['description'] = ("Center latitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx]['latitude']['valid_min'] = -90.0 - IS2_atl11_tide_attrs[ptx]['latitude']['valid_max'] = 90.0 - IS2_atl11_tide_attrs[ptx]['latitude']['coordinates'] = \ - "ref_pt delta_time longitude" - # longitude - IS2_atl11_tide[ptx]['longitude'] = longitude['AT'].copy() - IS2_atl11_fill[ptx]['longitude'] = longitude['AT'].fill_value - IS2_atl11_dims[ptx]['longitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx]['longitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['longitude']['units'] = "degrees_east" - IS2_atl11_tide_attrs[ptx]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx]['longitude']['long_name'] = "Longitude" - IS2_atl11_tide_attrs[ptx]['longitude']['standard_name'] = "longitude" - IS2_atl11_tide_attrs[ptx]['longitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['longitude']['description'] = ("Center longitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx]['longitude']['valid_min'] = -180.0 - IS2_atl11_tide_attrs[ptx]['longitude']['valid_max'] = 180.0 - IS2_atl11_tide_attrs[ptx]['longitude']['coordinates'] = \ - "ref_pt delta_time latitude" - - # cycle statistics variables - IS2_atl11_tide_attrs[ptx]['cycle_stats']['Description'] = ("The cycle_stats subgroup " - "contains summary information about segments for each reference point, including " - "the uncorrected mean heights for reference surfaces, blowing snow and cloud " - "indicators, and geolocation and height misfit statistics.") -
IS2_atl11_tide_attrs[ptx]['cycle_stats']['data_rate'] = ("Data within this group " - "are stored at the average segment rate.") - # computed long-period equilibrium tide - IS2_atl11_tide[ptx]['cycle_stats']['tide_equilibrium'] = tide_lpe['AT'].copy() - IS2_atl11_fill[ptx]['cycle_stats']['tide_equilibrium'] = tide_lpe['AT'].fill_value - IS2_atl11_dims[ptx]['cycle_stats']['tide_equilibrium'] = ['ref_pt','cycle_number'] - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['units'] = "meters" - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl11_tide_attrs[ptx]['cycle_stats']['tide_equilibrium']['coordinates'] = \ - "../ref_pt ../cycle_number ../delta_time ../latitude ../longitude" - - # crossing track variables - IS2_atl11_tide_attrs[ptx][XT]['Description'] = ("The crossing_track_data " - "subgroup contains elevation data at crossover locations. These are " - "locations where two ICESat-2 pair tracks cross, so data are available " - "from both the datum track, for which the granule was generated, and " - "from the crossing track.") - IS2_atl11_tide_attrs[ptx][XT]['data_rate'] = ("Data within this group are " - "stored at the average segment rate.") - - # reference point - IS2_atl11_tide[ptx][XT]['ref_pt'] = ref_pt['XT'].copy() - IS2_atl11_fill[ptx][XT]['ref_pt'] = None - IS2_atl11_dims[ptx][XT]['ref_pt'] = None - IS2_atl11_tide_attrs[ptx][XT]['ref_pt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['long_name'] = ("fit center reference point number, " - "segment_id") - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['source'] = "derived, ATL11 algorithm" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['description'] = ("The reference-point number of the " - "fit center for the datum track. The reference point is the 7 digit segment_id number " - "corresponding to the center of the ATL06 data used for each ATL11 point. These are " - "sequential, starting with 1 for the first segment after an ascending equatorial " - "crossing node.") - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['coordinates'] = \ - "delta_time latitude longitude" - - # reference ground track of the crossing track - IS2_atl11_tide[ptx][XT]['rgt'] = IS2_atl11_mds[ptx][XT]['rgt'].copy() - IS2_atl11_fill[ptx][XT]['rgt'] = IS2_atl11_attrs[ptx][XT]['rgt']['_FillValue'] - IS2_atl11_dims[ptx][XT]['rgt'] = None - IS2_atl11_tide_attrs[ptx][XT]['rgt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['rgt']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['long_name'] = "crossover reference ground track" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['description'] = "The RGT number for the crossing data." 
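All of the deleted compute_LPET_* scripts share one core computation: convert the ATLAS delta_time values to days since the 1992-01-01 tide epoch, apply the dynamic time correction, and evaluate the long-period equilibrium tide at each latitude. A minimal standalone sketch of that shared pipeline, using the same pyTMD calls that appear in the deleted code (the epoch constant and the input arrays are illustrative stand-ins for values read from a granule):

    import numpy as np
    import pyTMD.time
    import pyTMD.utilities
    from pyTMD.calc_delta_time import calc_delta_time
    from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide

    # illustrative stand-ins for /ancillary_data/atlas_sdp_gps_epoch and the
    # per-segment delta_time and latitude variables read from an input granule
    atlas_sdp_gps_epoch = 1198800018.0  # GPS seconds at the 2018-01-01 ATLAS SDP epoch
    delta_time = np.array([1.0e7, 1.0e7 + 30.0])  # ATLAS SDP seconds
    latitude = np.array([-71.5, -71.6])  # degrees north

    # GPS time, corrected for leap seconds, as days since the 1992-01-01 tide epoch
    gps_seconds = atlas_sdp_gps_epoch + delta_time
    leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
    tide_time = pyTMD.time.convert_delta_time(gps_seconds - leap_seconds,
        epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0)
    # interpolate the dynamic time correction from the bundled table
    delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])
    deltat = calc_delta_time(delta_file, tide_time)
    # long-period equilibrium tidal elevation in meters at each point and time
    tide_lpe = compute_equilibrium_tide(tide_time + deltat, latitude)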
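The HDF5_*_tide_write functions likewise all follow the same h5py dimension-scale pattern: the coordinate variable is registered as a scale with make_scale and each data variable attaches it along the matching dimension, so the output reads as a CF-style trajectory file. A minimal sketch of that pattern, anchored here because it recurs in every writer below (the file name and array values are illustrative only):

    import h5py
    import numpy as np

    with h5py.File('LPET_sketch.h5', 'w') as fileID:
        # coordinate variable becomes a dimension scale
        delta_time = np.arange(10, dtype=np.float64)
        d = fileID.create_dataset('delta_time', data=delta_time, compression='gzip')
        d.make_scale('delta_time')
        # data variable attaches the scale along its first dimension
        tide_lpe = np.zeros((10,), dtype=np.float64)
        v = fileID.create_dataset('tide_equilibrium', data=tide_lpe, compression='gzip')
        v.dims[0].attach_scale(d)
        v.attrs['units'] = 'meters'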
- IS2_atl11_tide_attrs[ptx][XT]['rgt']['coordinates'] = \ - "ref_pt delta_time latitude longitude" - # cycle_number of the crossing track - IS2_atl11_tide[ptx][XT]['cycle_number'] = IS2_atl11_mds[ptx][XT]['cycle_number'].copy() - IS2_atl11_fill[ptx][XT]['cycle_number'] = IS2_atl11_attrs[ptx][XT]['cycle_number']['_FillValue'] - IS2_atl11_dims[ptx][XT]['cycle_number'] = None - IS2_atl11_tide_attrs[ptx][XT]['cycle_number'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['long_name'] = "crossover cycle number" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['description'] = ("Cycle number for the " - "crossing data. Number of 91-day periods that have elapsed since ICESat-2 entered " - "the science orbit. Each of the 1,387 reference ground tracks (RGTs) is targeted " - "in the polar regions once every 91 days.") - # delta time of the crossing track - IS2_atl11_tide[ptx][XT]['delta_time'] = delta_time['XT'].copy() - IS2_atl11_fill[ptx][XT]['delta_time'] = delta_time['XT'].fill_value - IS2_atl11_dims[ptx][XT]['delta_time'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['delta_time'] = {} - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['standard_name'] = "time" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['calendar'] = "standard" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. 
By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl11_tide_attrs[ptx]['delta_time']['coordinates'] = \ - "ref_pt latitude longitude" - # latitude of the crossover measurement - IS2_atl11_tide[ptx][XT]['latitude'] = latitude['XT'].copy() - IS2_atl11_fill[ptx][XT]['latitude'] = latitude['XT'].fill_value - IS2_atl11_dims[ptx][XT]['latitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['latitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['latitude']['units'] = "degrees_north" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['long_name'] = "crossover latitude" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['standard_name'] = "latitude" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['description'] = ("Center latitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx][XT]['latitude']['valid_min'] = -90.0 - IS2_atl11_tide_attrs[ptx][XT]['latitude']['valid_max'] = 90.0 - IS2_atl11_tide_attrs[ptx][XT]['latitude']['coordinates'] = \ - "ref_pt delta_time longitude" - # longitude of the crossover measurement - IS2_atl11_tide[ptx][XT]['longitude'] = longitude['XT'].copy() - IS2_atl11_fill[ptx][XT]['longitude'] = longitude['XT'].fill_value - IS2_atl11_dims[ptx][XT]['longitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['longitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['longitude']['units'] = "degrees_east" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['long_name'] = "crossover longitude" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['standard_name'] = "longitude" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['description'] = ("Center longitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx][XT]['longitude']['valid_min'] = -180.0 - IS2_atl11_tide_attrs[ptx][XT]['longitude']['valid_max'] = 180.0 - IS2_atl11_tide_attrs[ptx][XT]['longitude']['coordinates'] = \ - "ref_pt delta_time latitude" - # computed long-period equilibrium tide for the crossover measurement - IS2_atl11_tide[ptx][XT]['tide_equilibrium'] = tide_lpe['XT'].copy() - IS2_atl11_fill[ptx][XT]['tide_equilibrium'] = tide_lpe['XT'].fill_value - IS2_atl11_dims[ptx][XT]['tide_equilibrium'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['units'] = "meters" - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl11_tide_attrs[ptx][XT]['tide_equilibrium']['coordinates'] = \ - "ref_pt delta_time latitude longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl11_fill, DIMENSIONS=IS2_atl11_dims, - 
FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl11_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl11_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam pair - pairs = [k for k in IS2_atl11_tide.keys() if bool(re.match(r'pt\d',k))] - for ptx in pairs: - fileID.create_group(ptx) - h5[ptx] = {} - # add HDF5 group attributes for beam - for att_name in ['description','beam_pair','ReferenceGroundTrack', - 'first_cycle','last_cycle','equatorial_radius','polar_radius']: - fileID[ptx].attrs[att_name] = IS2_atl11_attrs[ptx][att_name] - - # ref_pt, cycle number, geolocation and delta_time variables - for k in ['ref_pt','cycle_number','delta_time','latitude','longitude']: - # values and attributes - v = IS2_atl11_tide[ptx][k] - attrs = IS2_atl11_attrs[ptx][k] - fillvalue = FILL_VALUE[ptx][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}'.format(ptx,k) - if fillvalue: - h5[ptx][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[ptx][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[ptx][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[ptx][k]): - h5[ptx][k].dims[i].attach_scale(h5[ptx][dim]) - else: - # make dimension - h5[ptx][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[ptx][k].attrs[att_name] = att_val - - # add to cycle_stats and crossing_track_data variables - for key in ['cycle_stats','crossing_track_data']: - fileID[ptx].create_group(key) - h5[ptx][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl11_attrs[ptx][key][att_name] - fileID[ptx][key].attrs[att_name] = att_val - for k,v in IS2_atl11_tide[ptx][key].items(): - # attributes - attrs = IS2_atl11_attrs[ptx][key][k] - fillvalue = FILL_VALUE[ptx][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(ptx,key,k) - if fillvalue: - h5[ptx][key][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[ptx][key][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[ptx][key][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[ptx][key][k]): - if (key == 'cycle_stats'): - h5[ptx][key][k].dims[i].attach_scale(h5[ptx][dim]) - else: - h5[ptx][key][k].dims[i].attach_scale(h5[ptx][key][dim]) - 
else: - # make dimension - h5[ptx][key][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[ptx][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 Annual Land Ice Height' - fileID.attrs['summary'] = ('The purpose of ATL11 is to provide an ICESat-2 ' - 'satellite cycle summary of heights and height changes of land-based ' - 'ice and will be provided as input to ATL15 and ATL16, gridded ' - 'estimates of heights and height-changes.') - fileID.attrs['description'] = ('Land ice parameters for each beam pair. ' - 'All parameters are calculated for the same along-track increments ' - 'for each beam pair and repeat.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL11 files - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for ptx in pairs: - lon = IS2_atl11_tide[ptx]['longitude'] - lat = IS2_atl11_tide[ptx]['latitude'] - delta_time = IS2_atl11_tide[ptx]['delta_time'] - valid = np.nonzero(delta_time != FILL_VALUE[ptx]['delta_time']) - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time[valid].min() if (delta_time[valid].min() < tmn) else tmn - tmx = delta_time[valid].max() if (delta_time[valid].max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl11_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce
= datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL11 annual land ice height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL11 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL11 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat2_ATL12.py b/scripts/compute_LPET_ICESat2_ATL12.py deleted file mode 100644 index 42041310..00000000 --- a/scripts/compute_LPET_ICESat2_ATL12.py +++ /dev/null @@ -1,460 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_ICESat2_ATL12.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting ICESat-2 - ocean surface height data -Will calculate the long-period tides for all ATL12 segments and not just ocean - segments defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL12.py: reads ICESat-2 ocean surface height data files - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named ATL12 file as input - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py
deprecation warning change to use make_scale - merged time conversion routines into module - Written 11/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL12 import read_HDF5_ATL12 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 ocean surface height (ATL12) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat2(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl12_mds,IS2_atl12_attrs,IS2_atl12_beams = read_HDF5_ATL12(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 ocean surface file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for ASAS/NSIDC granules - args = (PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_LPET_{1}{2}{3}{4}{5}{6}_{7}{8}{9}_{10}_{11}{12}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl12_mds['ancillary_data']['atlas_sdp_gps_epoch'] - - # copy variables for outputting to HDF5 file - IS2_atl12_tide = {} - IS2_atl12_fill = {} - IS2_atl12_dims = {} - IS2_atl12_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl12_tide['ancillary_data'] = {} - IS2_atl12_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl12_tide['ancillary_data'][key] = IS2_atl12_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl12_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl12_attrs['ancillary_data'][key].items(): - IS2_atl12_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl12_beams): - # output data dictionaries for beam - IS2_atl12_tide[gtx] = dict(ssh_segments={}) - IS2_atl12_fill[gtx] = dict(ssh_segments={}) - IS2_atl12_dims[gtx] = dict(ssh_segments={}) - IS2_atl12_tide_attrs[gtx] = dict(ssh_segments={}) - - # number of segments - val = 
IS2_atl12_mds[gtx]['ssh_segments'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = compute_equilibrium_tide(tide_time + deltat, val['latitude']) - - # group attributes for beam - IS2_atl12_tide_attrs[gtx]['Description'] = IS2_atl12_attrs[gtx]['Description'] - IS2_atl12_tide_attrs[gtx]['atlas_pce'] = IS2_atl12_attrs[gtx]['atlas_pce'] - IS2_atl12_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl12_attrs[gtx]['atlas_beam_type'] - IS2_atl12_tide_attrs[gtx]['groundtrack_id'] = IS2_atl12_attrs[gtx]['groundtrack_id'] - IS2_atl12_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl12_attrs[gtx]['atmosphere_profile'] - IS2_atl12_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl12_attrs[gtx]['atlas_spot_number'] - IS2_atl12_tide_attrs[gtx]['sc_orientation'] = IS2_atl12_attrs[gtx]['sc_orientation'] - # group attributes for ssh_segments - IS2_atl12_tide_attrs[gtx]['ssh_segments']['Description'] = ("Contains " - "parameters relating to the calculated surface height.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['data_rate'] = ("Data within " - "this group are stored at the variable ocean processing segment rate.") - - # geolocation, time and segment ID - # delta time - IS2_atl12_tide[gtx]['ssh_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['delta_time'] = None - IS2_atl12_dims[gtx]['ssh_segments']['delta_time'] = None - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['standard_name'] = "time" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['source'] = "telemetry" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['calendar'] = "standard" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
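# The epoch shift done by pyTMD.time.convert_delta_time above is plain
# arithmetic; a hedged standard-library sketch for one hypothetical
# timestamp (the leap-second correction is omitted here for brevity).
import datetime
gps_epoch = datetime.datetime(1980, 1, 6)
tide_epoch = datetime.datetime(1992, 1, 1)
epoch_offset = (tide_epoch - gps_epoch).total_seconds()  # 378259200.0
gps_seconds = 1.3e9  # hypothetical seconds since the GPS epoch
tide_time = (gps_seconds - epoch_offset)/86400.0  # days since 1992-01-01
print(tide_time)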
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['coordinates'] = \ - "latitude longitude" - # latitude - IS2_atl12_tide[gtx]['ssh_segments']['latitude'] = val['latitude'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['latitude'] = None - IS2_atl12_dims[gtx]['ssh_segments']['latitude'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['units'] = "degrees_north" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['long_name'] = "Latitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['standard_name'] = "latitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['valid_min'] = -90.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['valid_max'] = 90.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['coordinates'] = \ - "delta_time longitude" - # longitude - IS2_atl12_tide[gtx]['ssh_segments']['longitude'] = val['longitude'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['longitude'] = None - IS2_atl12_dims[gtx]['ssh_segments']['longitude'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['units'] = "degrees_east" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['long_name'] = "Longitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['standard_name'] = "longitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['valid_min'] = -180.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['valid_max'] = 180.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['coordinates'] = \ - "delta_time latitude" - # Ocean Segment Duration - IS2_atl12_tide[gtx]['ssh_segments']['delt_seg'] = val['delt_seg'] - IS2_atl12_fill[gtx]['ssh_segments']['delt_seg'] = None - IS2_atl12_dims[gtx]['ssh_segments']['delt_seg'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['units'] = "seconds" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['contentType'] = \ - "referenceInformation" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['long_name'] = \ - "Ocean Segment Duration" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['description'] = \ - "Time duration segment" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['coordinates'] = \ - "delta_time latitude longitude" - - # stats variables - IS2_atl12_tide[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_fill[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_dims[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['Description'] = ("Contains parameters " - "related to quality and corrections on the sea surface height parameters.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['data_rate'] = ("Data within this group " - "are stored at the variable ocean processing 
segment rate.") - - # computed long-period equilibrium tide - IS2_atl12_tide[gtx]['ssh_segments']['stats']['tide_equilibrium_seg'] = tide_lpe - IS2_atl12_fill[gtx]['ssh_segments']['stats']['tide_equilibrium_seg'] = None - IS2_atl12_dims[gtx]['ssh_segments']['stats']['tide_equilibrium_seg'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg']['units'] = "meters" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg']['contentType'] = "referenceInformation" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium']['long_name'] = \ - "Long Period Equilibrium Tide" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['tide_equilibrium_seg']['coordinates'] = \ - "../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl12_fill, DIMENSIONS=IS2_atl12_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl12_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl12_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl12_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl12_attrs[gtx][att_name] - # create ssh_segments group - fileID[gtx].create_group('ssh_segments') - h5[gtx] = dict(ssh_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl12_attrs[gtx]['ssh_segments'][att_name] - fileID[gtx]['ssh_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment description variables - for k in ['delta_time','latitude','longitude','delt_seg']: - # values and attributes - v = IS2_atl12_tide[gtx]['ssh_segments'][k] - attrs = IS2_atl12_attrs[gtx]['ssh_segments'][k] - fillvalue = FILL_VALUE[gtx]['ssh_segments'][k] - 
-            # Defining the HDF5 dataset variables
-            val = '{0}/{1}/{2}'.format(gtx,'ssh_segments',k)
-            if fillvalue:
-                h5[gtx]['ssh_segments'][k] = fileID.create_dataset(val,
-                    np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue,
-                    compression='gzip')
-            else:
-                h5[gtx]['ssh_segments'][k] = fileID.create_dataset(val,
-                    np.shape(v), data=v, dtype=v.dtype, compression='gzip')
-            # create or attach dimensions for HDF5 variable
-            if DIMENSIONS[gtx]['ssh_segments'][k]:
-                # attach dimensions
-                for i,dim in enumerate(DIMENSIONS[gtx]['ssh_segments'][k]):
-                    h5[gtx]['ssh_segments'][k].dims[i].attach_scale(
-                        h5[gtx]['ssh_segments'][dim])
-            else:
-                # make dimension
-                h5[gtx]['ssh_segments'][k].make_scale(k)
-            # add HDF5 variable attributes
-            for att_name,att_val in attrs.items():
-                h5[gtx]['ssh_segments'][k].attrs[att_name] = att_val
-
-        # add to stats variables
-        key = 'stats'
-        fileID[gtx]['ssh_segments'].create_group(key)
-        h5[gtx]['ssh_segments'][key] = {}
-        for att_name in ['Description','data_rate']:
-            att_val=IS2_atl12_attrs[gtx]['ssh_segments'][key][att_name]
-            fileID[gtx]['ssh_segments'][key].attrs[att_name] = att_val
-        for k,v in IS2_atl12_tide[gtx]['ssh_segments'][key].items():
-            # attributes
-            attrs = IS2_atl12_attrs[gtx]['ssh_segments'][key][k]
-            fillvalue = FILL_VALUE[gtx]['ssh_segments'][key][k]
-            # Defining the HDF5 dataset variables
-            val = '{0}/{1}/{2}/{3}'.format(gtx,'ssh_segments',key,k)
-            if fillvalue:
-                h5[gtx]['ssh_segments'][key][k] = \
-                    fileID.create_dataset(val, np.shape(v), data=v,
-                    dtype=v.dtype, fillvalue=fillvalue, compression='gzip')
-            else:
-                h5[gtx]['ssh_segments'][key][k] = \
-                    fileID.create_dataset(val, np.shape(v), data=v,
-                    dtype=v.dtype, compression='gzip')
-            # attach dimensions
-            for i,dim in enumerate(DIMENSIONS[gtx]['ssh_segments'][key][k]):
-                h5[gtx]['ssh_segments'][key][k].dims[i].attach_scale(
-                    h5[gtx]['ssh_segments'][dim])
-            # add HDF5 variable attributes
-            for att_name,att_val in attrs.items():
-                h5[gtx]['ssh_segments'][key][k].attrs[att_name] = att_val
-
-    # HDF5 file title
-    fileID.attrs['featureType'] = 'trajectory'
-    fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Ocean Surface Height'
-    fileID.attrs['summary'] = ('Estimates of the ocean surface tidal parameters '
-        'needed to interpret and assess the quality of ocean height estimates.')
-    fileID.attrs['description'] = ('Sea Surface Height (SSH) of the global '
-        'open ocean including the ice-free seasonal ice zone (SIZ) and '
-        'near-coast regions.')
-    date_created = datetime.datetime.today()
-    fileID.attrs['date_created'] = date_created.isoformat()
-    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
-    fileID.attrs['project'] = project
-    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
-    fileID.attrs['platform'] = platform
-    # add attribute for elevation instrument and designated processing level
-    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
-    fileID.attrs['instrument'] = instrument
-    fileID.attrs['source'] = 'Spacecraft'
-    fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
-    fileID.attrs['processing_level'] = '4'
-    # add attributes for input ATL12 file
-    fileID.attrs['input_files'] = os.path.basename(INPUT)
-    # find geospatial and temporal ranges
-    lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
-    for gtx in beams:
-        lon = IS2_atl12_tide[gtx]['ssh_segments']['longitude']
-        lat = IS2_atl12_tide[gtx]['ssh_segments']['latitude']
-        delta_time = IS2_atl12_tide[gtx]['ssh_segments']['delta_time']
-        # setting the
geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl12_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat-2 ATL12 ocean surface height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL12 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL12 file - for FILE in args.infile: - compute_LPET_ICESat2(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_ICESat_GLA12.py b/scripts/compute_LPET_ICESat_GLA12.py deleted file mode 100644 index 5c418e38..00000000 --- a/scripts/compute_LPET_ICESat_GLA12.py +++ /dev/null @@ -1,360 +0,0 @@ -#!/usr/bin/env python -u""" 
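# The Julian-day conversion above uses the Modified Julian Date epoch
# (1858-11-17, which is JD 2400000.5); a hedged standard-library check for
# one hypothetical GPS time.
import datetime
mjd_epoch = datetime.datetime(1858, 11, 17)
gps_epoch = datetime.datetime(1980, 1, 6)
gps_seconds = 630720013.0  # hypothetical seconds since the GPS epoch
delta = (gps_epoch - mjd_epoch) + datetime.timedelta(seconds=gps_seconds)
time_julian = 2400000.5 + delta.total_seconds()/86400.0
print(time_julian)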
-compute_LPET_ICESat_GLA12.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting - ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data -Will calculate the long-period tides for all GLAS elevations and not just - ocean elevations defined by the ocean tide mask - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - spatial.py: utilities for reading, writing and operating on spatial data - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 02/2022: save ICESat campaign attribute to output file - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named GLA12 file as input - Updated 12/2020: H5py deprecation warning change to use make_scale - Written 12/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import warnings -import numpy as np -import pyTMD.time -import pyTMD.spatial -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat ice sheet HDF5 elevation data (GLAH12) from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_ICESat(INPUT_FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get directory from INPUT_FILE - logger.info('{0} -->'.format(INPUT_FILE)) - DIRECTORY = os.path.dirname(INPUT_FILE) - - # compile regular expression operator for extracting information from file - rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_' - r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE) - # extract parameters from ICESat/GLAS HDF5 file name - # PRD: Product number (01, 05, 06, 12, 13, 14, or 15) - # RL: Release number for process that created the product = 634 - # RGTP: Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit) - # ORB: Reference orbit number (starts at 1 and increments each time a - # new reference orbit ground track file is obtained.) 
- # INST: Instance number (increments every time the satellite enters a - # different reference orbit) - # CYCL: Cycle of reference orbit for this phase - # TRK: Track within reference orbit - # SEG: Segment of orbit - # GRAN: Granule version number - # TYPE: File type - try: - PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE = rx.findall(INPUT_FILE).pop() - except: - # output long-period equilibrium tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPET',fileExtension) - else: - # output long-period equilibrium tide HDF5 file for NSIDC granules - args = (PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) - file_format = 'GLAH{0}_{1}_LPET_{2}{3}{4}_{5}_{6}_{7}_{8}_{9}.h5' - OUTPUT_FILE = file_format.format(*args) - - # read GLAH12 HDF5 file - fileID = h5py.File(INPUT_FILE,'r') - n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape - # get variables and attributes - rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy() - # seconds since 2000-01-01 12:00:00 UTC (J2000) - DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy() - # Latitude (degrees North) - lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy() - # Longitude (degrees East) - lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy() - # Elevation (height above TOPEX/Poseidon ellipsoid in meters) - elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy() - fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs['_FillValue'] - - # semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids - atop,ftop = (6378136.3,1.0/298.257) - awgs,fwgs = (6378137.0,1.0/298.257223563) - # convert from Topex/Poseidon to WGS84 Ellipsoids - lat_40HZ,elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX, elev_TPX, - atop, ftop, awgs, fwgs, eps=1e-12, itmax=10) - - # convert time from J2000 to days relative to Jan 1, 1992 (48622mjd) - # J2000: seconds since 2000-01-01 12:00:00 UTC - tide_time = pyTMD.time.convert_delta_time(DS_UTCTime_40HZ, - epoch1=(2000,1,1,12,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # predict long-period equilibrium tides at latitudes and time - tide_lpe = compute_equilibrium_tide(tide_time + deltat, lat_40HZ) - - # copy variables for outputting to HDF5 file - IS_gla12_tide = dict(Data_40HZ={}) - IS_gla12_fill = dict(Data_40HZ={}) - IS_gla12_tide_attrs = dict(Data_40HZ={}) - - # copy global file attributes - global_attribute_list = ['featureType','title','comment','summary','license', - 'references','AccessConstraints','CitationforExternalPublication', - 'contributor_role','contributor_name','creator_name','creator_email', - 'publisher_name','publisher_email','publisher_url','platform','instrument', - 'processing_level','date_created','spatial_coverage_type','history', - 'keywords','keywords_vocabulary','naming_authority','project','time_type', - 'date_type','time_coverage_start','time_coverage_end', - 'time_coverage_duration','source','HDFVersion','identifier_product_type', - 'identifier_product_format_version','Conventions','institution', - 'ReprocessingPlanned','ReprocessingActual','LocalGranuleID', - 'ProductionDateTime','LocalVersionID','PGEVersion','OrbitNumber', - 'StartOrbitNumber','StopOrbitNumber','EquatorCrossingLongitude', - 
'EquatorCrossingTime','EquatorCrossingDate','ShortName','VersionID', - 'InputPointer','RangeBeginningTime','RangeEndingTime','RangeBeginningDate', - 'RangeEndingDate','PercentGroundHit','OrbitQuality','Cycle','Track', - 'Instrument_State','Timing_Bias','ReferenceOrbit','SP_ICE_PATH_NO', - 'SP_ICE_GLAS_StartBlock','SP_ICE_GLAS_EndBlock','Instance','Range_Bias', - 'Instrument_State_Date','Instrument_State_Time','Range_Bias_Date', - 'Range_Bias_Time','Timing_Bias_Date','Timing_Bias_Time', - 'identifier_product_doi','identifier_file_uuid', - 'identifier_product_doi_authority'] - for att in global_attribute_list: - IS_gla12_tide_attrs[att] = fileID.attrs[att] - # copy ICESat campaign name from ancillary data - IS_gla12_tide_attrs['Campaign'] = fileID['ANCILLARY_DATA'].attrs['Campaign'] - - # add attributes for input GLA12 file - IS_gla12_tide_attrs['input_files'] = os.path.basename(INPUT_FILE) - # update geospatial ranges for ellipsoid - IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north" - IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east" - IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84" - - # copy 40Hz group attributes - for att_name,att_val in fileID['Data_40HZ'].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val - # copy attributes for time, geolocation and geophysical groups - for var in ['Time','Geolocation','Geophysical']: - IS_gla12_tide['Data_40HZ'][var] = {} - IS_gla12_fill['Data_40HZ'][var] = {} - IS_gla12_tide_attrs['Data_40HZ'][var] = {} - for att_name,att_val in fileID['Data_40HZ'][var].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val - - # J2000 time - IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ - IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {} - for att_name,att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][att_name] = att_val - # record - IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ - IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {} - for att_name,att_val in fileID['Data_40HZ']['Time']['i_rec_ndx'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][att_name] = att_val - # latitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lat'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][att_name] = att_val - # longitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lon'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][att_name] = att_val - - # 
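# pyTMD.spatial.convert_ellipsoid above solves the Topex/Poseidon-to-WGS84
# shift iteratively; this is only a first-order sketch of why the datum
# matters, using the surface-radius approximation r(lat) ~ a*(1 - f*sin(lat)**2).
import numpy as np
atop, ftop = (6378136.3, 1.0/298.257)        # Topex/Poseidon
awgs, fwgs = (6378137.0, 1.0/298.257223563)  # WGS84
lat = np.radians(-70.0)
r_tpx = atop*(1.0 - ftop*np.sin(lat)**2)
r_wgs = awgs*(1.0 - fwgs*np.sin(lat)**2)
# approximate height change when switching ellipsoids (about -0.7 m here)
print(r_tpx - r_wgs)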
geophysical variables - # computed long-period equilibrium tide - IS_gla12_tide['Data_40HZ']['Geophysical']['d_eqElv'] = tide_lpe - IS_gla12_fill['Data_40HZ']['Geophysical']['d_eqElv'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv']['units'] = "meters" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv']['long_name'] = \ - "Long Period Equilibrium Tide" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv']['description'] = ("Long-period " - "equilibrium tidal elevation from the summation of fifteen tidal spectral lines") - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv']['reference'] = \ - "https://doi.org/10.1111/j.1365-246X.1973.tb03420.x" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_eqElv']['coordinates'] = \ - "../DS_UTCTime_40" - - # close the input HDF5 file - fileID.close() - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_tide_attrs, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE), - FILL_VALUE=IS_gla12_fill, CLOBBER=True) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat data to HDF5 -def HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_attrs, - FILENAME='', FILL_VALUE=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - # create 40HZ HDF5 records - h5 = dict(Data_40HZ={}) - - # add HDF5 file attributes - attrs = {a:v for a,v in IS_gla12_attrs.items() if not isinstance(v,dict)} - for att_name,att_val in attrs.items(): - fileID.attrs[att_name] = att_val - - # create Data_40HZ group - fileID.create_group('Data_40HZ') - # add HDF5 40HZ group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'].items(): - if att_name not in ('DS_UTCTime_40',) and not isinstance(att_val,dict): - fileID['Data_40HZ'].attrs[att_name] = att_val - - # add 40HZ time variable - val = IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] - attrs = IS_gla12_attrs['Data_40HZ']['DS_UTCTime_40'] - # Defining the HDF5 dataset variables - var = '{0}/{1}'.format('Data_40HZ','DS_UTCTime_40') - h5['Data_40HZ']['DS_UTCTime_40'] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, compression='gzip') - # make dimension - h5['Data_40HZ']['DS_UTCTime_40'].make_scale('DS_UTCTime_40') - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ']['DS_UTCTime_40'].attrs[att_name] = att_val - - # for each variable group - for group in ['Time','Geolocation','Geophysical']: - # add group to dict - h5['Data_40HZ'][group] = {} - # create Data_40HZ group - fileID.create_group('Data_40HZ/{0}'.format(group)) - # add HDF5 group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'][group].items(): - if not isinstance(att_val,dict): - fileID['Data_40HZ'][group].attrs[att_name] = att_val - # for each variable in the group - for key,val in IS_gla12_tide['Data_40HZ'][group].items(): - fillvalue = FILL_VALUE['Data_40HZ'][group][key] - attrs = IS_gla12_attrs['Data_40HZ'][group][key] - # Defining the HDF5 dataset variables - var = '{0}/{1}/{2}'.format('Data_40HZ',group,key) - # use variable compression if containing fill values - if fillvalue: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - fillvalue=fillvalue, 
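# A hedged sketch of the two attribute-copy filters used in these writers:
# h5py bookkeeping names are skipped when copying variable attributes, and
# nested dictionaries are skipped when copying file-level attributes (the
# dictionaries below are made-up stand-ins for real HDF5 attribute sets).
attrs = {'units': 'meters', 'DIMENSION_LIST': object(), 'CLASS': object()}
copied = {a: v for a, v in attrs.items()
    if a not in ('DIMENSION_LIST', 'CLASS', 'NAME')}
print(copied)  # {'units': 'meters'}
file_attrs = {'title': 'GLAH12', 'Data_40HZ': {'nested': 'group attrs'}}
top_level = {a: v for a, v in file_attrs.items() if not isinstance(v, dict)}
print(top_level)  # {'title': 'GLAH12'}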
compression='gzip') - else: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - compression='gzip') - # attach dimensions - for i,dim in enumerate(['DS_UTCTime_40']): - h5['Data_40HZ'][group][key].dims[i].attach_scale( - h5['Data_40HZ'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ'][group][key].attrs[att_name] = att_val - - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet - elevation data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat GLA12 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input GLA12 file - for FILE in args.infile: - compute_LPET_ICESat(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPET_icebridge_data.py b/scripts/compute_LPET_icebridge_data.py deleted file mode 100644 index f8e1fc11..00000000 --- a/scripts/compute_LPET_icebridge_data.py +++ /dev/null @@ -1,592 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPET_icebridge_data.py -Written by Tyler Sutterley (07/2022) -Calculates long-period equilibrium tidal elevations for correcting Operation - IceBridge elevation data - -INPUTS: - ATM1B, ATM icessn or LVIS file from NSIDC - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - utilities.py: download and management utilities for syncing files - calc_delta_time.py: calculates difference between universal and dynamic time - compute_equilibrium_tide.py: calculates long-period equilibrium ocean tides - read_ATM1b_QFIT_binary.py: read ATM1b QFIT binary files (NSIDC version 1) - -UPDATE HISTORY: - Updated 07/2022: update imports of ATM1b QFIT functions to released version - place some imports within try/except statements - Updated 04/2022: include utf-8 encoding in reads to 
be windows compliant - use argparse descriptions within sphinx documentation - Updated 10/2021: using python logging for handling verbose output - using collections to store attributes in order of creation - Updated 07/2021: can use prefix files to define command line arguments - Updated 05/2021: modified import of ATM1b QFIT reader - Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: merged time conversion routines into module - Updated 10/2020: using argparse to set command line parameters - Updated 09/2020: output days since 1992-01-01 as time variable - Written 08/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import time -import logging -import argparse -import warnings -import collections -import numpy as np -import pyTMD.time -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -# attempt imports -try: - import ATM1b_QFIT.read_ATM1b_QFIT_binary -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("ATM1b_QFIT not available") -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: reading the number of file lines removing commented lines -def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): - # subset the data to indices if specified - if input_subsetter: - file_lines = len(input_subsetter) - elif HDF5: - # read the size of an input variable within a HDF5 file - with h5py.File(input_file,'r') as fileID: - file_lines, = fileID[HDF5].shape - elif QFIT: - # read the size of a QFIT binary file - file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) - else: - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - i = [i for i in f.readlines() if re.match(r'^(?!\#|\n)',i)] - file_lines = len(i) - # return the number of lines - return file_lines - -## PURPOSE: read the ATM Level-1b data file for variables of interest -def read_ATM_qfit_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - mission_flag = r'(BLATM1B|ILATM1B|ILNSA1B)' - regex_pattern = r'{0}_(\d+)_(\d+)(.*?).(qi|TXT|h5)'.format(mission_flag) - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millenia (e.g. 93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # output python dictionary with variables - ATM_L1b_input = {} - # Version 1 of ATM QFIT files (ascii) - # output text file from qi2txt with proper filename format - # do not use the shortened output format from qi2txt - if (SFX == 'TXT'): - # compile regular expression operator for reading lines - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
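# A minimal sketch of the comment-skipping line count used by file_length
# above, run on an in-memory example instead of a real ATM file; the
# lookahead rejects lines beginning with '#' and bare newlines.
import re
file_contents = ["# header comment\n", "1 2 3\n", "\n", "4 5 6\n"]
lines = [i for i in file_contents if re.match(r'^(?!\#|\n)', i)]
print(len(lines))  # 2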
- rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # create output variables with length equal to the number of lines - ATM_L1b_input['lat'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['lon'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['data'] = np.zeros_like(file_contents,dtype=np.float64) - hour = np.zeros_like(file_contents,dtype=np.float64) - minute = np.zeros_like(file_contents,dtype=np.float64) - second = np.zeros_like(file_contents,dtype=np.float64) - # for each line within the file - for i,line in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line) - ATM_L1b_input['lat'][i] = np.float64(line_contents[1]) - ATM_L1b_input['lon'][i] = np.float64(line_contents[2]) - ATM_L1b_input['data'][i] = np.float64(line_contents[3]) - hour[i] = np.float64(line_contents[-1][:2]) - minute[i] = np.float64(line_contents[-1][2:4]) - second[i] = np.float64(line_contents[-1][4:]) - # Version 1 of ATM QFIT files (binary) - elif (SFX == 'qi'): - # read input QFIT data file and subset if specified - fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,QFIT=True) - ATM_L1b_input['lat'] = fid['latitude'][:] - ATM_L1b_input['lon'] = fid['longitude'][:] - ATM_L1b_input['data'] = fid['elevation'][:] - time_hhmmss = fid['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # Version 2 of ATM QFIT files (HDF5) - elif (SFX == 'h5'): - # Open the HDF5 file for reading - fileID = h5py.File(os.path.expanduser(input_file), 'r') - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,HDF5='elevation') - # create output variables with length equal to input elevation - ATM_L1b_input['lat'] = fileID['latitude'][:] - ATM_L1b_input['lon'] = fileID['longitude'][:] - ATM_L1b_input['data'] = fileID['elevation'][:] - time_hhmmss = fileID['instrument_parameters']['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # close the input HDF5 file - fileID.close() - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = 
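# A hedged sketch of the packed HHMMSS.sss decoding used above;
# 123456.789 is a hypothetical time_hhmmss value meaning 12:34:56.789.
import numpy as np
packed_time = 123456.789
line_contents = '{0:010.3f}'.format(packed_time)  # '123456.789'
hour = np.float64(line_contents[:2])     # 12.0
minute = np.float64(line_contents[2:4])  # 34.0
second = np.float64(line_contents[4:])   # 56.789
print(hour, minute, second)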
pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - # calculation of Julian day taking into account leap seconds - # converting to J2000 seconds - ATM_L1b_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0,0),scale=86400.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L1b_input.items(): - ATM_L1b_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L1b_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L1b_input['lat'] < 0.0) - # determine hemisphere with containing shots in file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L1b_input,file_lines,HEM - -# PURPOSE: read the ATM Level-2 data file for variables of interest -def read_ATM_icessn_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - regex_pattern=r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millenia (e.g. 93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # input file column names for variables of interest with column indices - # variables not used: (SNslope:4, WEslope:5, npt_used:7, npt_edit:8, d:9) - file_dtype = {'seconds':0, 'lat':1, 'lon':2, 'data':3, 'RMS':6, 'track':-1} - # compile regular expression operator for reading lines (extracts numbers) - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
- rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # output python dictionary with variables - ATM_L2_input = {} - # create output variables with length equal to the number of file lines - for key in file_dtype.keys(): - ATM_L2_input[key] = np.zeros_like(file_contents, dtype=np.float64) - # for each line within the file - for line_number,line_entries in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line_entries) - # for each variable of interest: save to dinput as float - for key,val in file_dtype.items(): - ATM_L2_input[key][line_number] = np.float64(line_contents[val]) - # convert shot time (seconds of day) to J2000 - hour = np.floor(ATM_L2_input['seconds']/3600.0) - minute = np.floor((ATM_L2_input['seconds'] % 3600)/60.0) - second = ATM_L2_input['seconds'] % 60.0 - # First column in Pre-IceBridge and ICESSN Version 1 files is GPS time - if (MISSION == 'BLATM2') or (SFX != 'csv'): - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - else: - leap_seconds = 0.0 - # calculation of Julian day - # converting to J2000 seconds - ATM_L2_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0,0),scale=86400.0) - # convert RMS from centimeters to meters - ATM_L2_input['error'] = ATM_L2_input['RMS']/100.0 - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L2_input.items(): - ATM_L2_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L2_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L2_input['lat'] < 0.0) - # determine hemisphere with containing shots in file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L2_input,file_lines,HEM - -# PURPOSE: read the LVIS Level-2 data file for variables of interest -def read_LVIS_HDF5_file(input_file, input_subsetter): - # LVIS region flags: GL for Greenland and AQ for Antarctica - lvis_flag = {'GL':'N','AQ':'S'} - # regular expression pattern for extracting parameters from HDF5 files - # computed in read_icebridge_lvis.py - mission_flag = '(BLVIS2|BVLIS2|ILVIS2|ILVGH2)' - regex_pattern = r'{0}_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5'.format(mission_flag) - # extract mission, region and other parameters from filename - MISSION,REGION,YY,MMDD,RLD,SS = re.findall(regex_pattern,input_file).pop() - LDS_VERSION = '2.0.2' if (int(RLD[1:3]) >= 18) else '1.04' - # input and output python dictionaries with variables - file_input = {} - LVIS_L2_input = {} - fileID = h5py.File(input_file,'r') - # create output variables with length equal to input shot number - file_lines = file_length(input_file,input_subsetter,HDF5='Shot_Number') - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS104.html - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS202.html - if (LDS_VERSION == '1.04'): - # elevation surfaces - 
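# The float-matching pattern above is reused across these readers; a
# hedged sketch on one made-up icessn-style record.
import re
regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
rx = re.compile(regex_pattern, re.VERBOSE)
line = '43161.00 76.500 -68.750 1802.15 0.05 0.03 12'
# prints every numeric token in order of appearance
print(rx.findall(line))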
file_input['elev'] = fileID['Elevation_Surfaces/Elevation_Centroid'][:] - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # latitude - file_input['lat'] = fileID['Geolocation/Latitude_Centroid'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon'] = fileID['Geolocation/Longitude_Centroid'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - elif (LDS_VERSION == '2.0.2'): - # elevation surfaces - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # heights above lowest detected mode - file_input['RH50'] = fileID['Waveform/RH50'][:] - file_input['RH100'] = fileID['Waveform/RH100'][:] - # calculate centroidal elevation using 50% of waveform energy - file_input['elev'] = file_input['elev_low'] + file_input['RH50'] - # latitude - file_input['lat_top'] = fileID['Geolocation/Latitude_Top'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon_top'] = fileID['Geolocation/Longitude_Top'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - # linearly interpolate latitude and longitude to RH50 - file_input['lat'] = file_input['lat_low'] + file_input['RH50'] * \ - (file_input['lat_top'] - file_input['lat_low'])/file_input['RH100'] - file_input['lon'] = file_input['lon_low'] + file_input['RH50'] * \ - (file_input['lon_top'] - file_input['lon_low'])/file_input['RH100'] - # J2000 seconds - LVIS_L2_input['time'] = fileID['Time/J2000'][:] - # close the input HDF5 file - fileID.close() - # output combined variables - LVIS_L2_input['data'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lon'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lat'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['error'] = np.zeros_like(file_input['elev'],dtype=np.float64) - # find where elev high is equal to elev low - # see note about using LVIS centroid elevation product - # http://lvis.gsfc.nasa.gov/OIBDataStructure.html - ii = np.nonzero(file_input['elev_low'] == file_input['elev_high']) - jj = np.nonzero(file_input['elev_low'] != file_input['elev_high']) - # where lowest point of waveform is equal to highest point --> - # using the elev_low elevation - LVIS_L2_input['data'][ii] = file_input['elev_low'][ii] - # for other locations use the centroid elevation - # as the centroid is a useful product over rough terrain - # when you are calculating ice volume change - LVIS_L2_input['data'][jj] = file_input['elev'][jj] - # latitude and longitude for each case - # elevation low == elevation high - LVIS_L2_input['lon'][ii] = file_input['lon_low'][ii] - LVIS_L2_input['lat'][ii] = file_input['lat_low'][ii] - # centroid elevations - LVIS_L2_input['lon'][jj] = file_input['lon'][jj] - LVIS_L2_input['lat'][jj] = file_input['lat'][jj] - # estimated uncertainty for both cases - LVIS_variance_low = (file_input['elev_low'] - file_input['elev'])**2 - LVIS_variance_high = (file_input['elev_high'] - file_input['elev'])**2 - LVIS_L2_input['error']=np.sqrt((LVIS_variance_low + LVIS_variance_high)/2.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in LVIS_L2_input.items(): - LVIS_L2_input[key] = val[input_subsetter] - # return the output variables - return LVIS_L2_input,file_lines,lvis_flag[REGION] - -# PURPOSE: read Operation 
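# A hedged numpy sketch of the LDS 2.0.2 centroid recovery above: the
# 50%-energy elevation is elev_low + RH50, and geolocation is interpolated
# between the low and top returns by the RH50/RH100 ratio (values are
# hypothetical).
import numpy as np
elev_low, RH50, RH100 = np.float64(120.0), np.float64(1.2), np.float64(3.0)
lat_low, lat_top = np.float64(-75.001), np.float64(-75.003)
elev = elev_low + RH50
lat = lat_low + RH50*(lat_top - lat_low)/RH100
print(elev, lat)  # 121.2 -75.0018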
IceBridge data from NSIDC -# compute long-period equilibrium tides at points and times -def compute_LPET_icebridge_data(arg, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # extract file name and subsetter indices lists - match_object = re.match(r'(.*?)(\[(.*?)\])?$',arg) - input_file = os.path.expanduser(match_object.group(1)) - # subset input file to indices - if match_object.group(2): - # decompress ranges and add to list - input_subsetter = [] - for i in re.findall(r'((\d+)-(\d+)|(\d+))',match_object.group(3)): - input_subsetter.append(int(i[3])) if i[3] else \ - input_subsetter.extend(range(int(i[1]),int(i[2])+1)) - else: - input_subsetter = None - - # output directory for input_file - DIRECTORY = os.path.dirname(input_file) - # calculate if input files are from ATM or LVIS (+GH) - regex = {} - regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$' - regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - for key,val in regex.items(): - if re.match(val, os.path.basename(input_file)): - OIB = key - - # HDF5 file attributes - attrib = collections.OrderedDict() - # time - attrib['time'] = {} - attrib['time']['long_name'] = 'Time' - attrib['time']['units'] = 'days since 1992-01-01T00:00:00' - attrib['time']['standard_name'] = 'time' - attrib['time']['calendar'] = 'standard' - # latitude - attrib['lat'] = {} - attrib['lat']['long_name'] = 'Latitude_of_measurement' - attrib['lat']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lat']['units'] = 'Degrees_North' - # longitude - attrib['lon'] = {} - attrib['lon']['long_name'] = 'Longitude_of_measurement' - attrib['lon']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lon']['units'] = 'Degrees_East' - # long-period equilibrium tides - attrib['tide_lpe'] = {} - attrib['tide_lpe']['long_name'] = 'Equilibrium_Tide' - attrib['tide_lpe']['description'] = ('Long-period_equilibrium_tidal_elevation_' - 'from_the_summation_of_fifteen_tidal_spectral_lines_at_the_measurement_' - 'position_at_the_acquisition_time') - attrib['tide_lpe']['reference'] = ('https://doi.org/10.1111/' - 'j.1365-246X.1973.tb03420.x') - attrib['tide_lpe']['units'] = 'meters' - - # extract information from first input file - # acquisition year, month and day - # number of points - # instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS) - if OIB in ('ATM','ATM1b'): - M1,YYMMDD1,HHMMSS1,AX1,SF1 = re.findall(regex[OIB], input_file).pop() - # early date strings omitted century and millenia (e.g. 
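# A hedged sketch of the index-subsetter syntax parsed above, where an
# input argument like 'file.h5[1-3,8]' selects records 1-3 and 8.
import re
match_object = re.match(r'(.*?)(\[(.*?)\])?$', 'file.h5[1-3,8]')
input_file = match_object.group(1)
input_subsetter = []
for i in re.findall(r'((\d+)-(\d+)|(\d+))', match_object.group(3)):
    input_subsetter.append(int(i[3])) if i[3] else \
        input_subsetter.extend(range(int(i[1]), int(i[2])+1))
print(input_file, input_subsetter)  # file.h5 [1, 2, 3, 8]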
93 for 1993) - if (len(YYMMDD1) == 6): - ypre,MM1,DD1 = YYMMDD1[:2],YYMMDD1[2:4],YYMMDD1[4:] - if (np.float64(ypre) >= 90): - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 1900.0) - else: - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 2000.0) - elif (len(YYMMDD1) == 8): - YY1,MM1,DD1 = YYMMDD1[:4],YYMMDD1[4:6],YYMMDD1[6:] - elif OIB in ('LVIS','LVGH'): - M1,RG1,YY1,MMDD1,RLD1,SS1 = re.findall(regex[OIB], input_file).pop() - MM1,DD1 = MMDD1[:2],MMDD1[2:] - - # read data from input_file - logger.info('{0} -->'.format(input_file)) - if (OIB == 'ATM'): - # load IceBridge ATM data from input_file - dinput,file_lines,HEM = read_ATM_icessn_file(input_file,input_subsetter) - elif (OIB == 'ATM1b'): - # load IceBridge Level-1b ATM data from input_file - dinput,file_lines,HEM = read_ATM_qfit_file(input_file,input_subsetter) - elif OIB in ('LVIS','LVGH'): - # load IceBridge LVIS data from input_file - dinput,file_lines,HEM = read_LVIS_HDF5_file(input_file,input_subsetter) - - # convert time from J2000 to days relative to Jan 1, 1992 (48622mjd) - # J2000: seconds since 2000-01-01 12:00:00 UTC - tide_time = pyTMD.time.convert_delta_time(dinput['time'], - epoch1=(2000,1,1,12,0,0), epoch2=(1992,1,1,0,0,0), - scale=1.0/86400.0) - # interpolate delta times from calendar dates to tide time - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - deltat = calc_delta_time(delta_file, tide_time) - - # output tidal HDF5 file - # form: rg_NASA_model_EQUILIBRIUM_TIDES_WGS84_fl1yyyymmddjjjjj.H5 - # where rg is the hemisphere flag (GR or AN) for the region - # fl1 and fl2 are the data flags (ATM, LVIS, GLAS) - # yymmddjjjjj is the year, month, day and second of the input file - # output region flags: GR for Greenland and AN for Antarctica - hem_flag = {'N':'GR','S':'AN'} - # use starting second to distinguish between files for the day - JJ1 = np.min(dinput['time']) % 86400 - # output file format - file_format = '{0}_NASA_EQUILIBRIUM_TIDES_WGS84_{1}{2}{3}{4}{5:05.0f}.H5' - FILENAME = file_format.format(hem_flag[HEM],OIB,YY1,MM1,DD1,JJ1) - # print file information - logger.info('\t{0}'.format(FILENAME)) - - # open output HDF5 file - fid = h5py.File(os.path.join(DIRECTORY,FILENAME), 'w') - - # predict long-period equilibrium tides at time - dinput['tide_lpe'] = compute_equilibrium_tide(tide_time + deltat, - dinput['lat']) - - # output dictionary with HDF5 variables - h5 = {} - # add variables to output file - for key,attributes in attrib.items(): - # Defining the HDF5 dataset variables for lat/lon - h5[key] = fid.create_dataset(key, (file_lines,), - data=dinput[key][:], dtype=dinput[key].dtype, - compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in attributes.items(): - h5[key].attrs[att_name] = att_val - # attach dimensions - if key not in ('time',): - for i,dim in enumerate(['time']): - h5[key].dims[i].label = 'RECORD_SIZE' - h5[key].dims[i].attach_scale(h5[dim]) - - # HDF5 file attributes - fid.attrs['featureType'] = 'trajectory' - fid.attrs['title'] = ('Long-Period_Equilibrium_tidal_correction_for_' - 'elevation_measurements') - fid.attrs['summary'] = ('Tidal_correction_computed_at_elevation_' - 'measurements_using_fifteen_spectral_lines.') - fid.attrs['project'] = 'NASA_Operation_IceBridge' - fid.attrs['processing_level'] = '4' - fid.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - # add attributes for input file - fid.attrs['elevation_file'] = os.path.basename(input_file) - # add geospatial and temporal attributes - fid.attrs['geospatial_lat_min'] = 
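# A hedged sketch of the output-name construction above; the start-of-day
# second JJ1 disambiguates multiple granules from one day (all values
# below are hypothetical).
import numpy as np
hem_flag = {'N':'GR', 'S':'AN'}
HEM, OIB, YY1, MM1, DD1 = 'S', 'ATM', '2018', '10', '16'
JJ1 = np.min(np.array([559312345.6, 559312400.2])) % 86400
file_format = '{0}_NASA_EQUILIBRIUM_TIDES_WGS84_{1}{2}{3}{4}{5:05.0f}.H5'
print(file_format.format(hem_flag[HEM], OIB, YY1, MM1, DD1, JJ1))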
dinput['lat'].min() - fid.attrs['geospatial_lat_max'] = dinput['lat'].max() - fid.attrs['geospatial_lon_min'] = dinput['lon'].min() - fid.attrs['geospatial_lon_max'] = dinput['lon'].max() - fid.attrs['geospatial_lat_units'] = "degrees_north" - fid.attrs['geospatial_lon_units'] = "degrees_east" - fid.attrs['geospatial_ellipsoid'] = "WGS84" - fid.attrs['time_type'] = 'UTC' - # convert start/end time from days since 1992-01-01 into Julian days - time_range = np.array([np.min(tide_time),np.max(tide_time)]) - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(time_range, - epoch1=(1992,1,1,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0) - # convert to calendar date - cal = pyTMD.time.convert_julian(time_julian,astype=int) - # add attributes with measurement date start, end and duration - args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) - fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['hour'][-1],cal['minute'][-1],cal['second'][-1]) - fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['year'][0],cal['month'][0],cal['day'][0]) - fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - args = (cal['year'][-1],cal['month'][-1],cal['day'][-1]) - fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - duration = np.round(time_julian[-1]*86400.0 - time_julian[0]*86400.0) - fid.attrs['DurationTimeSeconds'] = '{0:0.0f}'.format(duration) - # close the output HDF5 dataset - fid.close() - # change the permissions level to MODE - os.chmod(os.path.join(DIRECTORY,FILENAME), MODE) - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates long-period equilibrium tidal elevations for - correcting Operation IceBridge elevation data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line options - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='Input Operation IceBridge file to run') - # verbosity settings - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of output file') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input file - for arg in args.infile: - compute_LPET_icebridge_data(arg, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPT_ICESat_GLA12.py b/scripts/compute_LPT_ICESat_GLA12.py deleted file mode 100644 index 785b5cef..00000000 --- a/scripts/compute_LPT_ICESat_GLA12.py +++ /dev/null @@ -1,438 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPT_ICESat_GLA12.py -Written by Tyler Sutterley (07/2022) -Calculates radial load pole tide displacements for correcting ICESat/GLAS - L2 GLA12 Antarctic and Greenland Ice Sheet elevation data following - IERS Convention (2010) guidelines - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: 
Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - spatial.py: utilities for reading, writing and operating on spatial data - utilities.py: download and management utilities for syncing files - iers_mean_pole.py: provides the angular coordinates of IERS Mean Pole - read_iers_EOP.py: read daily earth orientation parameters from IERS - -REFERENCES: - S Desai, "Observing the pole tide with satellite altimetry", Journal of - Geophysical Research: Oceans, 107(C11), 2002. doi: 10.1029/2001JC001224 - S Desai, J Wahr and B Beckley "Revisiting the pole tide for and from - satellite altimetry", Journal of Geodesy, 89(12), p1233-1243, 2015. - doi: 10.1007/s00190-015-0848-7 - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use argparse descriptions within documentation - Updated 02/2022: save ICESat campaign attribute to output file - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named GLA12 file as input - Updated 03/2021: use cartesian coordinate conversion routine in spatial - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 12/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import warnings -import numpy as np -import scipy.interpolate -import pyTMD.time -import pyTMD.spatial -import pyTMD.utilities -from pyTMD.iers_mean_pole import iers_mean_pole -from pyTMD.read_iers_EOP import read_iers_EOP -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat ice sheet HDF5 elevation data (GLAH12) from NSIDC -# compute load pole tide radial displacements at points and times -def compute_LPT_ICESat(FILE, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get directory from FILE - logger.info('{0} -->'.format(FILE)) - DIRECTORY = os.path.dirname(FILE) - - # compile regular expression operator for extracting information from file - rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_' - r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE) - # extract parameters from ICESat/GLAS HDF5 file name - # PRD: Product number (01, 05, 06, 12, 13, 14, or 15) - # RL: Release number for process that created the product = 634 - # RGTP: Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit) - # ORB: Reference orbit number (starts at 1 and increments each time a - # new reference orbit ground track file is obtained.) 
- # INST: Instance number (increments every time the satellite enters a - # different reference orbit) - # CYCL: Cycle of reference orbit for this phase - # TRK: Track within reference orbit - # SEG: Segment of orbit - # GRAN: Granule version number - # TYPE: File type - try: - PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE = rx.findall(FILE).pop() - except: - # output load pole tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'LPT',fileExtension) - else: - # output load pole tide HDF5 file for NSIDC granules - args = (PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) - file_format = 'GLAH{0}_{1}_LPT_{2}{3}{4}_{5}_{6}_{7}_{8}_{9}.h5' - OUTPUT_FILE = file_format.format(*args) - - # read GLAH12 HDF5 file - fileID = h5py.File(FILE,'r') - n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape - # get variables and attributes - rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy() - # seconds since 2000-01-01 12:00:00 UTC (J2000) - DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy() - # Latitude (degrees North) - lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy() - # Longitude (degrees East) - lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy() - # Elevation (height above TOPEX/Poseidon ellipsoid in meters) - elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy() - fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs['_FillValue'] - - # convert time from UTC time of day to Modified Julian Days (MJD) - # J2000: seconds since 2000-01-01 12:00:00 UTC - t = DS_UTCTime_40HZ[:]/86400.0 + 51544.5 - # convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') - # convert calendar dates into year decimal - tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, - hour=HH,minute=MN,second=SS) - - # semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids - atop,ftop = (6378136.3,1.0/298.257) - awgs,fwgs = (6378137.0,1.0/298.257223563) - # convert from Topex/Poseidon to WGS84 Ellipsoids - lat_40HZ,elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX, elev_TPX, - atop, ftop, awgs, fwgs, eps=1e-12, itmax=10) - - # degrees to radians - dtr = np.pi/180.0 - atr = np.pi/648000.0 - # earth and physical parameters (IERS and WGS84) - G = 6.67428e-11# universal constant of gravitation [m^3/(kg*s^2)] - GM = 3.986004418e14# geocentric gravitational constant [m^3/s^2] - ge = 9.7803278# mean equatorial gravity [m/s^2] - a_axis = 6378136.6# semimajor axis of the WGS84 ellipsoid [m] - flat = 1.0/298.257223563# flattening of the WGS84 ellipsoid - b_axis = (1.0 -flat)*a_axis# semiminor axis of the WGS84 ellipsoid [m] - omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] - # tidal love number appropriate for the load tide - hb2 = 0.6207 - # Linear eccentricity, first and second numerical eccentricity - lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2) - ecc1 = lin_ecc/a_axis - ecc2 = lin_ecc/b_axis - # m parameter [omega^2*a^2*b/(GM)]. p. 
70, Eqn.(2-137) - m = omega**2*((1 -flat)*a_axis**3)/GM - # flattening components - f_2 = -flat + (5.0/2.0)*m + (1.0/2.0)*flat**2.0 - (26.0/7.0)*flat*m + \ - (15.0/4.0)*m**2.0 - f_4 = -(1.0/2.0)*flat**2.0 + (5.0/2.0)*flat*m - - # convert from geodetic latitude to geocentric latitude - # calculate X, Y and Z from geodetic latitude and longitude - X,Y,Z = pyTMD.spatial.to_cartesian(lon_40HZ,lat_40HZ,h=elev_40HZ, - a_axis=a_axis,flat=flat) - rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0) - # calculate geocentric latitude and convert to degrees - latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr - # colatitude and longitude in radians - theta = dtr*(90.0 - latitude_geocentric) - phi = lon_40HZ*dtr - - # compute normal gravity at spatial location and elevation of points. - # normal gravity at the equator. p. 79, Eqn.(2-186) - gamma_a = (GM/(a_axis*b_axis)) * (1.0-(3.0/2.0)*m - (3.0/14.0)*ecc2**2.0*m) - # Normal gravity. p. 80, Eqn.(2-199) - gamma_0 = gamma_a*(1.0 + f_2*np.cos(theta)**2.0 + - f_4*np.sin(np.pi*latitude_geocentric/180.0)**4.0) - # Normal gravity at height h. p. 82, Eqn.(2-215) - gamma_h = gamma_0*(1.0 - - (2.0/a_axis)*(1.0+flat+m-2.0*flat*np.cos(theta)**2.0)*elev_40HZ + \ - (3.0/a_axis**2.0)*elev_40HZ**2.0) - - # pole tide files (mean and daily) - mean_pole_file = pyTMD.utilities.get_data_path(['data','mean-pole.tab']) - pole_tide_file = pyTMD.utilities.get_data_path(['data','finals.all']) - # read IERS daily polar motion values - EOP = read_iers_EOP(pole_tide_file) - # create cubic spline interpolations of daily polar motion values - xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0) - ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0) - - # calculate angular coordinates of mean pole at time tdec - mpx,mpy,fl = iers_mean_pole(mean_pole_file,tdec,'2015') - # interpolate daily polar motion values to time using cubic splines - px = xSPL(t) - py = ySPL(t) - # calculate differentials from mean pole positions - mx = px - mpx - my = -(py - mpy) - # calculate radial displacement at time - dfactor = -hb2*atr*(omega**2*rr**2)/(2.0*gamma_h) - Srad = np.ma.zeros((n_40HZ),fill_value=fv) - Srad.data[:] = dfactor*np.sin(2.0*theta)*(mx*np.cos(phi) + my*np.sin(phi)) - # replace fill values - Srad.mask = np.isnan(Srad.data) - Srad.data[Srad.mask] = Srad.fill_value - - # copy variables for outputting to HDF5 file - IS_gla12_tide = dict(Data_40HZ={}) - IS_gla12_fill = dict(Data_40HZ={}) - IS_gla12_tide_attrs = dict(Data_40HZ={}) - - # copy global file attributes - global_attribute_list = ['featureType','title','comment','summary','license', - 'references','AccessConstraints','CitationforExternalPublication', - 'contributor_role','contributor_name','creator_name','creator_email', - 'publisher_name','publisher_email','publisher_url','platform','instrument', - 'processing_level','date_created','spatial_coverage_type','history', - 'keywords','keywords_vocabulary','naming_authority','project','time_type', - 'date_type','time_coverage_start','time_coverage_end', - 'time_coverage_duration','source','HDFVersion','identifier_product_type', - 'identifier_product_format_version','Conventions','institution', - 'ReprocessingPlanned','ReprocessingActual','LocalGranuleID', - 'ProductionDateTime','LocalVersionID','PGEVersion','OrbitNumber', - 'StartOrbitNumber','StopOrbitNumber','EquatorCrossingLongitude', - 'EquatorCrossingTime','EquatorCrossingDate','ShortName','VersionID', - 'InputPointer','RangeBeginningTime','RangeEndingTime','RangeBeginningDate', - 
'RangeEndingDate','PercentGroundHit','OrbitQuality','Cycle','Track', - 'Instrument_State','Timing_Bias','ReferenceOrbit','SP_ICE_PATH_NO', - 'SP_ICE_GLAS_StartBlock','SP_ICE_GLAS_EndBlock','Instance','Range_Bias', - 'Instrument_State_Date','Instrument_State_Time','Range_Bias_Date', - 'Range_Bias_Time','Timing_Bias_Date','Timing_Bias_Time', - 'identifier_product_doi','identifier_file_uuid', - 'identifier_product_doi_authority'] - for att in global_attribute_list: - IS_gla12_tide_attrs[att] = fileID.attrs[att] - # copy ICESat campaign name from ancillary data - IS_gla12_tide_attrs['Campaign'] = fileID['ANCILLARY_DATA'].attrs['Campaign'] - - # add attributes for input GLA12 file - IS_gla12_tide_attrs['input_files'] = os.path.basename(FILE) - # update geospatial ranges for ellipsoid - IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north" - IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east" - IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84" - - # copy 40Hz group attributes - for att_name,att_val in fileID['Data_40HZ'].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val - # copy attributes for time, geolocation and geophysical groups - for var in ['Time','Geolocation','Geophysical']: - IS_gla12_tide['Data_40HZ'][var] = {} - IS_gla12_fill['Data_40HZ'][var] = {} - IS_gla12_tide_attrs['Data_40HZ'][var] = {} - for att_name,att_val in fileID['Data_40HZ'][var].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val - - # J2000 time - IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ - IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {} - for att_name,att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][att_name] = att_val - # record - IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ - IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {} - for att_name,att_val in fileID['Data_40HZ']['Time']['i_rec_ndx'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][att_name] = att_val - # latitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lat'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][att_name] = att_val - # longitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lon'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][att_name] = att_val - - # geophysical variables - # computed Solid Earth load pole tide - IS_gla12_tide['Data_40HZ']['Geophysical']['d_poElv'] = Srad - 
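# note: Srad is a numpy masked array, so the fill assignment below reuses
- # the GLA12 elevation _FillValue that was attached to Srad at creation
- 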
IS_gla12_fill['Data_40HZ']['Geophysical']['d_poElv'] = Srad.fill_value - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['units'] = "meters" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['long_name'] = \ - "Solid Earth Pole Tide" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['description'] = ("Solid " - "Earth pole tide radial displacements due to polar motion") - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['reference'] = \ - 'ftp://tai.bipm.org/iers/conv2010/chapter7/tn36_c7.pdf' - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_poElv']['coordinates'] = \ - "../DS_UTCTime_40" - - # close the input HDF5 file - fileID.close() - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_tide_attrs, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE), - FILL_VALUE=IS_gla12_fill, CLOBBER=True) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat data to HDF5 -def HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_attrs, - FILENAME='', FILL_VALUE=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - # create 40HZ HDF5 records - h5 = dict(Data_40HZ={}) - - # add HDF5 file attributes - attrs = {a:v for a,v in IS_gla12_attrs.items() if not isinstance(v,dict)} - for att_name,att_val in attrs.items(): - fileID.attrs[att_name] = att_val - - # create Data_40HZ group - fileID.create_group('Data_40HZ') - # add HDF5 40HZ group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'].items(): - if att_name not in ('DS_UTCTime_40',) and not isinstance(att_val,dict): - fileID['Data_40HZ'].attrs[att_name] = att_val - - # add 40HZ time variable - val = IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] - attrs = IS_gla12_attrs['Data_40HZ']['DS_UTCTime_40'] - # Defining the HDF5 dataset variables - var = '{0}/{1}'.format('Data_40HZ','DS_UTCTime_40') - h5['Data_40HZ']['DS_UTCTime_40'] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, compression='gzip') - # make dimension - h5['Data_40HZ']['DS_UTCTime_40'].make_scale('DS_UTCTime_40') - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ']['DS_UTCTime_40'].attrs[att_name] = att_val - - # for each variable group - for group in ['Time','Geolocation','Geophysical']: - # add group to dict - h5['Data_40HZ'][group] = {} - # create Data_40HZ group - fileID.create_group('Data_40HZ/{0}'.format(group)) - # add HDF5 group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'][group].items(): - if not isinstance(att_val,dict): - fileID['Data_40HZ'][group].attrs[att_name] = att_val - # for each variable in the group - for key,val in IS_gla12_tide['Data_40HZ'][group].items(): - fillvalue = FILL_VALUE['Data_40HZ'][group][key] - attrs = IS_gla12_attrs['Data_40HZ'][group][key] - # Defining the HDF5 dataset variables - var = '{0}/{1}/{2}'.format('Data_40HZ',group,key) - # use variable compression if containing fill values - if fillvalue: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - fillvalue=fillvalue, compression='gzip') - else: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - compression='gzip') 
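- # note: h5py only accepts a fill value at dataset creation, which is why
- # the two create_dataset branches above differ only in the fillvalue keyword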
- # attach dimensions - for i,dim in enumerate(['DS_UTCTime_40']): - h5['Data_40HZ'][group][key].dims[i].attach_scale( - h5['Data_40HZ'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ'][group][key].attrs[att_name] = att_val - - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates radial load pole tide displacements for - correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet - elevation data following IERS Convention (2010) guidelines - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat GLA12 file to run') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input GLA12 file - for FILE in args.infile: - compute_LPT_ICESat(FILE, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_LPT_icebridge_data.py b/scripts/compute_LPT_icebridge_data.py deleted file mode 100644 index fc5b0956..00000000 --- a/scripts/compute_LPT_icebridge_data.py +++ /dev/null @@ -1,675 +0,0 @@ -#!/usr/bin/env python -u""" -compute_LPT_icebridge_data.py -Written by Tyler Sutterley (07/2022) -Calculates load pole tide displacements for correcting Operation IceBridge - elevation data following IERS Convention (2010) guidelines - http://maia.usno.navy.mil/conventions/2010officialinfo.php - http://maia.usno.navy.mil/conventions/chapter7.php - -INPUTS: - ATM1B, ATM icessn or LVIS file from NSIDC - -COMMAND LINE OPTIONS: - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - spatial.py: utilities for reading, writing and operating on spatial data - utilities.py: download and management utilities for syncing files - iers_mean_pole.py: provides the angular coordinates of IERS Mean Pole - read_iers_EOP.py: read daily earth orientation parameters from IERS - read_ATM1b_QFIT_binary.py: read ATM1b QFIT binary files (NSIDC version 1) - -UPDATE HISTORY: - Updated 07/2022: update imports of ATM1b QFIT functions to released version - place some imports within try/except statements - Updated 04/2022: include utf-8 encoding in reads to be windows compliant - use argparse descriptions within sphinx documentation - Updated 10/2021: using 
python logging for handling verbose output - using collections to store attributes in order of creation - Updated 07/2021: can use prefix files to define command line arguments - Updated 05/2021: modified import of ATM1b QFIT reader - Updated 03/2021: use cartesian coordinate conversion routine in spatial - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: merged time conversion routines into module - Updated 11/2020: use internal mean pole and finals EOP files - Updated 10/2020: using argparse to set command line parameters - Updated 09/2020: output modified julian days as time variable - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 03/2020: use read_ATM1b_QFIT_binary from repository - Updated 02/2019: using range for python3 compatibility - Updated 10/2018: updated GPS time calculation for calculating leap seconds - Written 06/2018 -""" -from __future__ import print_function - -import sys -import os -import re -import time -import logging -import argparse -import warnings -import collections -import numpy as np -import pyTMD.time -import pyTMD.spatial -import pyTMD.utilities -import scipy.interpolate -from pyTMD.iers_mean_pole import iers_mean_pole -from pyTMD.read_iers_EOP import read_iers_EOP -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - import ATM1b_QFIT.read_ATM1b_QFIT_binary -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("ATM1b_QFIT not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: reading the number of file lines removing commented lines -def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): - # subset the data to indices if specified - if input_subsetter: - file_lines = len(input_subsetter) - elif HDF5: - # read the size of an input variable within a HDF5 file - with h5py.File(input_file,'r') as fileID: - file_lines, = fileID[HDF5].shape - elif QFIT: - # read the size of a QFIT binary file - file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) - else: - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - i = [i for i in f.readlines() if re.match(r'^(?!\#|\n)',i)] - file_lines = len(i) - # return the number of lines - return file_lines - -# PURPOSE: read the ATM Level-1b data file for variables of interest -def read_ATM_qfit_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - mission_flag = r'(BLATM1B|ILATM1B|ILNSA1B)' - regex_pattern = r'{0}_(\d+)_(\d+)(.*?).(qi|TXT|h5)'.format(mission_flag) - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millenia (e.g. 
93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # output python dictionary with variables - ATM_L1b_input = {} - # Version 1 of ATM QFIT files (ascii) - # output text file from qi2txt with proper filename format - # do not use the shortened output format from qi2txt - if (SFX == 'TXT'): - # compile regular expression operator for reading lines - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' - rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # create output variables with length equal to the number of lines - ATM_L1b_input['lat'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['lon'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['data'] = np.zeros_like(file_contents,dtype=np.float64) - hour = np.zeros_like(file_contents,dtype=np.float64) - minute = np.zeros_like(file_contents,dtype=np.float64) - second = np.zeros_like(file_contents,dtype=np.float64) - # for each line within the file - for i,line in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line) - ATM_L1b_input['lat'][i] = np.float64(line_contents[1]) - ATM_L1b_input['lon'][i] = np.float64(line_contents[2]) - ATM_L1b_input['data'][i] = np.float64(line_contents[3]) - hour[i] = np.float64(line_contents[-1][:2]) - minute[i] = np.float64(line_contents[-1][2:4]) - second[i] = np.float64(line_contents[-1][4:]) - # Version 1 of ATM QFIT files (binary) - elif (SFX == 'qi'): - # read input QFIT data file and subset if specified - fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,QFIT=True) - ATM_L1b_input['lat'] = fid['latitude'][:] - ATM_L1b_input['lon'] = fid['longitude'][:] - ATM_L1b_input['data'] = fid['elevation'][:] - time_hhmmss = fid['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # Version 2 of ATM QFIT files (HDF5) - elif (SFX == 'h5'): - # Open the HDF5 file for reading - fileID = h5py.File(os.path.expanduser(input_file), 'r') - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,HDF5='elevation') - # create output variables with length equal to input elevation - ATM_L1b_input['lat'] = fileID['latitude'][:] - ATM_L1b_input['lon'] = fileID['longitude'][:] - ATM_L1b_input['data'] = fileID['elevation'][:] - time_hhmmss = fileID['instrument_parameters']['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = 
np.zeros_like(time_hhmmss,dtype=np.float64)
- minute = np.zeros_like(time_hhmmss,dtype=np.float64)
- second = np.zeros_like(time_hhmmss,dtype=np.float64)
- # for each line within the file
- for i,packed_time in enumerate(time_hhmmss):
- # convert to zero-padded string with 3 decimal points
- line_contents = '{0:010.3f}'.format(packed_time)
- hour[i] = np.float64(line_contents[:2])
- minute[i] = np.float64(line_contents[2:4])
- second[i] = np.float64(line_contents[4:])
- # close the input HDF5 file
- fileID.close()
- # calculate the number of leap seconds between GPS time (seconds
- # since Jan 6, 1980 00:00:00) and UTC
- gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day,
- hour=hour,minute=minute,second=second,
- epoch=(1980,1,6,0,0,0),scale=86400.0)
- leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
- # calculation of Julian day taking into account leap seconds
- # converting to J2000 seconds
- ATM_L1b_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day,
- hour=hour,minute=minute,second=second-leap_seconds,
- epoch=(2000,1,1,12,0,0,0),scale=86400.0)
- # subset the data to indices if specified
- if input_subsetter:
- for key,val in ATM_L1b_input.items():
- ATM_L1b_input[key] = val[input_subsetter]
- # hemispheric shot count
- count = {}
- count['N'] = np.count_nonzero(ATM_L1b_input['lat'] >= 0.0)
- count['S'] = np.count_nonzero(ATM_L1b_input['lat'] < 0.0)
- # determine hemisphere containing shots in file
- HEM, = [key for key, val in count.items() if val]
- # return the output variables
- return ATM_L1b_input,file_lines,HEM
-
-# PURPOSE: read the ATM Level-2 data file for variables of interest
-def read_ATM_icessn_file(input_file, input_subsetter):
- # regular expression pattern for extracting parameters
- regex_pattern=r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$'
- # extract mission and other parameters from filename
- MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop()
- # early date strings omitted century and millennia (e.g. 93 for 1993)
- if (len(YYMMDD) == 6):
- ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i')
- year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0)
- elif (len(YYMMDD) == 8):
- year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i')
- # input file column names for variables of interest with column indices
- # variables not used: (SNslope:4, WEslope:5, npt_used:7, npt_edit:8, d:9)
- file_dtype = {'seconds':0, 'lat':1, 'lon':2, 'data':3, 'RMS':6, 'track':-1}
- # compile regular expression operator for reading lines (extracts numbers)
- regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
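- # note: the pattern above matches signed integers and floats with optional
- # exponents, e.g. re.findall(regex_pattern,'-1.2e-3 45') -> ['-1.2e-3','45']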
- rx = re.compile(regex_pattern, re.VERBOSE)
- # read the input file, split at lines and remove all commented lines
- with open(input_file, mode='r', encoding='utf8') as f:
- file_contents = [i for i in f.read().splitlines() if
- re.match(r'^(?!\#|\n)',i)]
- # number of lines of data within file
- file_lines = file_length(input_file,input_subsetter)
- # output python dictionary with variables
- ATM_L2_input = {}
- # create output variables with length equal to the number of file lines
- for key in file_dtype.keys():
- ATM_L2_input[key] = np.zeros_like(file_contents, dtype=np.float64)
- # for each line within the file
- for line_number,line_entries in enumerate(file_contents):
- # find numerical instances within the line
- line_contents = rx.findall(line_entries)
- # for each variable of interest: save to dinput as float
- for key,val in file_dtype.items():
- ATM_L2_input[key][line_number] = np.float64(line_contents[val])
- # convert shot time (seconds of day) to J2000
- hour = np.floor(ATM_L2_input['seconds']/3600.0)
- minute = np.floor((ATM_L2_input['seconds'] % 3600)/60.0)
- second = ATM_L2_input['seconds'] % 60.0
- # First column in Pre-IceBridge and ICESSN Version 1 files is GPS time
- if (MISSION == 'BLATM2') or (SFX != 'csv'):
- # calculate the number of leap seconds between GPS time (seconds
- # since Jan 6, 1980 00:00:00) and UTC
- gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day,
- hour=hour,minute=minute,second=second,
- epoch=(1980,1,6,0,0,0),scale=86400.0)
- leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
- else:
- leap_seconds = 0.0
- # calculation of Julian day
- # converting to J2000 seconds
- ATM_L2_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day,
- hour=hour,minute=minute,second=second-leap_seconds,
- epoch=(2000,1,1,12,0,0,0),scale=86400.0)
- # convert RMS from centimeters to meters
- ATM_L2_input['error'] = ATM_L2_input['RMS']/100.0
- # subset the data to indices if specified
- if input_subsetter:
- for key,val in ATM_L2_input.items():
- ATM_L2_input[key] = val[input_subsetter]
- # hemispheric shot count
- count = {}
- count['N'] = np.count_nonzero(ATM_L2_input['lat'] >= 0.0)
- count['S'] = np.count_nonzero(ATM_L2_input['lat'] < 0.0)
- # determine hemisphere containing shots in file
- HEM, = [key for key, val in count.items() if val]
- # return the output variables
- return ATM_L2_input,file_lines,HEM
-
-# PURPOSE: read the LVIS Level-2 data file for variables of interest
-def read_LVIS_HDF5_file(input_file, input_subsetter):
- # LVIS region flags: GL for Greenland and AQ for Antarctica
- lvis_flag = {'GL':'N','AQ':'S'}
- # regular expression pattern for extracting parameters from HDF5 files
- # computed in read_icebridge_lvis.py
- mission_flag = '(BLVIS2|BVLIS2|ILVIS2|ILVGH2)'
- regex_pattern = r'{0}_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5'.format(mission_flag)
- # extract mission, region and other parameters from filename
- MISSION,REGION,YY,MMDD,RLD,SS = re.findall(regex_pattern,input_file).pop()
- LDS_VERSION = '2.0.2' if (int(RLD[1:3]) >= 18) else '1.04'
- # input and output python dictionaries with variables
- file_input = {}
- LVIS_L2_input = {}
- fileID = h5py.File(input_file,'r')
- # create output variables with length equal to input shot number
- file_lines = file_length(input_file,input_subsetter,HDF5='Shot_Number')
- # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS104.html
- # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS202.html
- if (LDS_VERSION == '1.04'):
- # elevation surfaces
- 
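# note: LDS version 1.04 provides centroid elevation and geolocation
- # directly, while LDS version 2.0.2 below reconstructs the centroid
- # from the RH50 and RH100 waveform metrics
- 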
file_input['elev'] = fileID['Elevation_Surfaces/Elevation_Centroid'][:] - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # latitude - file_input['lat'] = fileID['Geolocation/Latitude_Centroid'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon'] = fileID['Geolocation/Longitude_Centroid'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - elif (LDS_VERSION == '2.0.2'): - # elevation surfaces - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # heights above lowest detected mode - file_input['RH50'] = fileID['Waveform/RH50'][:] - file_input['RH100'] = fileID['Waveform/RH100'][:] - # calculate centroidal elevation using 50% of waveform energy - file_input['elev'] = file_input['elev_low'] + file_input['RH50'] - # latitude - file_input['lat_top'] = fileID['Geolocation/Latitude_Top'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon_top'] = fileID['Geolocation/Longitude_Top'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - # linearly interpolate latitude and longitude to RH50 - file_input['lat'] = file_input['lat_low'] + file_input['RH50'] * \ - (file_input['lat_top'] - file_input['lat_low'])/file_input['RH100'] - file_input['lon'] = file_input['lon_low'] + file_input['RH50'] * \ - (file_input['lon_top'] - file_input['lon_low'])/file_input['RH100'] - # J2000 seconds - LVIS_L2_input['time'] = fileID['Time/J2000'][:] - # close the input HDF5 file - fileID.close() - # output combined variables - LVIS_L2_input['data'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lon'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lat'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['error'] = np.zeros_like(file_input['elev'],dtype=np.float64) - # find where elev high is equal to elev low - # see note about using LVIS centroid elevation product - # http://lvis.gsfc.nasa.gov/OIBDataStructure.html - ii = np.nonzero(file_input['elev_low'] == file_input['elev_high']) - jj = np.nonzero(file_input['elev_low'] != file_input['elev_high']) - # where lowest point of waveform is equal to highest point --> - # using the elev_low elevation - LVIS_L2_input['data'][ii] = file_input['elev_low'][ii] - # for other locations use the centroid elevation - # as the centroid is a useful product over rough terrain - # when you are calculating ice volume change - LVIS_L2_input['data'][jj] = file_input['elev'][jj] - # latitude and longitude for each case - # elevation low == elevation high - LVIS_L2_input['lon'][ii] = file_input['lon_low'][ii] - LVIS_L2_input['lat'][ii] = file_input['lat_low'][ii] - # centroid elevations - LVIS_L2_input['lon'][jj] = file_input['lon'][jj] - LVIS_L2_input['lat'][jj] = file_input['lat'][jj] - # estimated uncertainty for both cases - LVIS_variance_low = (file_input['elev_low'] - file_input['elev'])**2 - LVIS_variance_high = (file_input['elev_high'] - file_input['elev'])**2 - LVIS_L2_input['error']=np.sqrt((LVIS_variance_low + LVIS_variance_high)/2.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in LVIS_L2_input.items(): - LVIS_L2_input[key] = val[input_subsetter] - # return the output variables - return LVIS_L2_input,file_lines,lvis_flag[REGION] - -# PURPOSE: read Operation 
IceBridge data from NSIDC -# compute load pole tide radial displacements at data points and times -def compute_LPT_icebridge_data(arg, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # extract file name and subsetter indices lists - match_object = re.match(r'(.*?)(\[(.*?)\])?$',arg) - input_file = os.path.expanduser(match_object.group(1)) - # subset input file to indices - if match_object.group(2): - # decompress ranges and add to list - input_subsetter = [] - for i in re.findall(r'((\d+)-(\d+)|(\d+))',match_object.group(3)): - input_subsetter.append(int(i[3])) if i[3] else \ - input_subsetter.extend(range(int(i[1]),int(i[2])+1)) - else: - input_subsetter = None - - # output directory for input_file - DIRECTORY = os.path.dirname(input_file) - # calculate if input files are from ATM or LVIS (+GH) - regex = {} - regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$' - regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - for key,val in regex.items(): - if re.match(val, os.path.basename(input_file)): - OIB = key - - # HDF5 file attributes - attrib = collections.OrderedDict() - # Modified Julian Days - attrib['time'] = {} - attrib['time']['long_name'] = 'Time' - attrib['time']['units'] = 'days since 1858-11-17T00:00:00' - attrib['time']['description'] = 'Modified Julian Days' - attrib['time']['standard_name'] = 'time' - attrib['time']['calendar'] = 'standard' - # latitude - attrib['lat'] = {} - attrib['lat']['long_name'] = 'Latitude_of_measurement' - attrib['lat']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lat']['units'] = 'Degrees_North' - # longitude - attrib['lon'] = {} - attrib['lon']['long_name'] = 'Longitude_of_measurement' - attrib['lon']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lon']['units'] = 'Degrees_East' - # load pole tides - attrib['tide_pole'] = {} - attrib['tide_pole']['long_name'] = 'Solid_Earth_Pole_Tide' - attrib['tide_pole']['description'] = ('Solid_Earth_pole_tide_radial_' - 'displacements_at_the_measurement_position_at_the_acquisition_' - 'time_due_to_polar_motion') - attrib['tide_pole']['reference'] = ('ftp://tai.bipm.org/iers/conv2010/' - 'chapter7/tn36_c7.pdf') - attrib['tide_pole']['units'] = 'meters' - - # extract information from first input file - # acquisition year, month and day - # number of points - # instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS) - if OIB in ('ATM','ATM1b'): - M1,YYMMDD1,HHMMSS1,AX1,SF1 = re.findall(regex[OIB], input_file).pop() - # early date strings omitted century and millenia (e.g. 
93 for 1993) - if (len(YYMMDD1) == 6): - ypre,MM1,DD1 = YYMMDD1[:2],YYMMDD1[2:4],YYMMDD1[4:] - if (np.float64(ypre) >= 90): - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 1900.0) - else: - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 2000.0) - elif (len(YYMMDD1) == 8): - YY1,MM1,DD1 = YYMMDD1[:4],YYMMDD1[4:6],YYMMDD1[6:] - elif OIB in ('LVIS','LVGH'): - M1,RG1,YY1,MMDD1,RLD1,SS1 = re.findall(regex[OIB], input_file).pop() - MM1,DD1 = MMDD1[:2],MMDD1[2:] - - # read data from input_file - logger.info('{0} -->'.format(input_file)) - if (OIB == 'ATM'): - # load IceBridge ATM data from input_file - dinput,file_lines,HEM = read_ATM_icessn_file(input_file,input_subsetter) - elif (OIB == 'ATM1b'): - # load IceBridge Level-1b ATM data from input_file - dinput,file_lines,HEM = read_ATM_qfit_file(input_file,input_subsetter) - elif OIB in ('LVIS','LVGH'): - # load IceBridge LVIS data from input_file - dinput,file_lines,HEM = read_LVIS_HDF5_file(input_file,input_subsetter) - - # extract lat/lon - lon = dinput['lon'][:] - lat = dinput['lat'][:] - # convert time from UTC time of day to modified julian days (MJD) - # J2000: seconds since 2000-01-01 12:00:00 UTC - t = dinput['time'][:]/86400.0 + 51544.5 - # convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') - # convert calendar dates into year decimal - tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, - hour=HH,minute=MN,second=SS) - # elevation - h1 = np.copy(dinput['data'][:]) - - # degrees to radians - dtr = np.pi/180.0 - atr = np.pi/648000.0 - # earth and physical parameters (IERS and WGS84) - G = 6.67428e-11# universal constant of gravitation [m^3/(kg*s^2)] - GM = 3.986004418e14# geocentric gravitational constant [m^3/s^2] - ge = 9.7803278# mean equatorial gravity [m/s^2] - a_axis = 6378136.6# semimajor axis of the WGS84 ellipsoid [m] - flat = 1.0/298.257223563# flattening of the WGS84 ellipsoid - b_axis = (1.0 -flat)*a_axis# semiminor axis of the WGS84 ellipsoid [m] - omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] - # tidal love number appropriate for the load tide - hb2 = 0.6207 - # Linear eccentricity, first and second numerical eccentricity - lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2) - ecc1 = lin_ecc/a_axis - ecc2 = lin_ecc/b_axis - # m parameter [omega^2*a^2*b/(GM)]. p. 70, Eqn.(2-137) - m = omega**2*((1 -flat)*a_axis**3)/GM - # flattening components - f_2 = -flat + (5.0/2.0)*m + (1.0/2.0)*flat**2.0 - (26.0/7.0)*flat*m + \ - (15.0/4.0)*m**2.0 - f_4 = -(1.0/2.0)*flat**2.0 + (5.0/2.0)*flat*m - - # convert from geodetic latitude to geocentric latitude - # calculate X, Y and Z from geodetic latitude and longitude - X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,h=h1,a_axis=a_axis,flat=flat) - rr = np.sqrt(X**2.0 + Y**2.0 + Z**2.0) - # calculate geocentric latitude and convert to degrees - latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr - # colatitude and longitude in radians - theta = dtr*(90.0 - latitude_geocentric) - phi = lon*dtr - - # compute normal gravity at spatial location and elevation of points. - # normal gravity at the equator. p. 79, Eqn.(2-186) - gamma_a = (GM/(a_axis*b_axis)) * (1.0-(3.0/2.0)*m - (3.0/14.0)*ecc2**2.0*m) - # Normal gravity. p. 80, Eqn.(2-199) - gamma_0 = gamma_a*(1.0 + f_2*np.cos(theta)**2.0 + - f_4*np.sin(np.pi*latitude_geocentric/180.0)**4.0) - # Normal gravity at height h. p. 
82, Eqn.(2-215) - gamma_h = gamma_0*(1.0 - - (2.0/a_axis)*(1.0+flat+m-2.0*flat*np.cos(theta)**2.0)*h1 + \ - (3.0/a_axis**2.0)*h1**2.0) - - # pole tide files (mean and daily) - mean_pole_file = pyTMD.utilities.get_data_path(['data','mean-pole.tab']) - pole_tide_file = pyTMD.utilities.get_data_path(['data','finals.all']) - # read IERS daily polar motion values - EOP = read_iers_EOP(pole_tide_file) - # create cubic spline interpolations of daily polar motion values - xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0) - ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0) - # bad value - fill_value = -9999.0 - - # output load pole tide HDF5 file - # form: rg_NASA_LOAD_POLE_TIDE_WGS84_fl1yyyymmddjjjjj.H5 - # where rg is the hemisphere flag (GR or AN) for the region - # fl1 and fl2 are the data flags (ATM, LVIS, GLAS) - # yymmddjjjjj is the year, month, day and second of the input file - # output region flags: GR for Greenland and AN for Antarctica - hem_flag = {'N':'GR','S':'AN'} - # use starting second to distinguish between files for the day - JJ1 = np.min(dinput['time']) % 86400 - # output file format - args = (hem_flag[HEM],'LOAD_POLE_TIDE',OIB,YY1,MM1,DD1,JJ1) - FILENAME = '{0}_NASA_{1}_WGS84_{2}{3}{4}{5}{6:05.0f}.H5'.format(*args) - # print file information - logger.info('\t{0}'.format(FILENAME)) - - # open output HDF5 file - fid = h5py.File(os.path.join(DIRECTORY,FILENAME), 'w') - - # calculate angular coordinates of mean pole at time tdec - mpx,mpy,fl = iers_mean_pole(mean_pole_file,tdec,'2015') - # interpolate daily polar motion values to time using cubic splines - px = xSPL(t) - py = ySPL(t) - # calculate differentials from mean pole positions - mx = px - mpx - my = -(py - mpy) - # calculate radial displacement at time - dfactor = -hb2*atr*(omega**2*rr**2)/(2.0*gamma_h) - Srad = np.ma.zeros((file_lines),fill_value=fill_value) - Srad.data[:] = dfactor*np.sin(2.0*theta)*(mx*np.cos(phi) + my*np.sin(phi)) - # replace fill values - Srad.mask = np.isnan(Srad.data) - Srad.data[Srad.mask] = Srad.fill_value - # copy radial displacement to output dictionary - dinput['tide_pole'] = Srad.copy() - - # output dictionary with HDF5 variables - h5 = {} - # add variables to output file - for key,attributes in attrib.items(): - # Defining the HDF5 dataset variables for lat/lon - h5[key] = fid.create_dataset(key, (file_lines,), - data=dinput[key][:], dtype=dinput[key].dtype, - compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in attributes.items(): - h5[key].attrs[att_name] = att_val - # attach dimensions - if key not in ('time',): - for i,dim in enumerate(['time']): - h5[key].dims[i].label = 'RECORD_SIZE' - h5[key].dims[i].attach_scale(h5[dim]) - - # HDF5 file attributes - fid.attrs['featureType'] = 'trajectory' - fid.attrs['title'] = 'Load_Pole_Tide_correction_for_elevation_measurements' - fid.attrs['summary'] = ('Solid_Earth_pole_tide_radial_displacements_' - 'computed_at_elevation_measurements.') - fid.attrs['project'] = 'NASA_Operation_IceBridge' - fid.attrs['processing_level'] = '4' - fid.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - # add attributes for input file - fid.attrs['elevation_file'] = os.path.basename(input_file) - # add geospatial and temporal attributes - fid.attrs['geospatial_lat_min'] = dinput['lat'].min() - fid.attrs['geospatial_lat_max'] = dinput['lat'].max() - fid.attrs['geospatial_lon_min'] = dinput['lon'].min() - fid.attrs['geospatial_lon_max'] = dinput['lon'].max() - 
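# note: these geospatial bounds and the unit/ellipsoid attributes below
- # follow ACDD-style attribute naming for dataset discovery
- 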
fid.attrs['geospatial_lat_units'] = "degrees_north" - fid.attrs['geospatial_lon_units'] = "degrees_east" - fid.attrs['geospatial_ellipsoid'] = "WGS84" - fid.attrs['time_type'] = 'UTC' - - # convert start/end time from MJD into Julian days - JD_start = np.min(t) + 2400000.5 - JD_end = np.max(t) + 2400000.5 - # convert to calendar date - cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),astype=int) - # add attributes with measurement date start, end and duration - args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) - fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['hour'][-1],cal['minute'][-1],cal['second'][-1]) - fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['year'][0],cal['month'][0],cal['day'][0]) - fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - args = (cal['year'][-1],cal['month'][-1],cal['day'][-1]) - fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - duration = np.round(JD_end*86400.0 - JD_start*86400.0) - fid.attrs['DurationTimeSeconds'] ='{0:0.0f}'.format(duration) - # close the output HDF5 dataset - fid.close() - # change the permissions level to MODE - os.chmod(os.path.join(DIRECTORY,FILENAME), MODE) - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates radial load pole tide displacements for - correcting Operation IceBridge elevation data following IERS - Convention (2010) guidelines - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line options - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='Input Operation IceBridge file to run') - # verbosity settings - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of output file') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input file - for arg in args.infile: - compute_LPT_icebridge_data(arg, VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_OPT_ICESat_GLA12.py b/scripts/compute_OPT_ICESat_GLA12.py deleted file mode 100644 index 7d234ba2..00000000 --- a/scripts/compute_OPT_ICESat_GLA12.py +++ /dev/null @@ -1,458 +0,0 @@ -#!/usr/bin/env python -u""" -compute_OPT_ICESat_GLA12.py -Written by Tyler Sutterley (07/2022) -Calculates radial ocean pole tide displacements for correcting ICESat/GLAS - L2 GLA12 Antarctic and Greenland Ice Sheet elevation data following - IERS Convention (2010) guidelines - -COMMAND LINE OPTIONS: - -I X, --interpolate X: Interpolation method - spline - linear - nearest - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for 
Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - spatial.py: utilities for reading, writing and operating on spatial data - utilities.py: download and management utilities for syncing files - iers_mean_pole.py: provides the angular coordinates of IERS Mean Pole - read_iers_EOP.py: read daily earth orientation parameters from IERS - read_ocean_pole_tide.py: read ocean pole load tide map from IERS - -REFERENCES: - S Desai, "Observing the pole tide with satellite altimetry", Journal of - Geophysical Research: Oceans, 107(C11), 2002. doi: 10.1029/2001JC001224 - S Desai, J Wahr and B Beckley "Revisiting the pole tide for and from - satellite altimetry", Journal of Geodesy, 89(12), p1233-1243, 2015. - doi: 10.1007/s00190-015-0848-7 - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 04/2022: use longcomplex data format to be windows compliant - use argparse descriptions within sphinx documentation - Updated 02/2022: save ICESat campaign attribute to output file - Updated 10/2021: using python logging for handling verbose output - Updated 07/2021: can use prefix files to define command line arguments - Updated 04/2021: can use a generically named GLA12 file as input - Updated 03/2021: use cartesian coordinate conversion routine in spatial - Updated 12/2020: H5py deprecation warning change to use make_scale - merged time conversion routines into module - Written 12/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import warnings -import numpy as np -import pyTMD.time -import pyTMD.spatial -import pyTMD.utilities -import scipy.interpolate -from pyTMD.iers_mean_pole import iers_mean_pole -from pyTMD.read_iers_EOP import read_iers_EOP -from pyTMD.read_ocean_pole_tide import read_ocean_pole_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat ice sheet HDF5 elevation data (GLAH12) from NSIDC -# compute ocean pole tide radial displacements at points and times -def compute_OPT_ICESat(FILE, METHOD=None, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get directory from FILE - logger.info('{0} -->'.format(FILE)) - DIRECTORY = os.path.dirname(FILE) - - # compile regular expression operator for extracting information from file - rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_' - r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE) - # extract parameters from ICESat/GLAS HDF5 file name - # PRD: Product number (01, 05, 06, 12, 13, 14, or 15) - # RL: Release number for process that created the product = 634 - # RGTP: Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit) - # ORB: Reference orbit number (starts at 1 and increments each time a - # new reference orbit ground track file is obtained.) 
- try: - PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE = rx.findall(FILE).pop() - except Exception: - # output ocean pole tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(FILE) - OUTPUT_FILE = '{0}_{1}{2}'.format(fileBasename,'OPT',fileExtension) - else: - # output ocean pole tide HDF5 file for NSIDC granules - args = (PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) - file_format = 'GLAH{0}_{1}_OPT_{2}{3}{4}_{5}_{6}_{7}_{8}_{9}.h5' - OUTPUT_FILE = file_format.format(*args) - - # read GLAH12 HDF5 file - fileID = h5py.File(FILE,'r') - n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape - # get variables and attributes - rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy() - # seconds since 2000-01-01 12:00:00 UTC (J2000) - DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy() - # Latitude (degrees North) - lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy() - # Longitude (degrees East) - lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy() - # Elevation (height above TOPEX/Poseidon ellipsoid in meters) - elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy() - fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs['_FillValue'] - - # convert time from UTC time of day to Modified Julian Days (MJD) - # J2000: seconds since 2000-01-01 12:00:00 UTC - t = DS_UTCTime_40HZ[:]/86400.0 + 51544.5 - # convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') - # convert calendar dates into year decimal - tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, - hour=HH,minute=MN,second=SS) - - # semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids - atop,ftop = (6378136.3,1.0/298.257) - awgs,fwgs = (6378137.0,1.0/298.257223563) - # convert from Topex/Poseidon to WGS84 Ellipsoids - lat_40HZ,elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX, elev_TPX, - atop, ftop, awgs, fwgs, eps=1e-12, itmax=10) - - # degrees to radians and arcseconds to radians - dtr = np.pi/180.0 - atr = np.pi/648000.0 - # earth and physical parameters (IERS) - G = 6.67428e-11 # universal constant of gravitation [m^3/(kg*s^2)] - GM = 3.986004418e14 # geocentric gravitational constant [m^3/s^2] - ge = 9.7803278 # mean equatorial gravitational acceleration [m/s^2] - a_axis = 6378136.6 # equatorial radius of the Earth [m] - flat = 1.0/298.257223563 # flattening of the ellipsoid - omega = 7.292115e-5 # mean rotation rate of the Earth [radians/s] - rho_w = 1025.0 # density of sea water [kg/m^3] - # Linear eccentricity and first numerical eccentricity - lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2) - ecc1 = lin_ecc/a_axis - # tidal Love number differential (1 + kl - hl) for pole tide frequencies - gamma = 0.6870 + 0.0036j - - # convert from geodetic latitude to geocentric latitude - # calculate X, Y and Z from geodetic latitude and longitude - X,Y,Z = pyTMD.spatial.to_cartesian(lon_40HZ,lat_40HZ,h=elev_40HZ, - a_axis=a_axis,flat=flat) - # calculate geocentric latitude and convert to degrees - latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr - - # pole tide displacement scale factor - Hp = np.sqrt(8.0*np.pi/15.0)*(omega**2*a_axis**4)/GM - K = 4.0*np.pi*G*rho_w*Hp*a_axis/(3.0*ge) - K1 = 4.0*np.pi*G*rho_w*Hp*a_axis**3/(3.0*GM)
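The radial displacement assembled further below follows the IERS (2010) ocean pole tide formulation. A minimal standalone sketch of that step, assuming wobble parameters m1, m2 (in arcseconds) and a complex radial admittance uR interpolated from the Desai (2002) map; the function name is ours, not the script's:

    import numpy as np

    def ocean_pole_tide_radial(m1, m2, uR, K, gamma=0.6870 + 0.0036j):
        # arcseconds to radians
        atr = np.pi/648000.0
        # radial displacement from wobble parameters and complex admittance
        return K*atr*np.real((m1*gamma.real + m2*gamma.imag)*uR.real +
            (m2*gamma.real - m1*gamma.imag)*uR.imag)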
- # read ocean pole tide map from Desai (2002) - ocean_pole_tide_file = pyTMD.utilities.get_data_path(['data', - 'opoleloadcoefcmcor.txt.gz']) - iur,iun,iue,ilon,ilat = read_ocean_pole_tide(ocean_pole_tide_file) - - # pole tide files (mean and daily) - mean_pole_file = pyTMD.utilities.get_data_path(['data','mean-pole.tab']) - pole_tide_file = pyTMD.utilities.get_data_path(['data','finals.all']) - - # read IERS daily polar motion values - EOP = read_iers_EOP(pole_tide_file) - # create cubic spline interpolations of daily polar motion values - xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0) - ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0) - - # interpolate ocean pole tide map from Desai (2002) - if (METHOD == 'spline'): - # use scipy bivariate splines to interpolate to output points - f1 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1], - iur[:,::-1].real, kx=1, ky=1) - f2 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1], - iur[:,::-1].imag, kx=1, ky=1) - UR = np.zeros((n_40HZ),dtype=np.longcomplex) - UR.real = f1.ev(lon_40HZ,latitude_geocentric) - UR.imag = f2.ev(lon_40HZ,latitude_geocentric) - else: - # use scipy regular grid to interpolate values for a given method - r1 = scipy.interpolate.RegularGridInterpolator((ilon,ilat[::-1]), - iur[:,::-1], method=METHOD) - UR = r1(np.c_[lon_40HZ,latitude_geocentric]) - - # calculate angular coordinates of mean pole at time tdec - mpx,mpy,fl = iers_mean_pole(mean_pole_file,tdec,'2015') - # interpolate daily polar motion values to t using cubic splines - px = xSPL(t) - py = ySPL(t) - # calculate differentials from mean pole positions - mx = px - mpx - my = -(py - mpy) - # calculate radial displacement at time - Urad = np.ma.zeros((n_40HZ),fill_value=fv) - Urad.data[:] = K*atr*np.real((mx*gamma.real + my*gamma.imag)*UR.real + - (my*gamma.real - mx*gamma.imag)*UR.imag) - # replace fill values - Urad.mask = np.isnan(Urad.data) - Urad.data[Urad.mask] = Urad.fill_value - - # copy variables for outputting to HDF5 file - IS_gla12_tide = dict(Data_40HZ={}) - IS_gla12_fill = dict(Data_40HZ={}) - IS_gla12_tide_attrs = dict(Data_40HZ={}) - - # copy global file attributes - global_attribute_list = ['featureType','title','comment','summary','license', - 'references','AccessConstraints','CitationforExternalPublication', - 'contributor_role','contributor_name','creator_name','creator_email', - 'publisher_name','publisher_email','publisher_url','platform','instrument', - 'processing_level','date_created','spatial_coverage_type','history', - 'keywords','keywords_vocabulary','naming_authority','project','time_type', - 'date_type','time_coverage_start','time_coverage_end', - 'time_coverage_duration','source','HDFVersion','identifier_product_type', - 'identifier_product_format_version','Conventions','institution', - 'ReprocessingPlanned','ReprocessingActual','LocalGranuleID', - 'ProductionDateTime','LocalVersionID','PGEVersion','OrbitNumber', - 'StartOrbitNumber','StopOrbitNumber','EquatorCrossingLongitude', - 'EquatorCrossingTime','EquatorCrossingDate','ShortName','VersionID', - 'InputPointer','RangeBeginningTime','RangeEndingTime','RangeBeginningDate', - 'RangeEndingDate','PercentGroundHit','OrbitQuality','Cycle','Track', - 'Instrument_State','Timing_Bias','ReferenceOrbit','SP_ICE_PATH_NO', - 'SP_ICE_GLAS_StartBlock','SP_ICE_GLAS_EndBlock','Instance','Range_Bias', -
'Instrument_State_Date','Instrument_State_Time','Range_Bias_Date', - 'Range_Bias_Time','Timing_Bias_Date','Timing_Bias_Time', - 'identifier_product_doi','identifier_file_uuid', - 'identifier_product_doi_authority'] - for att in global_attribute_list: - IS_gla12_tide_attrs[att] = fileID.attrs[att] - # copy ICESat campaign name from ancillary data - IS_gla12_tide_attrs['Campaign'] = fileID['ANCILLARY_DATA'].attrs['Campaign'] - - # add attributes for input GLA12 file - IS_gla12_tide_attrs['input_files'] = os.path.basename(FILE) - # update geospatial ranges for ellipsoid - IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north" - IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east" - IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84" - - # copy 40Hz group attributes - for att_name,att_val in fileID['Data_40HZ'].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val - # copy attributes for time, geolocation and geophysical groups - for var in ['Time','Geolocation','Geophysical']: - IS_gla12_tide['Data_40HZ'][var] = {} - IS_gla12_fill['Data_40HZ'][var] = {} - IS_gla12_tide_attrs['Data_40HZ'][var] = {} - for att_name,att_val in fileID['Data_40HZ'][var].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val - - # J2000 time - IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ - IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {} - for att_name,att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][att_name] = att_val - # record - IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ - IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {} - for att_name,att_val in fileID['Data_40HZ']['Time']['i_rec_ndx'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][att_name] = att_val - # latitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lat'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][att_name] = att_val - # longitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {} - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lon'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][att_name] = att_val - - # geophysical variables - # computed ocean pole tide - IS_gla12_tide['Data_40HZ']['Geophysical']['d_opElv'] = Urad - IS_gla12_fill['Data_40HZ']['Geophysical']['d_opElv'] = Urad.fill_value - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['units'] = "meters" - 
IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['long_name'] = \ - "Ocean Pole Tide" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['description'] = ("Ocean " - "pole tide radial displacements due to polar motion") - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['reference'] = \ - 'ftp://tai.bipm.org/iers/conv2010/chapter7/opoleloadcoefcmcor.txt.gz' - IS_gla12_tide_attrs['Data_40HZ']['Geophysical']['d_opElv']['coordinates'] = \ - "../DS_UTCTime_40" - - # close the input HDF5 file - fileID.close() - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_tide_attrs, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE), - FILL_VALUE=IS_gla12_fill, CLOBBER=True) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat data to HDF5 -def HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_attrs, - FILENAME='', FILL_VALUE=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - # create 40HZ HDF5 records - h5 = dict(Data_40HZ={}) - - # add HDF5 file attributes - attrs = {a:v for a,v in IS_gla12_attrs.items() if not isinstance(v,dict)} - for att_name,att_val in attrs.items(): - fileID.attrs[att_name] = att_val - - # create Data_40HZ group - fileID.create_group('Data_40HZ') - # add HDF5 40HZ group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'].items(): - if att_name not in ('DS_UTCTime_40',) and not isinstance(att_val,dict): - fileID['Data_40HZ'].attrs[att_name] = att_val - - # add 40HZ time variable - val = IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] - attrs = IS_gla12_attrs['Data_40HZ']['DS_UTCTime_40'] - # Defining the HDF5 dataset variables - var = '{0}/{1}'.format('Data_40HZ','DS_UTCTime_40') - h5['Data_40HZ']['DS_UTCTime_40'] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, compression='gzip') - # make dimension - h5['Data_40HZ']['DS_UTCTime_40'].make_scale('DS_UTCTime_40') - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ']['DS_UTCTime_40'].attrs[att_name] = att_val - - # for each variable group - for group in ['Time','Geolocation','Geophysical']: - # add group to dict - h5['Data_40HZ'][group] = {} - # create Data_40HZ group - fileID.create_group('Data_40HZ/{0}'.format(group)) - # add HDF5 group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'][group].items(): - if not isinstance(att_val,dict): - fileID['Data_40HZ'][group].attrs[att_name] = att_val - # for each variable in the group - for key,val in IS_gla12_tide['Data_40HZ'][group].items(): - fillvalue = FILL_VALUE['Data_40HZ'][group][key] - attrs = IS_gla12_attrs['Data_40HZ'][group][key] - # Defining the HDF5 dataset variables - var = '{0}/{1}/{2}'.format('Data_40HZ',group,key) - # use variable compression if containing fill values - if fillvalue: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - fillvalue=fillvalue, compression='gzip') - else: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - compression='gzip') - # attach dimensions - for i,dim in enumerate(['DS_UTCTime_40']): - h5['Data_40HZ'][group][key].dims[i].attach_scale( - h5['Data_40HZ'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - 
h5['Data_40HZ'][group][key].attrs[att_name] = att_val - - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates radial ocean pole tide displacements for - correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet - elevation data following IERS Convention (2010) guidelines - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat GLA12 file to run') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest'), - help='Spatial interpolation method') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input GLA12 file - for FILE in args.infile: - compute_OPT_ICESat(FILE, METHOD=args.interpolate, - VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_OPT_icebridge_data.py b/scripts/compute_OPT_icebridge_data.py deleted file mode 100644 index 71464732..00000000 --- a/scripts/compute_OPT_icebridge_data.py +++ /dev/null @@ -1,698 +0,0 @@ -#!/usr/bin/env python -u""" -compute_OPT_icebridge_data.py -Written by Tyler Sutterley (07/2022) -Calculates radial ocean pole tide displacements for correcting Operation - IceBridge elevation data following IERS Convention (2010) guidelines - http://maia.usno.navy.mil/conventions/2010officialinfo.php - http://maia.usno.navy.mil/conventions/chapter7.php - -INPUTS: - ATM1B, ATM icessn or LVIS file from NSIDC - -COMMAND LINE OPTIONS: - -I X, --interpolate X: Interpolation method - spline - linear - nearest - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - spatial.py: utilities for reading, writing and operating on spatial data - utilities.py: download and management utilities for syncing files - iers_mean_pole.py: provides the angular coordinates of IERS Mean Pole - read_iers_EOP.py: read daily earth orientation parameters from IERS - read_ocean_pole_tide.py: read ocean pole load tide map from IERS - read_ATM1b_QFIT_binary.py: read ATM1b QFIT binary files (NSIDC version 1) - -UPDATE HISTORY: - Updated 07/2022: update imports of ATM1b QFIT functions to released version - place some imports within try/except statements -
Updated 04/2022: include utf-8 encoding in reads to be windows compliant - use longcomplex data format to be windows compliant - use argparse descriptions within sphinx documentation - Updated 10/2021: using python logging for handling verbose output - using collections to store attributes in order of creation - Updated 07/2021: can use prefix files to define command line arguments - Updated 05/2021: modified import of ATM1b QFIT reader - Updated 03/2021: use cartesian coordinate conversion routine in spatial - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: merged time conversion routines into module - Updated 11/2020: use internal mean pole and finals EOP files - Updated 10/2020: using argparse to set command line parameters - Updated 09/2020: output modified julian days as time variable - Updated 08/2020: using builtin time operations. python3 regular expressions - replaced griddata interpolation with scipy regular grid interpolators - Updated 03/2020: use read_ATM1b_QFIT_binary from repository - Updated 05/2019: added option interpolate to choose the interpolation method - Updated 02/2019: using range for python3 compatibility - Updated 10/2018: updated GPS time calculation for calculating leap seconds - Written 06/2018 -""" -from __future__ import print_function - -import sys -import os -import re -import time -import logging -import argparse -import warnings -import collections -import numpy as np -import pyTMD.time -import pyTMD.spatial -import pyTMD.utilities -import scipy.interpolate -from pyTMD.iers_mean_pole import iers_mean_pole -from pyTMD.read_iers_EOP import read_iers_EOP -from pyTMD.read_ocean_pole_tide import read_ocean_pole_tide -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - import ATM1b_QFIT.read_ATM1b_QFIT_binary -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("ATM1b_QFIT not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: reading the number of file lines, removing commented lines -def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): - # subset the data to indices if specified - if input_subsetter: - file_lines = len(input_subsetter) - elif HDF5: - # read the size of an input variable within a HDF5 file - with h5py.File(input_file,'r') as fileID: - file_lines, = fileID[HDF5].shape - elif QFIT: - # read the size of a QFIT binary file - file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) - else: - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - i = [i for i in f.readlines() if re.match(r'^(?!\#|\n)',i)] - file_lines = len(i) - # return the number of lines - return file_lines - -# PURPOSE: read the ATM Level-1b data file for variables of interest -def read_ATM_qfit_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - mission_flag = r'(BLATM1B|ILATM1B|ILNSA1B)' - regex_pattern = r'{0}_(\d+)_(\d+)(.*?)\.(qi|TXT|h5)'.format(mission_flag) - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millennia (e.g.
93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # output python dictionary with variables - ATM_L1b_input = {} - # Version 1 of ATM QFIT files (ascii) - # output text file from qi2txt with proper filename format - # do not use the shortened output format from qi2txt - if (SFX == 'TXT'): - # compile regular expression operator for reading lines - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' - rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # create output variables with length equal to the number of lines - ATM_L1b_input['lat'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['lon'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['data'] = np.zeros_like(file_contents,dtype=np.float64) - hour = np.zeros_like(file_contents,dtype=np.float64) - minute = np.zeros_like(file_contents,dtype=np.float64) - second = np.zeros_like(file_contents,dtype=np.float64) - # for each line within the file - for i,line in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line) - ATM_L1b_input['lat'][i] = np.float64(line_contents[1]) - ATM_L1b_input['lon'][i] = np.float64(line_contents[2]) - ATM_L1b_input['data'][i] = np.float64(line_contents[3]) - hour[i] = np.float64(line_contents[-1][:2]) - minute[i] = np.float64(line_contents[-1][2:4]) - second[i] = np.float64(line_contents[-1][4:]) - # Version 1 of ATM QFIT files (binary) - elif (SFX == 'qi'): - # read input QFIT data file and subset if specified - fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,QFIT=True) - ATM_L1b_input['lat'] = fid['latitude'][:] - ATM_L1b_input['lon'] = fid['longitude'][:] - ATM_L1b_input['data'] = fid['elevation'][:] - time_hhmmss = fid['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # Version 2 of ATM QFIT files (HDF5) - elif (SFX == 'h5'): - # Open the HDF5 file for reading - fileID = h5py.File(os.path.expanduser(input_file), 'r') - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,HDF5='elevation') - # create output variables with length equal to input elevation - ATM_L1b_input['lat'] = fileID['latitude'][:] - ATM_L1b_input['lon'] = fileID['longitude'][:] - ATM_L1b_input['data'] = fileID['elevation'][:] - time_hhmmss = fileID['instrument_parameters']['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = 
np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # close the input HDF5 file - fileID.close() - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - # calculation of Julian day taking into account leap seconds - # converting to J2000 seconds - ATM_L1b_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0),scale=86400.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L1b_input.items(): - ATM_L1b_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L1b_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L1b_input['lat'] < 0.0) - # determine hemisphere containing shots in file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L1b_input,file_lines,HEM
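The two-step GPS-to-UTC correction used in these readers can be illustrated on its own. A small sketch with the same pyTMD.time helpers; the example date is arbitrary:

    import pyTMD.time
    # days since the GPS epoch (1980-01-06), scaled to seconds
    gps_seconds = pyTMD.time.convert_calendar_dates(2018, 6, 1,
        hour=12.0, epoch=(1980,1,6,0,0,0), scale=86400.0)
    # leap seconds accumulated between GPS time and UTC at that epoch
    leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
    # J2000 seconds with the leap-second offset removed
    j2000 = pyTMD.time.convert_calendar_dates(2018, 6, 1, hour=12.0,
        second=-leap_seconds, epoch=(2000,1,1,12,0,0), scale=86400.0)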
-# PURPOSE: read the ATM Level-2 data file for variables of interest -def read_ATM_icessn_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - regex_pattern = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millennia (e.g. 93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # input file column names for variables of interest with column indices - # variables not used: (SNslope:4, WEslope:5, npt_used:7, npt_edit:8, d:9) - file_dtype = {'seconds':0, 'lat':1, 'lon':2, 'data':3, 'RMS':6, 'track':-1} - # compile regular expression operator for reading lines (extracts numbers) - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' - rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() - if re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # output python dictionary with variables - ATM_L2_input = {} - # create output variables with length equal to the number of file lines - for key in file_dtype.keys(): - ATM_L2_input[key] = np.zeros_like(file_contents, dtype=np.float64) - # for each line within the file - for line_number,line_entries in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line_entries) - # for each variable of interest: save to dinput as float - for key,val in file_dtype.items(): - ATM_L2_input[key][line_number] = np.float64(line_contents[val]) - # convert shot time (seconds of day) to J2000 - hour = np.floor(ATM_L2_input['seconds']/3600.0) - minute = np.floor((ATM_L2_input['seconds'] % 3600)/60.0) - second = ATM_L2_input['seconds'] % 60.0 - # First column in Pre-IceBridge and ICESSN Version 1 files is GPS time - if (MISSION == 'BLATM2') or (SFX != 'csv'): - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - else: - leap_seconds = 0.0 - # calculation of Julian day - # converting to J2000 seconds - ATM_L2_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0),scale=86400.0) - # convert RMS from centimeters to meters - ATM_L2_input['error'] = ATM_L2_input['RMS']/100.0 - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L2_input.items(): - ATM_L2_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L2_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L2_input['lat'] < 0.0) - # determine hemisphere containing shots in file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L2_input,file_lines,HEM - -# PURPOSE: read the LVIS Level-2 data file for variables of interest -def read_LVIS_HDF5_file(input_file, input_subsetter): - # LVIS region flags: GL for Greenland and AQ for Antarctica - lvis_flag = {'GL':'N','AQ':'S'} - # regular expression pattern for extracting parameters from HDF5 files - # computed in read_icebridge_lvis.py - mission_flag = '(BLVIS2|BVLIS2|ILVIS2|ILVGH2)' - regex_pattern = r'{0}_(.*?)(\d+)_(\d+)_(R\d+)_(\d+)\.H5'.format(mission_flag) - # extract mission, region and other parameters from filename - MISSION,REGION,YY,MMDD,RLD,SS = re.findall(regex_pattern,input_file).pop() - LDS_VERSION = '2.0.2' if (int(RLD[1:3]) >= 18) else '1.04' - # input and output python dictionaries with variables - file_input = {} - LVIS_L2_input = {} - fileID = h5py.File(input_file,'r') - # create output variables with length equal to input shot number - file_lines = file_length(input_file,input_subsetter,HDF5='Shot_Number') - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS104.html - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS202.html - if (LDS_VERSION == '1.04'): - # elevation surfaces -
file_input['elev'] = fileID['Elevation_Surfaces/Elevation_Centroid'][:] - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # latitude - file_input['lat'] = fileID['Geolocation/Latitude_Centroid'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon'] = fileID['Geolocation/Longitude_Centroid'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - elif (LDS_VERSION == '2.0.2'): - # elevation surfaces - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # heights above lowest detected mode - file_input['RH50'] = fileID['Waveform/RH50'][:] - file_input['RH100'] = fileID['Waveform/RH100'][:] - # calculate centroidal elevation using 50% of waveform energy - file_input['elev'] = file_input['elev_low'] + file_input['RH50'] - # latitude - file_input['lat_top'] = fileID['Geolocation/Latitude_Top'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon_top'] = fileID['Geolocation/Longitude_Top'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - # linearly interpolate latitude and longitude to RH50 - file_input['lat'] = file_input['lat_low'] + file_input['RH50'] * \ - (file_input['lat_top'] - file_input['lat_low'])/file_input['RH100'] - file_input['lon'] = file_input['lon_low'] + file_input['RH50'] * \ - (file_input['lon_top'] - file_input['lon_low'])/file_input['RH100'] - # J2000 seconds - LVIS_L2_input['time'] = fileID['Time/J2000'][:] - # close the input HDF5 file - fileID.close() - # output combined variables - LVIS_L2_input['data'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lon'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lat'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['error'] = np.zeros_like(file_input['elev'],dtype=np.float64) - # find where elev high is equal to elev low - # see note about using LVIS centroid elevation product - # http://lvis.gsfc.nasa.gov/OIBDataStructure.html - ii = np.nonzero(file_input['elev_low'] == file_input['elev_high']) - jj = np.nonzero(file_input['elev_low'] != file_input['elev_high']) - # where lowest point of waveform is equal to highest point --> - # using the elev_low elevation - LVIS_L2_input['data'][ii] = file_input['elev_low'][ii] - # for other locations use the centroid elevation - # as the centroid is a useful product over rough terrain - # when you are calculating ice volume change - LVIS_L2_input['data'][jj] = file_input['elev'][jj] - # latitude and longitude for each case - # elevation low == elevation high - LVIS_L2_input['lon'][ii] = file_input['lon_low'][ii] - LVIS_L2_input['lat'][ii] = file_input['lat_low'][ii] - # centroid elevations - LVIS_L2_input['lon'][jj] = file_input['lon'][jj] - LVIS_L2_input['lat'][jj] = file_input['lat'][jj] - # estimated uncertainty for both cases - LVIS_variance_low = (file_input['elev_low'] - file_input['elev'])**2 - LVIS_variance_high = (file_input['elev_high'] - file_input['elev'])**2 - LVIS_L2_input['error']=np.sqrt((LVIS_variance_low + LVIS_variance_high)/2.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in LVIS_L2_input.items(): - LVIS_L2_input[key] = val[input_subsetter] - # return the output variables - return LVIS_L2_input,file_lines,lvis_flag[REGION] - -# PURPOSE: read Operation 
IceBridge data from NSIDC -# compute ocean pole tide radial displacements at data points and times -def compute_OPT_icebridge_data(arg,METHOD=None,VERBOSE=False,MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # extract file name and subsetter indices lists - match_object = re.match(r'(.*?)(\[(.*?)\])?$',arg) - input_file = os.path.expanduser(match_object.group(1)) - # subset input file to indices - if match_object.group(2): - # decompress ranges and add to list - input_subsetter = [] - for i in re.findall(r'((\d+)-(\d+)|(\d+))',match_object.group(3)): - input_subsetter.append(int(i[3])) if i[3] else \ - input_subsetter.extend(range(int(i[1]),int(i[2])+1)) - else: - input_subsetter = None - - # output directory for input_file - DIRECTORY = os.path.dirname(input_file) - # determine whether input files are from ATM or LVIS (+GH) - regex = {} - regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?)\.(qi|TXT|h5)$' - regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+)\.H5$' - regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+)\.H5$' - for key,val in regex.items(): - if re.match(val, os.path.basename(input_file)): - OIB = key - - # invalid value - fill_value = -9999.0 - # HDF5 file attributes - attrib = collections.OrderedDict() - # Modified Julian Days - attrib['time'] = {} - attrib['time']['long_name'] = 'Time' - attrib['time']['units'] = 'days since 1858-11-17T00:00:00' - attrib['time']['description'] = 'Modified Julian Days' - attrib['time']['standard_name'] = 'time' - attrib['time']['calendar'] = 'standard' - # latitude - attrib['lat'] = {} - attrib['lat']['long_name'] = 'Latitude_of_measurement' - attrib['lat']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lat']['units'] = 'Degrees_North' - # longitude - attrib['lon'] = {} - attrib['lon']['long_name'] = 'Longitude_of_measurement' - attrib['lon']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lon']['units'] = 'Degrees_East' - # ocean pole tides - attrib['tide_oc_pole'] = {} - attrib['tide_oc_pole']['long_name'] = 'Ocean_Pole_Tide' - attrib['tide_oc_pole']['description'] = ('Ocean_pole_tide_radial_' - 'displacements_at_the_measurement_position_at_the_acquisition_time_due_' - 'to_polar_motion') - attrib['tide_oc_pole']['reference'] = ('ftp://tai.bipm.org/iers/conv2010/' - 'chapter7/opoleloadcoefcmcor.txt.gz') - attrib['tide_oc_pole']['units'] = 'meters'
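For illustration, the optional index subsetter appended to an input file name works as follows. This mirrors the range decompression above; the file name itself is hypothetical:

    import re
    # hypothetical argument: select indices 0-4 and 10 from the file
    arg = 'ILVIS2_GL2017_0525_R1708_049700.H5[0-4,10]'
    match_object = re.match(r'(.*?)(\[(.*?)\])?$', arg)
    input_subsetter = []
    for i in re.findall(r'((\d+)-(\d+)|(\d+))', match_object.group(3)):
        input_subsetter.append(int(i[3])) if i[3] else \
            input_subsetter.extend(range(int(i[1]), int(i[2])+1))
    # input_subsetter -> [0, 1, 2, 3, 4, 10]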
- # extract information from first input file - # acquisition year, month and day - # number of points - # instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS) - if OIB in ('ATM','ATM1b'): - M1,YYMMDD1,HHMMSS1,AX1,SF1 = re.findall(regex[OIB], input_file).pop() - # early date strings omitted century and millennia (e.g. 93 for 1993) - if (len(YYMMDD1) == 6): - ypre,MM1,DD1 = YYMMDD1[:2],YYMMDD1[2:4],YYMMDD1[4:] - if (np.float64(ypre) >= 90): - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 1900.0) - else: - YY1 = '{0:4.0f}'.format(np.float64(ypre) + 2000.0) - elif (len(YYMMDD1) == 8): - YY1,MM1,DD1 = YYMMDD1[:4],YYMMDD1[4:6],YYMMDD1[6:] - elif OIB in ('LVIS','LVGH'): - M1,RG1,YY1,MMDD1,RLD1,SS1 = re.findall(regex[OIB], input_file).pop() - MM1,DD1 = MMDD1[:2],MMDD1[2:] - - # read data from input_file - logger.info('{0} -->'.format(input_file)) - if (OIB == 'ATM'): - # load IceBridge ATM data from input_file - dinput,file_lines,HEM = read_ATM_icessn_file(input_file,input_subsetter) - elif (OIB == 'ATM1b'): - # load IceBridge Level-1b ATM data from input_file - dinput,file_lines,HEM = read_ATM_qfit_file(input_file,input_subsetter) - elif OIB in ('LVIS','LVGH'): - # load IceBridge LVIS data from input_file - dinput,file_lines,HEM = read_LVIS_HDF5_file(input_file,input_subsetter) - - # extract lat/lon - lon = dinput['lon'][:] - lat = dinput['lat'][:] - # convert time from UTC time of day to modified julian days (MJD) - # J2000: seconds since 2000-01-01 12:00:00 UTC - t = dinput['time'][:]/86400.0 + 51544.5 - # convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') - # convert calendar dates into year decimal - tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, - hour=HH,minute=MN,second=SS) - # elevation - h1 = dinput['data'][:] - - # degrees to radians and arcseconds to radians - dtr = np.pi/180.0 - atr = np.pi/648000.0 - # earth and physical parameters (IERS) - G = 6.67428e-11 # universal constant of gravitation [m^3/(kg*s^2)] - GM = 3.986004418e14 # geocentric gravitational constant [m^3/s^2] - ge = 9.7803278 # mean equatorial gravitational acceleration [m/s^2] - a_axis = 6378136.6 # equatorial radius of the Earth [m] - flat = 1.0/298.257223563 # flattening of the ellipsoid - omega = 7.292115e-5 # mean rotation rate of the Earth [radians/s] - rho_w = 1025.0 # density of sea water [kg/m^3] - # Linear eccentricity and first numerical eccentricity - lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2) - ecc1 = lin_ecc/a_axis - # tidal Love number differential (1 + kl - hl) for pole tide frequencies - gamma = 0.6870 + 0.0036j - - # convert from geodetic latitude to geocentric latitude - # calculate X, Y and Z from geodetic latitude and longitude - X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,h=h1,a_axis=a_axis,flat=flat) - # calculate geocentric latitude and convert to degrees - latitude_geocentric = np.arctan(Z / np.sqrt(X**2.0 + Y**2.0))/dtr - - # pole tide displacement scale factor - Hp = np.sqrt(8.0*np.pi/15.0)*(omega**2*a_axis**4)/GM - K = 4.0*np.pi*G*rho_w*Hp*a_axis/(3.0*ge) - K1 = 4.0*np.pi*G*rho_w*Hp*a_axis**3/(3.0*GM) - - # read ocean pole tide map from Desai (2002) - ocean_pole_tide_file = pyTMD.utilities.get_data_path(['data', - 'opoleloadcoefcmcor.txt.gz']) - iur,iun,iue,ilon,ilat = read_ocean_pole_tide(ocean_pole_tide_file) - - # pole tide files (mean and daily) - mean_pole_file = pyTMD.utilities.get_data_path(['data','mean-pole.tab']) - pole_tide_file = pyTMD.utilities.get_data_path(['data','finals.all']) - - # read IERS daily polar motion values - EOP = read_iers_EOP(pole_tide_file) - # create cubic spline interpolations of daily polar motion values - xSPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['x'],k=3,s=0) - ySPL = scipy.interpolate.UnivariateSpline(EOP['MJD'],EOP['y'],k=3,s=0)
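A short sketch of the polar motion interpolation set up above, using synthetic daily Earth orientation values (the MJD range and pole coordinates below are placeholders, not real EOP data):

    import numpy as np
    import scipy.interpolate
    # placeholder daily polar motion series: MJD and pole x/y [arcseconds]
    MJD = np.arange(58000, 58010)
    px_daily = np.linspace(0.10, 0.12, 10)
    py_daily = np.linspace(0.35, 0.36, 10)
    xSPL = scipy.interpolate.UnivariateSpline(MJD, px_daily, k=3, s=0)
    ySPL = scipy.interpolate.UnivariateSpline(MJD, py_daily, k=3, s=0)
    # evaluate at fractional-day measurement epochs
    t = np.array([58002.25, 58003.75])
    px, py = xSPL(t), ySPL(t)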
- # output ocean pole tide HDF5 file - # form: rg_NASA_OCEAN_POLE_TIDE_WGS84_fl1yyyymmddjjjjj.H5 - # where rg is the hemisphere flag (GR or AN) for the region - # fl1 is the data flag (ATM, LVIS, GLAS) - # yyyymmddjjjjj is the year, month, day and second of the input file - # output region flags: GR for Greenland and AN for Antarctica - hem_flag = {'N':'GR','S':'AN'} - # use starting second to distinguish between files for the day - JJ1 = np.min(dinput['time']) % 86400 - # output file format - args = (hem_flag[HEM],'OCEAN_POLE_TIDE',OIB,YY1,MM1,DD1,JJ1) - FILENAME = '{0}_NASA_{1}_WGS84_{2}{3}{4}{5}{6:05.0f}.H5'.format(*args) - # print file information - logger.info('\t{0}'.format(FILENAME)) - - # open output HDF5 file - fid = h5py.File(os.path.join(DIRECTORY,FILENAME), 'w') - - # interpolate ocean pole tide map from Desai (2002) - if (METHOD == 'spline'): - # use scipy bivariate splines to interpolate to output points - f1 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1], - iur[:,::-1].real, kx=1, ky=1) - f2 = scipy.interpolate.RectBivariateSpline(ilon, ilat[::-1], - iur[:,::-1].imag, kx=1, ky=1) - UR = np.zeros((file_lines),dtype=np.longcomplex) - UR.real = f1.ev(lon,latitude_geocentric) - UR.imag = f2.ev(lon,latitude_geocentric) - else: - # use scipy regular grid to interpolate values for a given method - r1 = scipy.interpolate.RegularGridInterpolator((ilon,ilat[::-1]), - iur[:,::-1], method=METHOD) - UR = r1(np.c_[lon,latitude_geocentric]) - - # calculate angular coordinates of mean pole at time tdec - mpx,mpy,fl = iers_mean_pole(mean_pole_file,tdec,'2015') - # interpolate daily polar motion values to t using cubic splines - px = xSPL(t) - py = ySPL(t) - # calculate differentials from mean pole positions - mx = px - mpx - my = -(py - mpy) - # calculate radial displacement at time - Urad = np.ma.zeros((file_lines),fill_value=fill_value) - Urad.data[:] = K*atr*np.real((mx*gamma.real + my*gamma.imag)*UR.real + - (my*gamma.real - mx*gamma.imag)*UR.imag) - # replace fill values - Urad.mask = np.isnan(Urad.data) - Urad.data[Urad.mask] = Urad.fill_value - # copy radial displacement to output variable - dinput['tide_oc_pole'] = Urad.copy() - - # output dictionary with HDF5 variables - h5 = {} - # add variables to output file - for key,attributes in attrib.items(): - # Defining the HDF5 dataset variables for lat/lon - h5[key] = fid.create_dataset(key, (file_lines,), - data=dinput[key][:], dtype=dinput[key].dtype, - compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in attributes.items(): - h5[key].attrs[att_name] = att_val - # attach dimensions - if key not in ('time',): - for i,dim in enumerate(['time']): - h5[key].dims[i].label = 'RECORD_SIZE' - h5[key].dims[i].attach_scale(h5[dim]) - - # HDF5 file attributes - fid.attrs['featureType'] = 'trajectory' - fid.attrs['title'] = 'Tidal_correction_for_elevation_measurements' - fid.attrs['summary'] = ('Ocean_pole_tide_radial_displacements_' - 'computed_at_elevation_measurements.') - fid.attrs['project'] = 'NASA_Operation_IceBridge' - fid.attrs['processing_level'] = '4' - fid.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - # add attributes for input file - fid.attrs['elevation_file'] = os.path.basename(input_file) - # add geospatial and temporal attributes - fid.attrs['geospatial_lat_min'] = dinput['lat'].min() - fid.attrs['geospatial_lat_max'] = dinput['lat'].max() - fid.attrs['geospatial_lon_min'] = dinput['lon'].min() -
fid.attrs['geospatial_lon_max'] = dinput['lon'].max() - fid.attrs['geospatial_lat_units'] = "degrees_north" - fid.attrs['geospatial_lon_units'] = "degrees_east" - fid.attrs['geospatial_ellipsoid'] = "WGS84" - fid.attrs['time_type'] = 'UTC' - - # convert start/end time from MJD into Julian days - JD_start = np.min(t) + 2400000.5 - JD_end = np.max(t) + 2400000.5 - # convert to calendar date - cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),astype=int) - # add attributes with measurement date start, end and duration - args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) - fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['hour'][-1],cal['minute'][-1],cal['second'][-1]) - fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) - args = (cal['year'][0],cal['month'][0],cal['day'][0]) - fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - args = (cal['year'][-1],cal['month'][-1],cal['day'][-1]) - fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args) - duration = np.round(JD_end*86400.0 - JD_start*86400.0) - fid.attrs['DurationTimeSeconds'] ='{0:0.0f}'.format(duration) - # close the output HDF5 dataset - fid.close() - # change the permissions level to MODE - os.chmod(os.path.join(DIRECTORY,FILENAME), MODE) - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates radial ocean pole tide displacements for - correcting Operation IceBridge elevation data following IERS - Convention (2010) guidelines - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line options - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='Input Operation IceBridge file to run') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest'), - help='Spatial interpolation method') - # verbosity settings - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of output file') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input file - for arg in args.infile: - compute_OPT_icebridge_data(arg, METHOD=args.interpolate, - VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL03.py b/scripts/compute_tides_ICESat2_ATL03.py deleted file mode 100644 index 4f0303e4..00000000 --- a/scripts/compute_tides_ICESat2_ATL03.py +++ /dev/null @@ -1,639 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL03.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 photon height data -Calculated at ATL03 segment level using reference photon geolocation and time -Segment level corrections can be applied to the individual photon events (PEs) - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - 
https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - --atlas-format X: ATLAS tide model format (OTIS, netcdf) - --gzip, -G: Tide model files are gzip compressed - --definition-file X: Model definition file for use as correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - --apply-flexure: Apply ice flexure scaling factor to height constituents - Only valid for models containing flexure fields - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL03.py: reads ICESat-2 global geolocated photon data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - added command line option to apply flexure for applicable models - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for 
extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL03 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - added valid data extrapolation with nearest_extrap - merged time conversion routines into module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ICESat2_ATL03.py from read-ICESat-2 repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: calculate minor constituents as separate variable - added AOTIM-5-2018 tide model (2018 update to 2004 model) - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. add OTIS netcdf tide option - Written 04/2019 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL03 import read_HDF5_ATL03_main, \ - read_HDF5_ATL03_beam -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 geolocated photon data (ATL03) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, - TIDE_MODEL=None, - ATLAS_FORMAT=None, - GZIP=True, - DEFINITION_FILE=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=None, - APPLY_FLEXURE=False, - VERBOSE=False, - MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams = read_HDF5_ATL03_main(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # flexure flag if being applied - flexure_flag = '_FLEXURE' 
if APPLY_FLEXURE and model.flexure else '' - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,flexure_flag,fileExtension) - OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,flexure_flag,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_{1}{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl03_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl03_tide = {} - IS2_atl03_fill = {} - IS2_atl03_dims = {} - IS2_atl03_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl03_tide['ancillary_data'] = {} - IS2_atl03_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl03_tide['ancillary_data'][key] = IS2_atl03_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl03_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl03_attrs['ancillary_data'][key].items(): - IS2_atl03_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl03_beams): - # output data dictionaries for beam - IS2_atl03_tide[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_fill[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_dims[gtx] = dict(geolocation={}, geophys_corr={}) - IS2_atl03_tide_attrs[gtx] = dict(geolocation={}, geophys_corr={}) - - # read data and attributes for beam - val,attrs = read_HDF5_ATL03_beam(INPUT_FILE,gtx,ATTRIBUTES=True) - # number of segments - n_seg = len(val['geolocation']['segment_id']) - # extract variables for computing tides - segment_id = val['geolocation']['segment_id'].copy() - delta_time = val['geolocation']['delta_time'].copy() - lon = val['geolocation']['reference_photon_lon'].copy() - lat = val['geolocation']['reference_photon_lat'].copy() - # invalid value - fv = attrs['geolocation']['sigma_h']['_FillValue'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + delta_time - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, - model.model_file, model.projection, type=model.type, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - grid=model.format, apply_flexure=APPLY_FLEXURE) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - 
amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file, - model.model_file, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, - compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(lon, lat, model.model_file, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(lon, lat, model.model_file, - type=model.type, version=model.version, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, - compressed=model.compressed) - # available model constituents - c = model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_seg),fill_value=fv) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # group attributes for beam - IS2_atl03_tide_attrs[gtx]['Description'] = attrs['Description'] - IS2_atl03_tide_attrs[gtx]['atlas_pce'] = attrs['atlas_pce'] - IS2_atl03_tide_attrs[gtx]['atlas_beam_type'] = attrs['atlas_beam_type'] - IS2_atl03_tide_attrs[gtx]['groundtrack_id'] = attrs['groundtrack_id'] - IS2_atl03_tide_attrs[gtx]['atmosphere_profile'] = attrs['atmosphere_profile'] - IS2_atl03_tide_attrs[gtx]['atlas_spot_number'] = attrs['atlas_spot_number'] - IS2_atl03_tide_attrs[gtx]['sc_orientation'] = attrs['sc_orientation'] - - # group attributes for geolocation - IS2_atl03_tide_attrs[gtx]['geolocation']['Description'] = ("Contains parameters related to " - "geolocation. The rate of all of these parameters is at the rate corresponding to the " - "ICESat-2 Geolocation Along Track Segment interval (nominally 20 m along-track).") - IS2_atl03_tide_attrs[gtx]['geolocation']['data_rate'] = ("Data within this group are " - "stored at the ICESat-2 20m segment rate.") - # group attributes for geophys_corr - IS2_atl03_tide_attrs[gtx]['geophys_corr']['Description'] = ("Contains parameters used to " - "correct photon heights for geophysical effects, such as tides. 
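The constituent oscillation above is the interpolated amplitude with the Greenwich phase lag applied as a negative complex exponent; the mask then flags any point where a constituent could not be interpolated. A small self-contained example with hypothetical amplitudes and phases for two points and two constituents:

    import numpy as np

    amp = np.ma.array([[1.20, 0.45], [0.80, 0.30]],
        mask=[[False, False], [True, False]])    # hypothetical amplitudes (m)
    ph = np.ma.array([[30.0, 120.0], [210.0, 300.0]],
        mask=amp.mask)                           # hypothetical phase lags (degrees)
    # complex phase in radians; the negative sign applies the phase lag
    cph = -1j*ph*np.pi/180.0
    # complex harmonic constants: one oscillation per constituent
    hc = amp*np.exp(cph)
    # a point is invalid wherever any constituent is masked
    invalid = np.any(hc.mask, axis=1)
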
These parameters are " - "posted at the same interval as the ICESat-2 Geolocation Along-Track Segment interval " - "(nominally 20m along-track).") - IS2_atl03_tide_attrs[gtx]['geophys_corr']['data_rate'] = ("These parameters are stored at " - "the ICESat-2 Geolocation Along Track Segment rate (nominally every 20 m along-track).") - - # geolocation, time and segment ID - # delta time in geolocation group - IS2_atl03_tide[gtx]['geolocation']['delta_time'] = delta_time - IS2_atl03_fill[gtx]['geolocation']['delta_time'] = None - IS2_atl03_dims[gtx]['geolocation']['delta_time'] = None - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['standard_name'] = "time" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['calendar'] = "standard" - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['description'] = ("Elapsed seconds " - "from the ATLAS SDP GPS Epoch, corresponding to the transmit time of the reference " - "photon. The ATLAS Standard Data Products (SDP) epoch offset is defined within " - "/ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds between the GPS epoch " - "(1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By adding the offset " - "contained within atlas_sdp_gps_epoch to delta time parameters, the time in gps_seconds " - "relative to the GPS epoch can be computed.") - IS2_atl03_tide_attrs[gtx]['geolocation']['delta_time']['coordinates'] = \ - "segment_id reference_photon_lat reference_photon_lon" - # delta time in geophys_corr group - IS2_atl03_tide[gtx]['geophys_corr']['delta_time'] = delta_time - IS2_atl03_fill[gtx]['geophys_corr']['delta_time'] = None - IS2_atl03_dims[gtx]['geophys_corr']['delta_time'] = None - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time'] = {} - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['standard_name'] = "time" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['calendar'] = "standard" - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['description'] = ("Elapsed seconds " - "from the ATLAS SDP GPS Epoch, corresponding to the transmit time of the reference " - "photon. The ATLAS Standard Data Products (SDP) epoch offset is defined within " - "/ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds between the GPS epoch " - "(1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. 
By adding the offset " - "contained within atlas_sdp_gps_epoch to delta time parameters, the time in gps_seconds " - "relative to the GPS epoch can be computed.") - IS2_atl03_tide_attrs[gtx]['geophys_corr']['delta_time']['coordinates'] = ("../geolocation/segment_id " - "../geolocation/reference_photon_lat ../geolocation/reference_photon_lon") - - # latitude - IS2_atl03_tide[gtx]['geolocation']['reference_photon_lat'] = lat - IS2_atl03_fill[gtx]['geolocation']['reference_photon_lat'] = None - IS2_atl03_dims[gtx]['geolocation']['reference_photon_lat'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['units'] = "degrees_north" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['contentType'] = "physicalMeasurement" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['long_name'] = "Latitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['standard_name'] = "latitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['description'] = ("Latitude of each " - "reference photon. Computed from the ECF Cartesian coordinates of the bounce point.") - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['valid_min'] = -90.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['valid_max'] = 90.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lat']['coordinates'] = \ - "segment_id delta_time reference_photon_lon" - # longitude - IS2_atl03_tide[gtx]['geolocation']['reference_photon_lon'] = lon - IS2_atl03_fill[gtx]['geolocation']['reference_photon_lon'] = None - IS2_atl03_dims[gtx]['geolocation']['reference_photon_lon'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['units'] = "degrees_east" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['contentType'] = "physicalMeasurement" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['long_name'] = "Longitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['standard_name'] = "longitude" - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['description'] = ("Longitude of each " - "reference photon. Computed from the ECF Cartesian coordinates of the bounce point.") - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['valid_min'] = -180.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['valid_max'] = 180.0 - IS2_atl03_tide_attrs[gtx]['geolocation']['reference_photon_lon']['coordinates'] = \ - "segment_id delta_time reference_photon_lat" - # segment ID - IS2_atl03_tide[gtx]['geolocation']['segment_id'] = segment_id - IS2_atl03_fill[gtx]['geolocation']['segment_id'] = None - IS2_atl03_dims[gtx]['geolocation']['segment_id'] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id'] = {} - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['units'] = "1" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['contentType'] = "referenceInformation" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['long_name'] = "Along-track segment ID number" - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['description'] = ("A 7 digit number " - "identifying the along-track geolocation segment number. These are sequential, starting with " - "1 for the first segment after an ascending equatorial crossing node. 
Equal to the segment_id for " - "the second of the two 20m ATL03 segments included in the 40m ATL03 segment") - IS2_atl03_tide_attrs[gtx]['geolocation']['segment_id']['coordinates'] = \ - "delta_time reference_photon_lat reference_photon_lon" - - # computed tide - IS2_atl03_tide[gtx]['geophys_corr'][model.atl03] = tide - IS2_atl03_fill[gtx]['geophys_corr'][model.atl03] = tide.fill_value - IS2_atl03_dims[gtx]['geophys_corr'][model.atl03] = ['delta_time'] - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03] = {} - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['units'] = "meters" - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['contentType'] = "referenceInformation" - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['long_name'] = model.long_name - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['description'] = model.description - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['source'] = model.name - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['reference'] = model.reference - IS2_atl03_tide_attrs[gtx]['geophys_corr'][model.atl03]['coordinates'] = \ - ("../geolocation/segment_id ../geolocation/delta_time " - "../geolocation/reference_photon_lat ../geolocation/reference_photon_lon") - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl03_fill, DIMENSIONS=IS2_atl03_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl03_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl03_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl03_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - h5[gtx] = {} - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl03_attrs[gtx][att_name] - # create geolocation and geophys_corr groups - for key in ['geolocation','geophys_corr']: - fileID[gtx].create_group(key) - h5[gtx][key] = {} - for att_name in ['Description','data_rate']: - att_val = IS2_atl03_attrs[gtx][key][att_name] - fileID[gtx][key].attrs[att_name] = att_val - - # all variables for group - groupkeys = set(IS2_atl03_tide[gtx][key].keys())-set(['delta_time']) - for k in ['delta_time',*sorted(groupkeys)]: - # values and attributes - v = IS2_atl03_tide[gtx][key][k] - attrs = 
IS2_atl03_attrs[gtx][key][k] - fillvalue = FILL_VALUE[gtx][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,key,k) - if fillvalue: - h5[gtx][key][k] = fileID.create_dataset(val, np.shape(v), - data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx][key][k] = fileID.create_dataset(val, np.shape(v), - data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx][key][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][key][k]): - h5[gtx][key][k].dims[i].attach_scale(h5[gtx][key][dim]) - else: - # make dimension - h5[gtx][key][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L2A Global Geolocated Photon Data' - fileID.attrs['summary'] = ('The purpose of ATL03 is to provide along-track ' - 'photon data for all 6 ATLAS beams and associated statistics') - fileID.attrs['description'] = ('Photon heights determined by ATBD ' - 'Algorithm using POD and PPD. All photon events per transmit pulse ' - 'per beam. Includes POD and PPD vectors. Classification of each ' - 'photon by several ATBD Algorithms.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL03 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl03_tide[gtx]['geolocation']['reference_photon_lon'] - lat = IS2_atl03_tide[gtx]['geolocation']['reference_photon_lat'] - delta_time = IS2_atl03_tide[gtx]['geolocation']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl03_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds 
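The make_scale/attach_scale calls above implement HDF5 dimension scales: delta_time is registered as a scale and then attached to the matching dimension of each variable, which is what lets downstream tools recover the coordinate relationships. A minimal sketch against a hypothetical output file:

    import h5py
    import numpy as np

    with h5py.File('example_scales.h5', 'w') as fileID:
        # register delta_time as a dimension scale
        t = fileID.create_dataset('delta_time', data=np.arange(5.0),
            compression='gzip')
        t.make_scale('delta_time')
        # attach the scale to the first dimension of a data variable
        z = fileID.create_dataset('tide_ocean', data=np.zeros((5,)),
            compression='gzip')
        z.dims[0].attach_scale(t)
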
since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL03 - geolocated photon height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat-2 geolocated photon height files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL03 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # apply flexure scaling factors to height constituents - parser.add_argument('--apply-flexure', - default=False, action='store_true', - help='Apply ice flexure scaling factor to height constituents') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main 
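The argument parser above leans on three less common argparse features: fromfile_prefix_chars="@" reads argument lists from prefix files, path options are normalized through a lambda type, and --mode parses octal permission strings into integers. A condensed sketch (the option names match the parser above; the parsed values are hypothetical):

    import os
    import argparse

    parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    # expand '~' and relative paths at parse time
    parser.add_argument('--directory', '-D',
        type=lambda p: os.path.abspath(os.path.expanduser(p)),
        default=os.getcwd())
    # octal strings such as '0775' become integer permission modes
    parser.add_argument('--mode', '-M',
        type=lambda x: int(x, base=8), default=0o775)
    args = parser.parse_args(['--directory', '~/tides', '--mode', '0755'])
    assert args.mode == 0o755
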
part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL03 file - for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, - TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, - METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, - CUTOFF=args.cutoff, - APPLY_FLEXURE=args.apply_flexure, - VERBOSE=args.verbose, - MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL06.py b/scripts/compute_tides_ICESat2_ATL06.py deleted file mode 100644 index ce076d69..00000000 --- a/scripts/compute_tides_ICESat2_ATL06.py +++ /dev/null @@ -1,641 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL06.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 land ice elevation data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - --apply-flexure: Apply ice flexure scaling factor to height constituents - Only valid for models containing flexure fields - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL06.py: reads ICESat-2 land ice along-track height data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor 
extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - added command line option to apply flexure for applicable models - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL06 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - added valid data extrapolation with nearest_extrap - merged time conversion routines into module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ICESat2_ATL06.py from read-ICESat-2 repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: calculate minor constituents as separate variable - compute tide values at all segments and then mask to valid - added AOTIM-5-2018 tide model (2018 update to 2004 model) - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. 
add OTIS netcdf tide option - Updated 05/2019: check if beam exists in a try except else clause - Updated 04/2019: check if subsetted beam contains land ice data - Written 04/2019 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL06 import read_HDF5_ATL06 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 land ice data (ATL06) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, - TIDE_MODEL=None, - ATLAS_FORMAT=None, - GZIP=True, - DEFINITION_FILE=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=None, - APPLY_FLEXURE=False, - VERBOSE=False, - MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl06_mds,IS2_atl06_attrs,IS2_atl06_beams = read_HDF5_ATL06(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # flexure flag if being applied - flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,flexure_flag,fileExtension) - OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,flexure_flag,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_{1}{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl06_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl06_tide = {} - IS2_atl06_fill = {} - IS2_atl06_dims = {} - IS2_atl06_tide_attrs = {} - # number of GPS seconds 
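Model parameters are resolved in one of two ways above: a named model goes through the pyTMD.model class, while an arbitrary model is described by a definition file. A sketch of both paths with a hypothetical model directory (the class and methods are the ones these scripts import; TPXO9-atlas-v5 is one of the named choices listed in the update history):

    import pyTMD.model

    tide_dir = '/path/to/tide_models'   # hypothetical directory
    # named model: resolves grid/model file paths and output attributes
    model = pyTMD.model(tide_dir, format='netcdf',
        compressed=True).elevation('TPXO9-atlas-v5')
    # undefined model: read parameters from a definition file (hypothetical path)
    # model = pyTMD.model(tide_dir).from_file('/path/to/model_definition.txt')
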
between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl06_tide['ancillary_data'] = {} - IS2_atl06_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl06_tide['ancillary_data'][key] = IS2_atl06_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl06_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl06_attrs['ancillary_data'][key].items(): - IS2_atl06_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl06_beams): - # output data dictionaries for beam - IS2_atl06_tide[gtx] = dict(land_ice_segments={}) - IS2_atl06_fill[gtx] = dict(land_ice_segments={}) - IS2_atl06_dims[gtx] = dict(land_ice_segments={}) - IS2_atl06_tide_attrs[gtx] = dict(land_ice_segments={}) - - # number of segments - val = IS2_atl06_mds[gtx]['land_ice_segments'] - n_seg = len(val['segment_id']) - # find valid segments for beam - fv = IS2_atl06_attrs[gtx]['land_ice_segments']['h_li']['_FillValue'] - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - grid=model.format, apply_flexure=APPLY_FLEXURE) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, type=model.type, version=model.version, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_seg),fill_value=fv) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values 
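Note the asymmetry in the format branches above: only the GOT and FES predictions interpolate TT - UT1 delta times from the merged table, while the OTIS/ATLAS/ESR and netcdf branches pass zeros. That distinction could be factored into a helper; a sketch using the same pyTMD functions these scripts import (the helper name is hypothetical):

    import numpy as np
    import pyTMD.utilities
    from pyTMD.calc_delta_time import calc_delta_time

    def delta_time_correction(model_format, tide_time):
        # GOT and FES corrections are evaluated in dynamic time
        if model_format in ('GOT', 'FES'):
            delta_file = pyTMD.utilities.get_data_path(
                ['data', 'merged_deltat.data'])
            return calc_delta_time(delta_file, tide_time)
        # remaining formats take zero corrections
        return np.zeros_like(tide_time)
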
with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # group attributes for beam - IS2_atl06_tide_attrs[gtx]['Description'] = IS2_atl06_attrs[gtx]['Description'] - IS2_atl06_tide_attrs[gtx]['atlas_pce'] = IS2_atl06_attrs[gtx]['atlas_pce'] - IS2_atl06_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl06_attrs[gtx]['atlas_beam_type'] - IS2_atl06_tide_attrs[gtx]['groundtrack_id'] = IS2_atl06_attrs[gtx]['groundtrack_id'] - IS2_atl06_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl06_attrs[gtx]['atmosphere_profile'] - IS2_atl06_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl06_attrs[gtx]['atlas_spot_number'] - IS2_atl06_tide_attrs[gtx]['sc_orientation'] = IS2_atl06_attrs[gtx]['sc_orientation'] - # group attributes for land_ice_segments - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['Description'] = ("The land_ice_segments group " - "contains the primary set of derived products. This includes geolocation, height, and " - "standard error and quality measures for each segment. This group is sparse, meaning " - "that parameters are provided only for pairs of segments for which at least one beam " - "has a valid surface-height measurement.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['data_rate'] = ("Data within this group are " - "sparse. Data values are provided only for those ICESat-2 20m segments where at " - "least one beam has a valid land ice height measurement.") - - # geolocation, time and segment ID - # delta time - IS2_atl06_tide[gtx]['land_ice_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['delta_time'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['delta_time'] = None - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['standard_name'] = "time" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['calendar'] = "standard" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. 
By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['delta_time']['coordinates'] = \ - "segment_id latitude longitude" - # latitude - IS2_atl06_tide[gtx]['land_ice_segments']['latitude'] = val['latitude'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['latitude'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['latitude'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['units'] = "degrees_north" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['long_name'] = "Latitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['standard_name'] = "latitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['valid_min'] = -90.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['valid_max'] = 90.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['latitude']['coordinates'] = \ - "segment_id delta_time longitude" - # longitude - IS2_atl06_tide[gtx]['land_ice_segments']['longitude'] = val['longitude'].copy() - IS2_atl06_fill[gtx]['land_ice_segments']['longitude'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['longitude'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['units'] = "degrees_east" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['long_name'] = "Longitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['standard_name'] = "longitude" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['valid_min'] = -180.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['valid_max'] = 180.0 - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['longitude']['coordinates'] = \ - "segment_id delta_time latitude" - # segment ID - IS2_atl06_tide[gtx]['land_ice_segments']['segment_id'] = val['segment_id'] - IS2_atl06_fill[gtx]['land_ice_segments']['segment_id'] = None - IS2_atl06_dims[gtx]['land_ice_segments']['segment_id'] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['units'] = "1" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['contentType'] = "referenceInformation" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['long_name'] = "Along-track segment ID number" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['description'] = ("A 7 digit number " - "identifying the along-track geolocation segment number. These are sequential, starting with " - "1 for the first segment after an ascending equatorial crossing node. 
Equal to the segment_id for " - "the second of the two 20m ATL03 segments included in the 40m ATL06 segment") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['segment_id']['coordinates'] = \ - "delta_time latitude longitude" - - # geophysical variables - IS2_atl06_tide[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_fill[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_dims[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['Description'] = ("The geophysical group " - "contains parameters used to correct segment heights for geophysical effects, parameters " - "related to solar background and parameters indicative of the presence or absence of clouds.") - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical']['data_rate'] = ("Data within this group " - "are stored at the land_ice_segments segment rate.") - # computed tide - IS2_atl06_tide[gtx]['land_ice_segments']['geophysical'][model.atl06] = tide - IS2_atl06_fill[gtx]['land_ice_segments']['geophysical'][model.atl06] = tide.fill_value - IS2_atl06_dims[gtx]['land_ice_segments']['geophysical'][model.atl06] = ['delta_time'] - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06] = {} - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['units'] = "meters" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['contentType'] = "referenceInformation" - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['long_name'] = model.long_name - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['description'] = model.description - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['source'] = model.name - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['reference'] = model.reference - IS2_atl06_tide_attrs[gtx]['land_ice_segments']['geophysical'][model.atl06]['coordinates'] = \ - "../segment_id ../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl06_fill, DIMENSIONS=IS2_atl06_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl06_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl06_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in 
IS2_atl06_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl06_attrs[gtx][att_name] - # create land_ice_segments group - fileID[gtx].create_group('land_ice_segments') - h5[gtx] = dict(land_ice_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl06_attrs[gtx]['land_ice_segments'][att_name] - fileID[gtx]['land_ice_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment_id variables - for k in ['delta_time','latitude','longitude','segment_id']: - # values and attributes - v = IS2_atl06_tide[gtx]['land_ice_segments'][k] - attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][k] - fillvalue = FILL_VALUE[gtx]['land_ice_segments'][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,'land_ice_segments',k) - if fillvalue: - h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx]['land_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx]['land_ice_segments'][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['land_ice_segments'][k]): - h5[gtx]['land_ice_segments'][k].dims[i].attach_scale( - h5[gtx]['land_ice_segments'][dim]) - else: - # make dimension - h5[gtx]['land_ice_segments'][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['land_ice_segments'][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx]['land_ice_segments'].create_group(key) - h5[gtx]['land_ice_segments'][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl06_attrs[gtx]['land_ice_segments'][key][att_name] - fileID[gtx]['land_ice_segments'][key].attrs[att_name] = att_val - for k,v in IS2_atl06_tide[gtx]['land_ice_segments'][key].items(): - # attributes - attrs = IS2_atl06_attrs[gtx]['land_ice_segments'][key][k] - fillvalue = FILL_VALUE[gtx]['land_ice_segments'][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,'land_ice_segments',key,k) - if fillvalue: - h5[gtx]['land_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx]['land_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['land_ice_segments'][key][k]): - h5[gtx]['land_ice_segments'][key][k].dims[i].attach_scale( - h5[gtx]['land_ice_segments'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['land_ice_segments'][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Land Ice Height' - fileID.attrs['summary'] = ('Estimates of the ice-sheet tidal parameters ' - 'needed to interpret and assess the quality of the height estimates.') - fileID.attrs['description'] = ('Land ice parameters for each beam. 
All ' - 'parameters are calculated for the same along-track increments for ' - 'each beam and repeat.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL06 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl06_tide[gtx]['land_ice_segments']['longitude'] - lat = IS2_atl06_tide[gtx]['land_ice_segments']['latitude'] - delta_time = IS2_atl06_tide[gtx]['land_ice_segments']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl06_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL06 - land ice elevation data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # 
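The running min/max chains above accumulate geospatial and temporal bounds across beams; with the per-beam arrays collected first, each bound reduces to a single builtin reduction. A sketch with hypothetical per-beam longitude arrays:

    import numpy as np

    beam_lons = [np.array([-50.1, -49.8]), np.array([-51.0, -49.5])]
    lnmn = min(lon.min() for lon in beam_lons)
    lnmx = max(lon.max() for lon in beam_lons)
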
input ICESat-2 land ice height files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL06 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # apply flexure scaling factors to height constituents - parser.add_argument('--apply-flexure', - default=False, action='store_true', - help='Apply ice flexure scaling factor to height constituents') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL06 file - for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, - TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, - METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, - CUTOFF=args.cutoff, - APPLY_FLEXURE=args.apply_flexure, - VERBOSE=args.verbose, - MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL07.py b/scripts/compute_tides_ICESat2_ATL07.py deleted file mode 100644 index 667a25ee..00000000 --- a/scripts/compute_tides_ICESat2_ATL07.py +++ /dev/null @@ -1,649 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL07.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 sea ice height data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - 
ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL07.py: reads ICESat-2 sea ice height data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL07 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - added valid data extrapolation with nearest_extrap - merged time conversion routines into 
module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ICESat2_ATL07.py from read-ICESat-2 repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: added AOTIM-5-2018 tide model (2018 update to 2004 model) - Forked 11/2019 from compute_tides_ICESat2_ATL06.py - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. add OTIS netcdf tide option - Updated 05/2019: check if beam exists in a try except else clause - Updated 04/2019: check if subsetted beam contains sea ice data - Written 04/2019 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 sea ice height (ATL07) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 sea ice file name - rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})' - r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = 
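The sea ice granule pattern below differs from the land ice one by a two-digit hemisphere code after the product name. Applied to a hypothetical ATL07 filename, the groups unpack as follows:

    import re

    rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})'
        r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
    # hypothetical northern-hemisphere granule name
    granule = 'ATL07-01_20190301004639_09560201_005_01.h5'
    SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX = \
        rx.findall(granule).pop()
    assert (PRD, HEM, RL) == ('ATL07', '01', '005')
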
(fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,HEM,model.name,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX) - ff = '{0}-{1}_{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' - OUTPUT_FILE = ff.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl07_tide = {} - IS2_atl07_fill = {} - IS2_atl07_dims = {} - IS2_atl07_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl07_tide['ancillary_data'] = {} - IS2_atl07_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl07_tide['ancillary_data'][key] = IS2_atl07_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl07_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl07_attrs['ancillary_data'][key].items(): - IS2_atl07_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl07_beams): - # output data dictionaries for beam - IS2_atl07_tide[gtx] = dict(sea_ice_segments={}) - IS2_atl07_fill[gtx] = dict(sea_ice_segments={}) - IS2_atl07_dims[gtx] = dict(sea_ice_segments={}) - IS2_atl07_tide_attrs[gtx] = dict(sea_ice_segments={}) - - # number of segments - val = IS2_atl07_mds[gtx]['sea_ice_segments'] - n_seg = len(val['height_segment_id']) - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - scale=model.scale, cutoff=CUTOFF, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, type=model.type, version=model.version, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = model.constituents - # 
interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_seg)) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # group attributes for beam - IS2_atl07_tide_attrs[gtx]['Description'] = IS2_atl07_attrs[gtx]['Description'] - IS2_atl07_tide_attrs[gtx]['atlas_pce'] = IS2_atl07_attrs[gtx]['atlas_pce'] - IS2_atl07_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl07_attrs[gtx]['atlas_beam_type'] - IS2_atl07_tide_attrs[gtx]['groundtrack_id'] = IS2_atl07_attrs[gtx]['groundtrack_id'] - IS2_atl07_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl07_attrs[gtx]['atmosphere_profile'] - IS2_atl07_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl07_attrs[gtx]['atlas_spot_number'] - IS2_atl07_tide_attrs[gtx]['sc_orientation'] = IS2_atl07_attrs[gtx]['sc_orientation'] - # group attributes for sea_ice_segments - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['Description'] = ("Top group for sea " - "ice segments as computed by the ATBD algorithm.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['data_rate'] = ("Data within this " - "group are stored at the variable segment rate.") - - # geolocation, time and segment ID - # delta time - IS2_atl07_tide[gtx]['sea_ice_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['delta_time'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['delta_time'] = None - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['standard_name'] = "time" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['source'] = "telemetry" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['calendar'] = "standard" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['delta_time']['coordinates'] = \ - "height_segment_id latitude longitude" - # latitude - IS2_atl07_tide[gtx]['sea_ice_segments']['latitude'] = val['latitude'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['latitude'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['latitude'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['units'] = "degrees_north" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['long_name'] = "Latitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['standard_name'] = "latitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['valid_min'] = -90.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['valid_max'] = 90.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['latitude']['coordinates'] = \ - "height_segment_id delta_time longitude" - # longitude - IS2_atl07_tide[gtx]['sea_ice_segments']['longitude'] = val['longitude'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['longitude'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['longitude'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['units'] = "degrees_east" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['long_name'] = "Longitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['standard_name'] = "longitude" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['valid_min'] = -180.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['valid_max'] = 180.0 - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['longitude']['coordinates'] = \ - "height_segment_id delta_time latitude" - # segment ID - IS2_atl07_tide[gtx]['sea_ice_segments']['height_segment_id'] = val['height_segment_id'] - IS2_atl07_fill[gtx]['sea_ice_segments']['height_segment_id'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['height_segment_id'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['long_name'] = \ - "Identifier of each height segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['description'] = \ - "Identifier of each height segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['height_segment_id']['coordinates'] = \ - "delta_time latitude longitude" - # geolocation segment beginning - IS2_atl07_tide[gtx]['sea_ice_segments']['geoseg_beg'] = val['geoseg_beg'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_beg'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_beg'] = ['delta_time'] - 
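The delta_time description above spells out the GPS-epoch bookkeeping in prose. As a minimal sketch of the same conversion, using the two pyTMD.time routines this script already imports; the literal epoch offset (nominally 1198800018.0 GPS seconds) and the sample delta_time value are illustrative assumptions rather than values read from a granule:

    import numpy as np
    import pyTMD.time
    # nominal /ancillary_data/atlas_sdp_gps_epoch value: GPS seconds between
    # the GPS epoch (1980-01-06) and the ATLAS SDP epoch (2018-01-01)
    atlas_sdp_gps_epoch = 1198800018.0
    delta_time = np.array([30000000.0])  # assumed ATLAS SDP time in seconds
    # add the SDP offset, remove leap seconds, rescale to days since 1992-01-01
    gps_seconds = atlas_sdp_gps_epoch + delta_time
    leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
    tide_time = pyTMD.time.convert_delta_time(gps_seconds - leap_seconds,
        epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0)

The leap seconds are subtracted because delta_time counts continuous GPS seconds, while the tide predictions are referenced to UTC days since 1992-01-01.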
IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['long_name'] = "Beginning GEOSEG" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['description'] = \ - "Geolocation segment (geoseg) ID associated with the first photon used in this sea ice segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_beg']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - # geolocation segment ending - IS2_atl07_tide[gtx]['sea_ice_segments']['geoseg_end'] = val['geoseg_end'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_end'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_end'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['units'] = "1" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['long_name'] = "Ending GEOSEG" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['description'] = \ - "Geolocation segment (geoseg) ID associated with the last photon used in this sea ice segment" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geoseg_end']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - # along track distance - IS2_atl07_tide[gtx]['sea_ice_segments']['seg_dist_x'] = val['seg_dist_x'].copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['seg_dist_x'] = None - IS2_atl07_dims[gtx]['sea_ice_segments']['seg_dist_x'] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['units'] = "meters" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['long_name'] = "Along track distance" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['description'] = \ - "Along-track distance from the equator crossing to the segment center." 
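The prediction step earlier in this function condenses the whole tide computation into two calls: the interpolated amplitudes and phases become complex constituents through Euler's formula, the major constituents are evaluated along the drift track, and the minor constituents are inferred from the major ones. A self-contained sketch with made-up constants; the amplitudes, phases, and time below are assumptions, since a real run gets them from one of the extract_*_constants readers:

    import numpy as np
    from pyTMD.infer_minor_corrections import infer_minor_corrections
    from pyTMD.predict_tide_drift import predict_tide_drift
    # two major constituents with assumed amplitudes (meters) and phases (degrees)
    c = ['m2','s2']
    amp = np.ma.array([[1.0,0.4]], mask=[[False,False]])
    ph = np.ma.array([[30.0,60.0]], mask=[[False,False]])
    tide_time = np.array([10958.0])  # days since 1992-01-01 (2022-01-01)
    deltat = np.zeros_like(tide_time)  # the script passes zeros for OTIS-type models
    # complex constituent oscillation via Euler's formula
    hc = amp*np.exp(-1j*ph*np.pi/180.0)
    # major constituents plus inferred minor constituents
    tide = predict_tide_drift(tide_time, hc, c, deltat=deltat, corrections='OTIS')
    tide.data[:] += infer_minor_corrections(tide_time, hc, c,
        deltat=deltat, corrections='OTIS')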
- IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['seg_dist_x']['coordinates'] = \ - "height_segment_id delta_time latitude longitude" - - # geophysical variables - IS2_atl07_tide[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['Description'] = ("Contains geophysical " - "parameters and corrections used to correct photon heights for geophysical effects, such as tides.") - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical']['data_rate'] = ("Data within this group " - "are stored at the sea_ice_height segment rate.") - - # computed tide - IS2_atl07_tide[gtx]['sea_ice_segments']['geophysical'][model.atl07] = tide.copy() - IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'][model.atl07] = tide.fill_value - IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'][model.atl07] = ['delta_time'] - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07] = {} - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['units'] = "meters" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['contentType'] = "referenceInformation" - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['long_name'] = model.long_name - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['description'] = model.description - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['source'] = model.name - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['reference'] = model.reference - IS2_atl07_tide_attrs[gtx]['sea_ice_segments']['geophysical'][model.atl07]['coordinates'] = \ - "../height_segment_id ../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl07_fill, DIMENSIONS=IS2_atl07_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl07_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl07_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl07_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in 
['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl07_attrs[gtx][att_name] - # create sea_ice_segments group - fileID[gtx].create_group('sea_ice_segments') - h5[gtx] = dict(sea_ice_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl07_attrs[gtx]['sea_ice_segments'][att_name] - fileID[gtx]['sea_ice_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment identification variables - for k in ['delta_time','latitude','longitude','height_segment_id', - 'geoseg_beg','geoseg_end','seg_dist_x']: - # values and attributes - v = IS2_atl07_tide[gtx]['sea_ice_segments'][k] - attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][k] - fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,'sea_ice_segments',k) - if fillvalue: - h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx]['sea_ice_segments'][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][k]): - h5[gtx]['sea_ice_segments'][k].dims[i].attach_scale( - h5[gtx]['sea_ice_segments'][dim]) - else: - # make dimension - h5[gtx]['sea_ice_segments'][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['sea_ice_segments'][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx]['sea_ice_segments'].create_group(key) - h5[gtx]['sea_ice_segments'][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl07_attrs[gtx]['sea_ice_segments'][key][att_name] - fileID[gtx]['sea_ice_segments'][key].attrs[att_name] = att_val - for k,v in IS2_atl07_tide[gtx]['sea_ice_segments'][key].items(): - # attributes - attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][key][k] - fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,'sea_ice_segments',key,k) - if fillvalue: - h5[gtx]['sea_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx]['sea_ice_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][key][k]): - h5[gtx]['sea_ice_segments'][key][k].dims[i].attach_scale( - h5[gtx]['sea_ice_segments'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['sea_ice_segments'][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Height' - fileID.attrs['summary'] = ('Estimates of the sea ice tidal parameters ' - 'needed to interpret and assess the quality of the height estimates.') - fileID.attrs['description'] = ('The data set (ATL07) contains along-track ' - 'heights for sea ice and open water leads (at varying length scales) ' - 'relative to the WGS84 ellipsoid (ITRF2014 reference frame) after ' - 'adjustment for geoidal and tidal variations, and inverted barometer ' - 'effects.') - 
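HDF5_ATL07_tide_write leans on h5py dimension scales: each variable is either registered as a scale with make_scale or attached to the scale it is dimensioned by with attach_scale, which is what lets downstream readers treat delta_time as the coordinate of each beam group. The same pattern, reduced to one coordinate and one variable in a throwaway file (the file and variable names here are hypothetical):

    import h5py
    import numpy as np
    with h5py.File('example_TIDES.h5','w') as fileID:
        # coordinate variable doubles as the dimension scale
        t = fileID.create_dataset('delta_time', data=np.arange(5.0),
            compression='gzip')
        t.make_scale('delta_time')
        # data variable dimensioned by delta_time
        v = fileID.create_dataset('tide_ocean', data=np.zeros((5)),
            compression='gzip')
        v.dims[0].attach_scale(t)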
date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL07 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl07_tide[gtx]['sea_ice_segments']['longitude'] - lat = IS2_atl07_tide[gtx]['sea_ice_segments']['latitude'] - delta_time = IS2_atl07_tide[gtx]['sea_ice_segments']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl07_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL07 - sea ice height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat-2 sea ice height files - parser.add_argument('infile', - type=lambda p:
os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL07 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL07 file - for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL10.py b/scripts/compute_tides_ICESat2_ATL10.py deleted file mode 100644 index 2a659dee..00000000 --- a/scripts/compute_tides_ICESat2_ATL10.py +++ /dev/null @@ -1,612 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL10.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 sea ice height data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - 
bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL10.py: reads ICESat-2 sea ice freeboard data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Forked 12/2021 from compute_tides_ICESat2_ATL07.py - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL10 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - added valid data extrapolation with nearest_extrap - merged time conversion routines into module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load.
use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ICESat2_ATL10.py from read-ICESat-2 repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: added AOTIM-5-2018 tide model (2018 update to 2004 model) - Forked 11/2019 from compute_tides_ICESat2_ATL06.py - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. add OTIS netcdf tide option - Updated 05/2019: check if beam exists in a try except else clause - Updated 04/2019: check if subsetted beam contains sea ice data - Written 04/2019 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL10 import read_HDF5_ATL10 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 sea ice freeboard (ATL10) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl10_mds,IS2_atl10_attrs,IS2_atl10_beams = read_HDF5_ATL10(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 sea ice file name - rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})' - r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,HEM,model.name,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX) - ff = '{0}-{1}_{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' -
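The regular expression above carries the whole granule naming scheme: product, hemisphere code, acquisition time, track/cycle/segment, release, and version; the output name splices the tide model between the product code and the timestamp. A sketch with a made-up ATL10 granule name and CATS2008 chosen here as the example model:

    import re
    rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})'
        r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
    granule = 'ATL10-02_20190101000000_00540201_005_01.h5'  # hypothetical file
    SUB,PRD,HEM,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX = \
        rx.findall(granule).pop()
    args = (PRD,HEM,'CATS2008',YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX)
    ff = '{0}-{1}_{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5'
    # gives ATL10-02_CATS2008_TIDES_20190101000000_00540201_005_01.h5
    OUTPUT_FILE = ff.format(*args)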
OUTPUT_FILE = ff.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl10_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl10_tide = {} - IS2_atl10_fill = {} - IS2_atl10_dims = {} - IS2_atl10_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl10_tide['ancillary_data'] = {} - IS2_atl10_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl10_tide['ancillary_data'][key] = IS2_atl10_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl10_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl10_attrs['ancillary_data'][key].items(): - IS2_atl10_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl10_beams): - # output data dictionaries for beam - IS2_atl10_tide[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_fill[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_dims[gtx] = dict(freeboard_beam_segment={},leads={}) - IS2_atl10_tide_attrs[gtx] = dict(freeboard_beam_segment={},leads={}) - - # group attributes for beam - IS2_atl10_tide_attrs[gtx]['Description'] = IS2_atl10_attrs[gtx]['Description'] - IS2_atl10_tide_attrs[gtx]['atlas_pce'] = IS2_atl10_attrs[gtx]['atlas_pce'] - IS2_atl10_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl10_attrs[gtx]['atlas_beam_type'] - IS2_atl10_tide_attrs[gtx]['groundtrack_id'] = IS2_atl10_attrs[gtx]['groundtrack_id'] - IS2_atl10_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl10_attrs[gtx]['atmosphere_profile'] - IS2_atl10_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl10_attrs[gtx]['atlas_spot_number'] - IS2_atl10_tide_attrs[gtx]['sc_orientation'] = IS2_atl10_attrs[gtx]['sc_orientation'] - - # group attributes for freeboard_beam_segment - IS2_atl10_tide_attrs[gtx]['freeboard_beam_segment']['Description'] = ("Contains freeboard " - "estimate and associated height segment parameters for only the sea ice segments by beam.") - IS2_atl10_tide_attrs[gtx]['freeboard_beam_segment']['data_rate'] = ("Data within this " - "group are stored at the freeboard swath segment rate.") - # group attributes for leads - IS2_atl10_tide_attrs[gtx]['leads']['Description'] = ("Contains parameters relating " - "to the freeboard values.") - IS2_atl10_tide_attrs[gtx]['leads']['data_rate'] = ("Data within this " - "group are stored at the lead index rate.") - - # for each ATL10 group - for group in ['freeboard_beam_segment','leads']: - # number of segments - val = IS2_atl10_mds[gtx][group] - n_seg = len(val['delta_time']) - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - 
model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - scale=model.scale, cutoff=CUTOFF, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, type=model.type, version=model.version, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_seg)) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # geolocation, time and segment ID - # delta time - IS2_atl10_tide[gtx][group]['delta_time'] = val['delta_time'].copy() - IS2_atl10_fill[gtx][group]['delta_time'] = None - IS2_atl10_dims[gtx][group]['delta_time'] = None - IS2_atl10_tide_attrs[gtx][group]['delta_time'] = {} - IS2_atl10_tide_attrs[gtx][group]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['standard_name'] = "time" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['source'] = "telemetry" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['calendar'] = "standard" - IS2_atl10_tide_attrs[gtx][group]['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl10_tide_attrs[gtx][group]['delta_time']['coordinates'] = \ - "latitude longitude" - # latitude - IS2_atl10_tide[gtx][group]['latitude'] = val['latitude'].copy() - IS2_atl10_fill[gtx][group]['latitude'] = None - IS2_atl10_dims[gtx][group]['latitude'] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['latitude'] = {} - IS2_atl10_tide_attrs[gtx][group]['latitude']['units'] = "degrees_north" - IS2_atl10_tide_attrs[gtx][group]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl10_tide_attrs[gtx][group]['latitude']['long_name'] = "Latitude" - IS2_atl10_tide_attrs[gtx][group]['latitude']['standard_name'] = "latitude" - IS2_atl10_tide_attrs[gtx][group]['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl10_tide_attrs[gtx][group]['latitude']['valid_min'] = -90.0 - IS2_atl10_tide_attrs[gtx][group]['latitude']['valid_max'] = 90.0 - IS2_atl10_tide_attrs[gtx][group]['latitude']['coordinates'] = \ - "delta_time longitude" - # longitude - IS2_atl10_tide[gtx][group]['longitude'] = val['longitude'].copy() - IS2_atl10_fill[gtx][group]['longitude'] = None - IS2_atl10_dims[gtx][group]['longitude'] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['longitude'] = {} - IS2_atl10_tide_attrs[gtx][group]['longitude']['units'] = "degrees_east" - IS2_atl10_tide_attrs[gtx][group]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl10_tide_attrs[gtx][group]['longitude']['long_name'] = "Longitude" - IS2_atl10_tide_attrs[gtx][group]['longitude']['standard_name'] = "longitude" - IS2_atl10_tide_attrs[gtx][group]['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl10_tide_attrs[gtx][group]['longitude']['valid_min'] = -180.0 - IS2_atl10_tide_attrs[gtx][group]['longitude']['valid_max'] = 180.0 - IS2_atl10_tide_attrs[gtx][group]['longitude']['coordinates'] = \ - "delta_time latitude" - - # geophysical variables - IS2_atl10_tide[gtx][group]['geophysical'] = {} - IS2_atl10_fill[gtx][group]['geophysical'] = {} - IS2_atl10_dims[gtx][group]['geophysical'] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical'] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical']['Description'] = ("Contains geophysical " - "parameters and corrections used to correct photon heights for geophysical effects, " - "such as tides.") - IS2_atl10_tide_attrs[gtx][group]['geophysical']['data_rate'] = ("Data within this group " - "are stored at the variable segment rate.") - - # computed tide - IS2_atl10_tide[gtx][group]['geophysical'][model.atl10] = tide.copy() - IS2_atl10_fill[gtx][group]['geophysical'][model.atl10] = tide.fill_value - IS2_atl10_dims[gtx][group]['geophysical'][model.atl10] = ['delta_time'] - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10] = {} - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['units'] = "meters" - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['contentType'] = \ - "referenceInformation" - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['long_name'] = model.long_name - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['description'] = model.description - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['source'] = model.name - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['reference'] = model.reference - IS2_atl10_tide_attrs[gtx][group]['geophysical'][model.atl10]['coordinates'] = \ - "../delta_time 
../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl10_fill, DIMENSIONS=IS2_atl10_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl10_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl10_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl10_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl10_attrs[gtx][att_name] - # create freeboard_beam_segment and leads groups - h5[gtx] = dict(freeboard_beam_segment={},leads={}) - for group in ['freeboard_beam_segment','leads']: - fileID[gtx].create_group(group) - for att_name in ['Description','data_rate']: - att_val = IS2_atl10_attrs[gtx][group][att_name] - fileID[gtx][group].attrs[att_name] = att_val - - # delta_time and geolocation variables - for k in ['delta_time','latitude','longitude']: - # values and attributes - v = IS2_atl10_tide[gtx][group][k] - attrs = IS2_atl10_attrs[gtx][group][k] - fillvalue = FILL_VALUE[gtx][group][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,group,k) - if fillvalue: - h5[gtx][group][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx][group][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx][group][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][group][k]): - h5[gtx][group][k].dims[i].attach_scale( - h5[gtx][group][dim]) - else: - # make dimension - h5[gtx][group][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][group][k].attrs[att_name] = att_val - - # add to geophysical corrections - key = 'geophysical' - fileID[gtx][group].create_group(key) - h5[gtx][group][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl10_attrs[gtx][group][key][att_name] - fileID[gtx][group][key].attrs[att_name] = att_val - for k,v in IS2_atl10_tide[gtx][group][key].items(): - # attributes - attrs = IS2_atl10_attrs[gtx][group][key][k] - fillvalue = 
FILL_VALUE[gtx][group][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,group,key,k) - if fillvalue: - h5[gtx][group][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx][group][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx][group][key][k]): - h5[gtx][group][key][k].dims[i].attach_scale( - h5[gtx][group][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx][group][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Freeboard' - fileID.attrs['summary'] = ('Estimates of the sea ice tidal parameters ' - 'needed to interpret and assess the quality of the freeboard estimates.') - fileID.attrs['description'] = ('The data set (ATL10) contains estimates ' - 'of sea ice freeboard, calculated using three different approaches. ' - 'Sea ice leads used to establish the reference sea surface and ' - 'descriptive statistics used in the height estimates are also provided') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL10 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - # for each ATL10 group - for group in ['freeboard_beam_segment','leads']: - lon = IS2_atl10_tide[gtx][group]['longitude'] - lat = IS2_atl10_tide[gtx][group]['latitude'] - delta_time = IS2_atl10_tide[gtx][group]['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl10_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian =
2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL10 - sea ice freeboard data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat-2 sea ice height files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL10 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL10 file - for FILE in args.infile: - 
compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL11.py b/scripts/compute_tides_ICESat2_ATL11.py deleted file mode 100644 index 39ca3cf8..00000000 --- a/scripts/compute_tides_ICESat2_ATL11.py +++ /dev/null @@ -1,802 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL11.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 annual land ice height data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - --apply-flexure: Apply ice flexure scaling factor to height constituents - Only valid for models containing flexure fields - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL11.py: reads ICESat-2 annual land ice height data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide 
model programs - added command line option to apply flexure for applicable models - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL11 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 02/2021: additionally calculate tides for crossing track data - Updated 01/2021: using standalone ATL11 reader - Updated 12/2020: merged time conversion routines into module - Written 12/2020 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import collections -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL11 import read_HDF5_ATL11 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 annual land ice height data (ATL11) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, - TIDE_MODEL=None, - ATLAS_FORMAT=None, - GZIP=True, - DEFINITION_FILE=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=None, - APPLY_FLEXURE=False, - VERBOSE=False, - MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl11_mds,IS2_atl11_attrs,IS2_atl11_pairs = read_HDF5_ATL11(INPUT_FILE, - ATTRIBUTES=True, CROSSOVERS=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # flexure flag if being applied - flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' - # extract parameters from ICESat-2 ATLAS HDF5 file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})_(\d{2})(\d{2})_' - r'(\d{3})_(\d{2})(.*?).h5$') - try: - 
SUB,PRD,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,flexure_flag,fileExtension) - OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,flexure_flag,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX) - file_format = '{0}_{1}{2}_TIDES_{3}{4}_{5}{6}_{7}_{8}{9}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl11_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl11_tide = {} - IS2_atl11_fill = {} - IS2_atl11_dims = {} - IS2_atl11_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to delta time parameters to compute full gps_seconds - IS2_atl11_tide['ancillary_data'] = {} - IS2_atl11_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl11_tide['ancillary_data'][key] = IS2_atl11_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl11_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl11_attrs['ancillary_data'][key].items(): - IS2_atl11_tide_attrs['ancillary_data'][key][att_name] = att_val - # HDF5 group name for across-track data - XT = 'crossing_track_data' - - # for each input beam pair within the file - for ptx in sorted(IS2_atl11_pairs): - # output data dictionaries for beam - IS2_atl11_tide[ptx] = dict(cycle_stats=collections.OrderedDict(), - crossing_track_data=collections.OrderedDict()) - IS2_atl11_fill[ptx] = dict(cycle_stats={},crossing_track_data={}) - IS2_atl11_dims[ptx] = dict(cycle_stats={},crossing_track_data={}) - IS2_atl11_tide_attrs[ptx] = dict(cycle_stats={},crossing_track_data={}) - - # extract along-track and across-track variables - ref_pt = {} - latitude = {} - longitude = {} - delta_time = {} - # along-track (AT) reference point, latitude, longitude and time - ref_pt['AT'] = IS2_atl11_mds[ptx]['ref_pt'].copy() - latitude['AT'] = np.ma.array(IS2_atl11_mds[ptx]['latitude'], - fill_value=IS2_atl11_attrs[ptx]['latitude']['_FillValue']) - longitude['AT'] = np.ma.array(IS2_atl11_mds[ptx]['longitude'], - fill_value=IS2_atl11_attrs[ptx]['longitude']['_FillValue']) - delta_time['AT'] = np.ma.array(IS2_atl11_mds[ptx]['delta_time'], - fill_value=IS2_atl11_attrs[ptx]['delta_time']['_FillValue']) - # across-track (XT) reference point, latitude, longitude and time - ref_pt['XT'] = IS2_atl11_mds[ptx][XT]['ref_pt'].copy() - latitude['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['latitude'], - fill_value=IS2_atl11_attrs[ptx][XT]['latitude']['_FillValue']) - longitude['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['longitude'], - fill_value=IS2_atl11_attrs[ptx][XT]['longitude']['_FillValue']) - delta_time['XT'] = np.ma.array(IS2_atl11_mds[ptx][XT]['delta_time'], - fill_value=IS2_atl11_attrs[ptx][XT]['delta_time']['_FillValue']) - - # number of average segments and number of included cycles - # fill_value for invalid heights and corrections - fv = IS2_atl11_attrs[ptx]['h_corr']['_FillValue'] - # shape of along-track and across-track data - n_points,n_cycles = 
delta_time['AT'].shape - n_cross, = delta_time['XT'].shape - # allocate for output tidal variables - tide = {} - # along-track (AT) tides - tide['AT'] = np.ma.empty((n_points,n_cycles),fill_value=fv) - tide['AT'].mask = (delta_time['AT'] == delta_time['AT'].fill_value) - # across-track (XT) tides - tide['XT'] = np.ma.empty((n_cross),fill_value=fv) - tide['XT'].mask = (delta_time['XT'] == delta_time['XT'].fill_value) - - # calculate tides for along-track and across-track data - for track in ['AT','XT']: - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + delta_time[track] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), - scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(longitude[track], - latitude[track], model.grid_file, model.model_file, - model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - grid=model.format, apply_flexure=APPLY_FLEXURE) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(longitude[track], - latitude[track], model.grid_file, model.model_file, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(longitude[track], - latitude[track], model.model_file, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, - compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(longitude[track], - latitude[track], model.model_file, - type=model.type, version=model.version, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # calculate tides for track type - if (track == 'AT'): - # calculate tides for each cycle if along-track - for cycle in range(n_cycles): - # find valid time and spatial points for cycle - tide[track].mask[:,cycle] |= np.any(hc.mask,axis=1) - valid, = np.nonzero(~tide[track].mask[:,cycle]) - # predict tidal elevations and infer minor corrections - tide[track].data[valid,cycle] = predict_tide_drift( - tide_time[valid,cycle], hc[valid,:], c, - deltat=deltat[valid,cycle], corrections=model.format) - minor = infer_minor_corrections(tide_time[valid,cycle], hc[valid,:], - c, deltat=deltat[valid,cycle], corrections=model.format) - tide[track].data[valid,cycle] += minor.data[:] - elif (track == 'XT'): - # find valid time and spatial points - tide[track].mask[:] |= np.any(hc.mask,axis=1) - valid, = np.nonzero(~tide[track].mask[:]) - # predict tidal elevations and infer minor corrections - tide[track].data[valid] = predict_tide_drift(tide_time[valid], - hc[valid,:], c, deltat=deltat[valid], - corrections=model.format) - minor = infer_minor_corrections(tide_time[valid], hc[valid,:], - c, 
deltat=deltat[valid], corrections=model.format) - tide[track].data[valid] += minor.data[:] - - # replace masked and nan values with fill value - invalid = np.nonzero(np.isnan(tide[track].data) | tide[track].mask) - tide[track].data[invalid] = tide[track].fill_value - tide[track].mask[invalid] = True - - # group attributes for beam - IS2_atl11_tide_attrs[ptx]['description'] = ('Contains the primary science parameters ' - 'for this data set') - IS2_atl11_tide_attrs[ptx]['beam_pair'] = IS2_atl11_attrs[ptx]['beam_pair'] - IS2_atl11_tide_attrs[ptx]['ReferenceGroundTrack'] = IS2_atl11_attrs[ptx]['ReferenceGroundTrack'] - IS2_atl11_tide_attrs[ptx]['first_cycle'] = IS2_atl11_attrs[ptx]['first_cycle'] - IS2_atl11_tide_attrs[ptx]['last_cycle'] = IS2_atl11_attrs[ptx]['last_cycle'] - IS2_atl11_tide_attrs[ptx]['equatorial_radius'] = IS2_atl11_attrs[ptx]['equatorial_radius'] - IS2_atl11_tide_attrs[ptx]['polar_radius'] = IS2_atl11_attrs[ptx]['polar_radius'] - - # geolocation, time and reference point - # reference point - IS2_atl11_tide[ptx]['ref_pt'] = ref_pt['AT'].copy() - IS2_atl11_fill[ptx]['ref_pt'] = None - IS2_atl11_dims[ptx]['ref_pt'] = None - IS2_atl11_tide_attrs[ptx]['ref_pt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['ref_pt']['units'] = "1" - IS2_atl11_tide_attrs[ptx]['ref_pt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx]['ref_pt']['long_name'] = "Reference point number" - IS2_atl11_tide_attrs[ptx]['ref_pt']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['ref_pt']['description'] = ("The reference point is the " - "7 digit segment_id number corresponding to the center of the ATL06 data used " - "for each ATL11 point. These are sequential, starting with 1 for the first " - "segment after an ascending equatorial crossing node.") - IS2_atl11_tide_attrs[ptx]['ref_pt']['coordinates'] = \ - "delta_time latitude longitude" - # cycle_number - IS2_atl11_tide[ptx]['cycle_number'] = IS2_atl11_mds[ptx]['cycle_number'].copy() - IS2_atl11_fill[ptx]['cycle_number'] = None - IS2_atl11_dims[ptx]['cycle_number'] = None - IS2_atl11_tide_attrs[ptx]['cycle_number'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['cycle_number']['units'] = "1" - IS2_atl11_tide_attrs[ptx]['cycle_number']['long_name'] = "Orbital cycle number" - IS2_atl11_tide_attrs[ptx]['cycle_number']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['cycle_number']['description'] = ("Number of 91-day periods " - "that have elapsed since ICESat-2 entered the science orbit. Each of the 1,387 " - "reference ground track (RGTs) is targeted in the polar regions once " - "every 91 days.") - # delta time - IS2_atl11_tide[ptx]['delta_time'] = delta_time['AT'].copy() - IS2_atl11_fill[ptx]['delta_time'] = delta_time['AT'].fill_value - IS2_atl11_dims[ptx]['delta_time'] = ['ref_pt','cycle_number'] - IS2_atl11_tide_attrs[ptx]['delta_time'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl11_tide_attrs[ptx]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl11_tide_attrs[ptx]['delta_time']['standard_name'] = "time" - IS2_atl11_tide_attrs[ptx]['delta_time']['calendar'] = "standard" - IS2_atl11_tide_attrs[ptx]['delta_time']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. 
The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl11_tide_attrs[ptx]['delta_time']['coordinates'] = \ - "ref_pt cycle_number latitude longitude" - # latitude - IS2_atl11_tide[ptx]['latitude'] = latitude['AT'].copy() - IS2_atl11_fill[ptx]['latitude'] = latitude['AT'].fill_value - IS2_atl11_dims[ptx]['latitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx]['latitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['latitude']['units'] = "degrees_north" - IS2_atl11_tide_attrs[ptx]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx]['latitude']['long_name'] = "Latitude" - IS2_atl11_tide_attrs[ptx]['latitude']['standard_name'] = "latitude" - IS2_atl11_tide_attrs[ptx]['latitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['latitude']['description'] = ("Center latitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx]['latitude']['valid_min'] = -90.0 - IS2_atl11_tide_attrs[ptx]['latitude']['valid_max'] = 90.0 - IS2_atl11_tide_attrs[ptx]['latitude']['coordinates'] = \ - "ref_pt delta_time longitude" - # longitude - IS2_atl11_tide[ptx]['longitude'] = longitude['AT'].copy() - IS2_atl11_fill[ptx]['longitude'] = longitude['AT'].fill_value - IS2_atl11_dims[ptx]['longitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx]['longitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['longitude']['units'] = "degrees_east" - IS2_atl11_tide_attrs[ptx]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx]['longitude']['long_name'] = "Longitude" - IS2_atl11_tide_attrs[ptx]['longitude']['standard_name'] = "longitude" - IS2_atl11_tide_attrs[ptx]['longitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx]['longitude']['description'] = ("Center longitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx]['longitude']['valid_min'] = -180.0 - IS2_atl11_tide_attrs[ptx]['longitude']['valid_max'] = 180.0 - IS2_atl11_tide_attrs[ptx]['longitude']['coordinates'] = \ - "ref_pt delta_time latitude" - - # cycle statistics variables - IS2_atl11_tide_attrs[ptx]['cycle_stats']['Description'] = ("The cycle_stats subgroup " - "contains summary information about segments for each reference point, including " - "the uncorrected mean heights for reference surfaces, blowing snow and cloud " - "indicators, and geolocation and height misfit statistics.") - IS2_atl11_tide_attrs[ptx]['cycle_stats']['data_rate'] = ("Data within this group " - "are stored at the average segment rate.") - # computed tide - IS2_atl11_tide[ptx]['cycle_stats'][model.atl11] = tide['AT'].copy() - IS2_atl11_fill[ptx]['cycle_stats'][model.atl11] = tide['AT'].fill_value - IS2_atl11_dims[ptx]['cycle_stats'][model.atl11] = ['ref_pt','cycle_number'] - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['units'] = "meters" - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['long_name'] = model.long_name - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['description'] = model.description - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['source'] = 
model.name - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['reference'] = model.reference - IS2_atl11_tide_attrs[ptx]['cycle_stats'][model.atl11]['coordinates'] = \ - "../ref_pt ../cycle_number ../delta_time ../latitude ../longitude" - - # crossing track variables - IS2_atl11_tide_attrs[ptx][XT]['Description'] = ("The crossing_track_data " - "subgroup contains elevation data at crossover locations. These are " - "locations where two ICESat-2 pair tracks cross, so data are available " - "from both the datum track, for which the granule was generated, and " - "from the crossing track.") - IS2_atl11_tide_attrs[ptx][XT]['data_rate'] = ("Data within this group are " - "stored at the average segment rate.") - - # reference point - IS2_atl11_tide[ptx][XT]['ref_pt'] = ref_pt['XT'].copy() - IS2_atl11_fill[ptx][XT]['ref_pt'] = None - IS2_atl11_dims[ptx][XT]['ref_pt'] = None - IS2_atl11_tide_attrs[ptx][XT]['ref_pt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['long_name'] = ("fit center reference point number, " - "segment_id") - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['source'] = "derived, ATL11 algorithm" - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['description'] = ("The reference-point number of the " - "fit center for the datum track. The reference point is the 7 digit segment_id number " - "corresponding to the center of the ATL06 data used for each ATL11 point. These are " - "sequential, starting with 1 for the first segment after an ascending equatorial " - "crossing node.") - IS2_atl11_tide_attrs[ptx][XT]['ref_pt']['coordinates'] = \ - "delta_time latitude longitude" - - # reference ground track of the crossing track - IS2_atl11_tide[ptx][XT]['rgt'] = IS2_atl11_mds[ptx][XT]['rgt'].copy() - IS2_atl11_fill[ptx][XT]['rgt'] = IS2_atl11_attrs[ptx][XT]['rgt']['_FillValue'] - IS2_atl11_dims[ptx][XT]['rgt'] = None - IS2_atl11_tide_attrs[ptx][XT]['rgt'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['rgt']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['long_name'] = "crossover reference ground track" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['rgt']['description'] = "The RGT number for the crossing data." - IS2_atl11_tide_attrs[ptx][XT]['rgt']['coordinates'] = \ - "ref_pt delta_time latitude longitude" - # cycle_number of the crossing track - IS2_atl11_tide[ptx][XT]['cycle_number'] = IS2_atl11_mds[ptx][XT]['cycle_number'].copy() - IS2_atl11_fill[ptx][XT]['cycle_number'] = IS2_atl11_attrs[ptx][XT]['cycle_number']['_FillValue'] - IS2_atl11_dims[ptx][XT]['cycle_number'] = None - IS2_atl11_tide_attrs[ptx][XT]['cycle_number'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['units'] = "1" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['long_name'] = "crossover cycle number" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['cycle_number']['description'] = ("Cycle number for the " - "crossing data. Number of 91-day periods that have elapsed since ICESat-2 entered " - "the science orbit. 
Each of the 1,387 reference ground tracks (RGTs) is targeted " - "in the polar regions once every 91 days.") - # delta time of the crossing track - IS2_atl11_tide[ptx][XT]['delta_time'] = delta_time['XT'].copy() - IS2_atl11_fill[ptx][XT]['delta_time'] = delta_time['XT'].fill_value - IS2_atl11_dims[ptx][XT]['delta_time'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['delta_time'] = {} - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['standard_name'] = "time" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['calendar'] = "standard" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['description'] = ("Number of GPS " - "seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch offset " - "is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS seconds " - "between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP epoch. By " - "adding the offset contained within atlas_sdp_gps_epoch to delta time parameters, the " - "time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl11_tide_attrs[ptx][XT]['delta_time']['coordinates'] = \ - "ref_pt latitude longitude" - # latitude of the crossover measurement - IS2_atl11_tide[ptx][XT]['latitude'] = latitude['XT'].copy() - IS2_atl11_fill[ptx][XT]['latitude'] = latitude['XT'].fill_value - IS2_atl11_dims[ptx][XT]['latitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['latitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['latitude']['units'] = "degrees_north" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['long_name'] = "crossover latitude" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['standard_name'] = "latitude" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['latitude']['description'] = ("Center latitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx][XT]['latitude']['valid_min'] = -90.0 - IS2_atl11_tide_attrs[ptx][XT]['latitude']['valid_max'] = 90.0 - IS2_atl11_tide_attrs[ptx][XT]['latitude']['coordinates'] = \ - "ref_pt delta_time longitude" - # longitude of the crossover measurement - IS2_atl11_tide[ptx][XT]['longitude'] = longitude['XT'].copy() - IS2_atl11_fill[ptx][XT]['longitude'] = longitude['XT'].fill_value - IS2_atl11_dims[ptx][XT]['longitude'] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT]['longitude'] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT]['longitude']['units'] = "degrees_east" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['contentType'] = "physicalMeasurement" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['long_name'] = "crossover longitude" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['standard_name'] = "longitude" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['source'] = "ATL06" - IS2_atl11_tide_attrs[ptx][XT]['longitude']['description'] = ("Center longitude of " - "selected segments") - IS2_atl11_tide_attrs[ptx][XT]['longitude']['valid_min'] = -180.0 - IS2_atl11_tide_attrs[ptx][XT]['longitude']['valid_max'] = 180.0 - IS2_atl11_tide_attrs[ptx][XT]['longitude']['coordinates'] = \ - "ref_pt delta_time latitude" - # computed tide for the crossover measurement - IS2_atl11_tide[ptx][XT][model.atl11] = tide['XT'].copy() - IS2_atl11_fill[ptx][XT][model.atl11] = tide['XT'].fill_value -
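The along-track and crossover branches above share one harmonic core: interpolated amplitudes and Greenwich phase lags are folded into complex constituent constants before prediction. A minimal self-contained sketch of that step, with hypothetical two-constituent values (predict_tide_drift and infer_minor_corrections then evaluate the constituents at each observation time):

    import numpy as np
    # hypothetical amplitudes (m) and Greenwich phase lags (degrees) for
    # two constituents (e.g. m2, s2) at a single point
    amp = np.ma.array([[1.20, 0.45]])
    ph = np.ma.array([[41.0, 78.0]])
    # complex phase in radians; the negative sign follows the phase-lag
    # convention used by the scripts above
    cph = -1j*ph*np.pi/180.0
    # complex harmonic constants passed to the prediction routines
    hc = amp*np.exp(cph)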
IS2_atl11_dims[ptx][XT][model.atl11] = ['ref_pt'] - IS2_atl11_tide_attrs[ptx][XT][model.atl11] = collections.OrderedDict() - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['units'] = "meters" - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['contentType'] = "referenceInformation" - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['long_name'] = model.long_name - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['description'] = model.description - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['source'] = model.name - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['reference'] = model.reference - IS2_atl11_tide_attrs[ptx][XT][model.atl11]['coordinates'] = \ - "ref_pt delta_time latitude longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl11_fill, DIMENSIONS=IS2_atl11_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl11_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl11_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam pair - pairs = [k for k in IS2_atl11_tide.keys() if bool(re.match(r'pt\d',k))] - for ptx in pairs: - fileID.create_group(ptx) - h5[ptx] = {} - # add HDF5 group attributes for beam - for att_name in ['description','beam_pair','ReferenceGroundTrack', - 'first_cycle','last_cycle','equatorial_radius','polar_radius']: - fileID[ptx].attrs[att_name] = IS2_atl11_attrs[ptx][att_name] - - # ref_pt, cycle number, geolocation and delta_time variables - for k in ['ref_pt','cycle_number','delta_time','latitude','longitude']: - # values and attributes - v = IS2_atl11_tide[ptx][k] - attrs = IS2_atl11_attrs[ptx][k] - fillvalue = FILL_VALUE[ptx][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}'.format(ptx,k) - if fillvalue: - h5[ptx][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[ptx][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[ptx][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[ptx][k]): - h5[ptx][k].dims[i].attach_scale(h5[ptx][dim]) - else: - # make dimension - h5[ptx][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[ptx][k].attrs[att_name] = att_val - - # add to cycle_stats and crossing_track_data variables - for key in ['cycle_stats','crossing_track_data']: - fileID[ptx].create_group(key) - 
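The writer relies on HDF5 dimension scales: a variable with no listed dimensions becomes a scale itself (make_scale), and every other variable attaches to those scales, as in the attach_scale calls just below. A minimal sketch of the pattern with hypothetical file and variable names:

    import h5py
    import numpy as np
    with h5py.File('scales_example.h5', 'w') as f:
        # coordinate variable registered as a named dimension scale
        ref = f.create_dataset('ref_pt', data=np.arange(10))
        ref.make_scale('ref_pt')
        # data variable with its first dimension attached to the scale
        tide = f.create_dataset('tide_ocean', data=np.zeros((10)))
        tide.dims[0].attach_scale(ref)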
h5[ptx][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl11_attrs[ptx][key][att_name] - fileID[ptx][key].attrs[att_name] = att_val - for k,v in IS2_atl11_tide[ptx][key].items(): - # attributes - attrs = IS2_atl11_attrs[ptx][key][k] - fillvalue = FILL_VALUE[ptx][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(ptx,key,k) - if fillvalue: - h5[ptx][key][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[ptx][key][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[ptx][key][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[ptx][key][k]): - if (key == 'cycle_stats'): - h5[ptx][key][k].dims[i].attach_scale(h5[ptx][dim]) - else: - h5[ptx][key][k].dims[i].attach_scale(h5[ptx][key][dim]) - else: - # make dimension - h5[ptx][key][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[ptx][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 Annual Land Ice Height' - fileID.attrs['summary'] = ('The purpose of ATL11 is to provide an ICESat-2 ' - 'satellite cycle summary of heights and height changes of land-based ' - 'ice and will be provided as input to ATL15 and ATL16, gridded ' - 'estimates of heights and height-changes.') - fileID.attrs['description'] = ('Land ice parameters for each beam pair. ' - 'All parameters are calculated for the same along-track increments ' - 'for each beam pair and repeat.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL11 files - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for ptx in pairs: - lon = IS2_atl11_tide[ptx]['longitude'] - lat = IS2_atl11_tide[ptx]['latitude'] - delta_time = IS2_atl11_tide[ptx]['delta_time'] - valid = np.nonzero(delta_time != FILL_VALUE[ptx]['delta_time']) - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn) else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time[valid].min() if (delta_time[valid].min() < tmn) else tmn - tmx = delta_time[valid].max() if (delta_time[valid].max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" -
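The time-coverage block that follows condenses to a short conversion chain: ATLAS delta times plus the SDP epoch give GPS seconds, subtracting leap seconds brings those to UTC, and a delta-time conversion yields Julian days for the calendar date. A hedged sketch (the delta_time value is hypothetical; the epoch constant is the nominal value the scripts read from /ancillary_data/atlas_sdp_gps_epoch):

    import numpy as np
    import pyTMD.time
    atlas_sdp_gps_epoch = 1198800018.0    # nominal GPS seconds at 2018-01-01
    delta_time = np.array([29000000.0])   # hypothetical ATLAS delta time (s)
    gps_seconds = atlas_sdp_gps_epoch + delta_time
    leaps = pyTMD.time.count_leap_seconds(gps_seconds)
    # convert to Julian days via the Modified Julian Day epoch (1858-11-17)
    time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps,
        epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
    YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian, format='tuple')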
fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl11_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL11 - annual land ice height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat-2 annual land ice height files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL11 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # apply flexure scaling factors to height constituents - parser.add_argument('--apply-flexure', - default=False, action='store_true', - help='Apply ice flexure scaling factor to height constituents') - # verbosity settings - # verbose will output information about each output file - 
parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL11 file - for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, - TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, - METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, - CUTOFF=args.cutoff, - APPLY_FLEXURE=args.apply_flexure, - VERBOSE=args.verbose, - MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat2_ATL12.py b/scripts/compute_tides_ICESat2_ATL12.py deleted file mode 100644 index 8fddf9c2..00000000 --- a/scripts/compute_tides_ICESat2_ATL12.py +++ /dev/null @@ -1,612 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat2_ATL12.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat-2 ocean surface height data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchal Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - read_ICESat2_ATL12.py: reads ICESat-2 ocean surface height data files - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - 
read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named ATL12 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings - Updated 12/2020: H5py deprecation warning change to use make_scale - added valid data extrapolation with nearest_extrap - merged time conversion routines into module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ICESat2_ATL12.py from read-ICESat-2 repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Forked 12/2019 from compute_tides_ICESat2_ATL07.py - Updated 11/2019: added AOTIM-5-2018 tide model (2018 update to 2004 model) - Forked 11/2019 from compute_tides_ICESat2_atl06.py - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. 
add OTIS netcdf tide option - Updated 05/2019: check if beam exists in a try except else clause - Updated 04/2019: check if subsetted beam contains ocean surface data - Written 04/2019 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import datetime -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - from icesat2_toolkit.read_ICESat2_ATL12 import read_HDF5_ATL12 -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("icesat2_toolkit not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat-2 ocean surface height (ATL12) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # read data from input file - logger.info('{0} -->'.format(INPUT_FILE)) - IS2_atl12_mds,IS2_atl12_attrs,IS2_atl12_beams = read_HDF5_ATL12(INPUT_FILE, - ATTRIBUTES=True) - DIRECTORY = os.path.dirname(INPUT_FILE) - # extract parameters from ICESat-2 ATLAS HDF5 ocean surface file name - rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' - r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') - try: - SUB,PRD,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX = rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) - else: - # output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_{1}_TIDES_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5' - OUTPUT_FILE = file_format.format(*args) - - # number of GPS seconds between the GPS epoch - # and ATLAS Standard Data Product (SDP) epoch - atlas_sdp_gps_epoch = IS2_atl12_mds['ancillary_data']['atlas_sdp_gps_epoch'] - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - - # copy variables for outputting to HDF5 file - IS2_atl12_tide = {} - IS2_atl12_fill = {} - IS2_atl12_dims = {} - IS2_atl12_tide_attrs = {} - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - # Add this value to 
delta time parameters to compute full gps_seconds - IS2_atl12_tide['ancillary_data'] = {} - IS2_atl12_tide_attrs['ancillary_data'] = {} - for key in ['atlas_sdp_gps_epoch']: - # get each HDF5 variable - IS2_atl12_tide['ancillary_data'][key] = IS2_atl12_mds['ancillary_data'][key] - # Getting attributes of group and included variables - IS2_atl12_tide_attrs['ancillary_data'][key] = {} - for att_name,att_val in IS2_atl12_attrs['ancillary_data'][key].items(): - IS2_atl12_tide_attrs['ancillary_data'][key][att_name] = att_val - - # for each input beam within the file - for gtx in sorted(IS2_atl12_beams): - # output data dictionaries for beam - IS2_atl12_tide[gtx] = dict(ssh_segments={}) - IS2_atl12_fill[gtx] = dict(ssh_segments={}) - IS2_atl12_dims[gtx] = dict(ssh_segments={}) - IS2_atl12_tide_attrs[gtx] = dict(ssh_segments={}) - - # number of segments - val = IS2_atl12_mds[gtx]['ssh_segments'] - n_seg = len(val['delt_seg']) - - # convert time from ATLAS SDP to days relative to Jan 1, 1992 - gps_seconds = atlas_sdp_gps_epoch + val['delta_time'] - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, - epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(val['longitude'], - val['latitude'], model.grid_file, model.model_file, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, type=model.type, version=model.version, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_seg)) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # group attributes for beam - IS2_atl12_tide_attrs[gtx]['Description'] = IS2_atl12_attrs[gtx]['Description'] - IS2_atl12_tide_attrs[gtx]['atlas_pce'] = 
IS2_atl12_attrs[gtx]['atlas_pce'] - IS2_atl12_tide_attrs[gtx]['atlas_beam_type'] = IS2_atl12_attrs[gtx]['atlas_beam_type'] - IS2_atl12_tide_attrs[gtx]['groundtrack_id'] = IS2_atl12_attrs[gtx]['groundtrack_id'] - IS2_atl12_tide_attrs[gtx]['atmosphere_profile'] = IS2_atl12_attrs[gtx]['atmosphere_profile'] - IS2_atl12_tide_attrs[gtx]['atlas_spot_number'] = IS2_atl12_attrs[gtx]['atlas_spot_number'] - IS2_atl12_tide_attrs[gtx]['sc_orientation'] = IS2_atl12_attrs[gtx]['sc_orientation'] - # group attributes for ssh_segments - IS2_atl12_tide_attrs[gtx]['ssh_segments']['Description'] = ("Contains " - "parameters relating to the calculated surface height.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['data_rate'] = ("Data within " - "this group are stored at the variable ocean processing segment rate.") - - # geolocation, time and segment ID - # delta time - IS2_atl12_tide[gtx]['ssh_segments']['delta_time'] = val['delta_time'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['delta_time'] = None - IS2_atl12_dims[gtx]['ssh_segments']['delta_time'] = None - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['units'] = "seconds since 2018-01-01" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['long_name'] = "Elapsed GPS seconds" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['standard_name'] = "time" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['source'] = "telemetry" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['calendar'] = "standard" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['description'] = ("Number of " - "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch " - "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS " - "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP " - "epoch. 
By adding the offset contained within atlas_sdp_gps_epoch to delta time " - "parameters, the time in gps_seconds relative to the GPS epoch can be computed.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delta_time']['coordinates'] = \ - "latitude longitude" - # latitude - IS2_atl12_tide[gtx]['ssh_segments']['latitude'] = val['latitude'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['latitude'] = None - IS2_atl12_dims[gtx]['ssh_segments']['latitude'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['units'] = "degrees_north" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['contentType'] = "physicalMeasurement" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['long_name'] = "Latitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['standard_name'] = "latitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['description'] = ("Latitude of " - "segment center") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['valid_min'] = -90.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['valid_max'] = 90.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['latitude']['coordinates'] = \ - "delta_time longitude" - # longitude - IS2_atl12_tide[gtx]['ssh_segments']['longitude'] = val['longitude'].copy() - IS2_atl12_fill[gtx]['ssh_segments']['longitude'] = None - IS2_atl12_dims[gtx]['ssh_segments']['longitude'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['units'] = "degrees_east" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['contentType'] = "physicalMeasurement" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['long_name'] = "Longitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['standard_name'] = "longitude" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['description'] = ("Longitude of " - "segment center") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['valid_min'] = -180.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['valid_max'] = 180.0 - IS2_atl12_tide_attrs[gtx]['ssh_segments']['longitude']['coordinates'] = \ - "delta_time latitude" - # Ocean Segment Duration - IS2_atl12_tide[gtx]['ssh_segments']['delt_seg'] = val['delt_seg'] - IS2_atl12_fill[gtx]['ssh_segments']['delt_seg'] = None - IS2_atl12_dims[gtx]['ssh_segments']['delt_seg'] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['units'] = "seconds" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['contentType'] = \ - "referenceInformation" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['long_name'] = \ - "Ocean Segment Duration" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['description'] = \ - "Time duration segment" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['delt_seg']['coordinates'] = \ - "delta_time latitude longitude" - - # stats variables - IS2_atl12_tide[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_fill[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_dims[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['Description'] = ("Contains parameters " - "related to quality and corrections on the sea surface height parameters.") - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats']['data_rate'] = ("Data within this group " - "are stored at the variable ocean processing 
segment rate.") - - # computed tide - IS2_atl12_tide[gtx]['ssh_segments']['stats'][model.atl12] = tide.copy() - IS2_atl12_fill[gtx]['ssh_segments']['stats'][model.atl12] = tide.fill_value - IS2_atl12_dims[gtx]['ssh_segments']['stats'][model.atl12] = ['delta_time'] - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12] = {} - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['units'] = "meters" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['contentType'] = "referenceInformation" - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['long_name'] = model.long_name - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['description'] = model.description - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['source'] = model.name - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['reference'] = model.reference - IS2_atl12_tide_attrs[gtx]['ssh_segments']['stats'][model.atl12]['coordinates'] = \ - "../delta_time ../latitude ../longitude" - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_tide_attrs, - CLOBBER=True, INPUT=os.path.basename(INPUT_FILE), - FILL_VALUE=IS2_atl12_fill, DIMENSIONS=IS2_atl12_dims, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE)) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat-2 data to HDF5 -def HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_attrs, INPUT=None, - FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False): - # setting HDF5 clobber attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - - # create HDF5 records - h5 = {} - - # number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - # and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC) - h5['ancillary_data'] = {} - for k,v in IS2_atl12_tide['ancillary_data'].items(): - # Defining the HDF5 dataset variables - val = 'ancillary_data/{0}'.format(k) - h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # add HDF5 variable attributes - for att_name,att_val in IS2_atl12_attrs['ancillary_data'][k].items(): - h5['ancillary_data'][k].attrs[att_name] = att_val - - # write each output beam - beams = [k for k in IS2_atl12_tide.keys() if bool(re.match(r'gt\d[lr]',k))] - for gtx in beams: - fileID.create_group(gtx) - # add HDF5 group attributes for beam - for att_name in ['Description','atlas_pce','atlas_beam_type', - 'groundtrack_id','atmosphere_profile','atlas_spot_number', - 'sc_orientation']: - fileID[gtx].attrs[att_name] = IS2_atl12_attrs[gtx][att_name] - # create ssh_segments group - fileID[gtx].create_group('ssh_segments') - h5[gtx] = dict(ssh_segments={}) - for att_name in ['Description','data_rate']: - att_val = IS2_atl12_attrs[gtx]['ssh_segments'][att_name] - fileID[gtx]['ssh_segments'].attrs[att_name] = att_val - - # delta_time, geolocation and segment description variables - for k in ['delta_time','latitude','longitude','delt_seg']: - # values and attributes - v = IS2_atl12_tide[gtx]['ssh_segments'][k] - attrs = IS2_atl12_attrs[gtx]['ssh_segments'][k] - fillvalue = FILL_VALUE[gtx]['ssh_segments'][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}'.format(gtx,'ssh_segments',k) - if fillvalue: - h5[gtx]['ssh_segments'][k] = fileID.create_dataset(val, - 
np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue, - compression='gzip') - else: - h5[gtx]['ssh_segments'][k] = fileID.create_dataset(val, - np.shape(v), data=v, dtype=v.dtype, compression='gzip') - # create or attach dimensions for HDF5 variable - if DIMENSIONS[gtx]['ssh_segments'][k]: - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['ssh_segments'][k]): - h5[gtx]['ssh_segments'][k].dims[i].attach_scale( - h5[gtx]['ssh_segments'][dim]) - else: - # make dimension - h5[gtx]['ssh_segments'][k].make_scale(k) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['ssh_segments'][k].attrs[att_name] = att_val - - # add to stats variables - key = 'stats' - fileID[gtx]['ssh_segments'].create_group(key) - h5[gtx]['ssh_segments'][key] = {} - for att_name in ['Description','data_rate']: - att_val=IS2_atl12_attrs[gtx]['ssh_segments'][key][att_name] - fileID[gtx]['ssh_segments'][key].attrs[att_name] = att_val - for k,v in IS2_atl12_tide[gtx]['ssh_segments'][key].items(): - # attributes - attrs = IS2_atl12_attrs[gtx]['ssh_segments'][key][k] - fillvalue = FILL_VALUE[gtx]['ssh_segments'][key][k] - # Defining the HDF5 dataset variables - val = '{0}/{1}/{2}/{3}'.format(gtx,'ssh_segments',key,k) - if fillvalue: - h5[gtx]['ssh_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, fillvalue=fillvalue, compression='gzip') - else: - h5[gtx]['ssh_segments'][key][k] = \ - fileID.create_dataset(val, np.shape(v), data=v, - dtype=v.dtype, compression='gzip') - # attach dimensions - for i,dim in enumerate(DIMENSIONS[gtx]['ssh_segments'][key][k]): - h5[gtx]['ssh_segments'][key][k].dims[i].attach_scale( - h5[gtx]['ssh_segments'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5[gtx]['ssh_segments'][key][k].attrs[att_name] = att_val - - # HDF5 file title - fileID.attrs['featureType'] = 'trajectory' - fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Ocean Surface Height' - fileID.attrs['summary'] = ('Estimates of the ocean surface tidal parameters ' - 'needed to interpret and assess the quality of ocean height estimates.') - fileID.attrs['description'] = ('Sea Surface Height (SSH) of the global ' - 'open ocean including the ice-free seasonal ice zone (SIZ) and ' - 'near-coast regions.') - date_created = datetime.datetime.today() - fileID.attrs['date_created'] = date_created.isoformat() - project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['project'] = project - platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2' - fileID.attrs['platform'] = platform - # add attribute for elevation instrument and designated processing level - instrument = 'ATLAS > Advanced Topographic Laser Altimeter System' - fileID.attrs['instrument'] = instrument - fileID.attrs['source'] = 'Spacecraft' - fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2' - fileID.attrs['processing_level'] = '4' - # add attributes for input ATL12 file - fileID.attrs['input_files'] = os.path.basename(INPUT) - # find geospatial and temporal ranges - lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf) - for gtx in beams: - lon = IS2_atl12_tide[gtx]['ssh_segments']['longitude'] - lat = IS2_atl12_tide[gtx]['ssh_segments']['latitude'] - delta_time = IS2_atl12_tide[gtx]['ssh_segments']['delta_time'] - # setting the geospatial and temporal ranges - lnmn = lon.min() if (lon.min() < lnmn) else lnmn - lnmx = lon.max() if (lon.max() > lnmx) else lnmx - ltmn = lat.min() if (lat.min() < ltmn)
else ltmn - ltmx = lat.max() if (lat.max() > ltmx) else ltmx - tmn = delta_time.min() if (delta_time.min() < tmn) else tmn - tmx = delta_time.max() if (delta_time.max() > tmx) else tmx - # add geospatial and temporal attributes - fileID.attrs['geospatial_lat_min'] = ltmn - fileID.attrs['geospatial_lat_max'] = ltmx - fileID.attrs['geospatial_lon_min'] = lnmn - fileID.attrs['geospatial_lon_max'] = lnmx - fileID.attrs['geospatial_lat_units'] = "degrees_north" - fileID.attrs['geospatial_lon_units'] = "degrees_east" - fileID.attrs['geospatial_ellipsoid'] = "WGS84" - fileID.attrs['date_type'] = 'UTC' - fileID.attrs['time_type'] = 'CCSDS UTC-A' - # convert start and end time from ATLAS SDP seconds into GPS seconds - atlas_sdp_gps_epoch=IS2_atl12_tide['ancillary_data']['atlas_sdp_gps_epoch'] - gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx]) - # calculate leap seconds - leaps = pyTMD.time.count_leap_seconds(gps_seconds) - # convert from seconds since 1980-01-06T00:00:00 to Julian days - time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, - epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) - # convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') - # add attributes with measurement date start, end and duration - tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), - int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) - fileID.attrs['time_coverage_start'] = tcs.isoformat() - tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]), - int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1))) - fileID.attrs['time_coverage_end'] = tce.isoformat() - fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn) - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat-2 ATL12 - ocean surface height data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat-2 ocean surface height files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat-2 ATL12 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - 
default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input ATL12 file - for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_ICESat_GLA12.py b/scripts/compute_tides_ICESat_GLA12.py deleted file mode 100644 index d2975187..00000000 --- a/scripts/compute_tides_ICESat_GLA12.py +++ /dev/null @@ -1,540 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_ICESat_GLA12.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting ICESat/GLAS L2 GLA12 - Antarctic and Greenland Ice Sheet elevation data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - --atlas-format X: ATLAS tide model format (OTIS, netcdf) - --gzip, -G: Tide model files are gzip compressed - --definition-file X: Model definition file for use as correction - -I X, --interpolate X: Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - --apply-flexure: Apply ice flexure scaling factor to height constituents - Only valid for models containing flexure fields - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - spatial: utilities for reading, writing and operating on spatial data - utilities.py: download
and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - -UPDATE HISTORY: - Updated 07/2022: place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - added command line option to apply flexure for applicable models - Updated 04/2022: use argparse descriptions within documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: save ICESat campaign attribute to output file - added Arctic 2km model (Arc2kmTM) to list of available tide models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - Updated 04/2021: can use a generically named GLA12 file as input - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - Updated 12/2020: updated for public release - H5py deprecation warning change to use make_scale and not create_scale - added valid data extrapolation with nearest_extrap - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: calculate minor constituents as separate variable - compute tide values at all segments and then mask to valid - added AOTIM-5-2018 tide model (2018 update to 2004 model) - Updated 10/2019: external read functions. adjust regex for processed files - changing Y/N flags to True/False - Updated 09/2019: using date functions paralleling public repository - add option for TPXO9-atlas. 
add OTIS netcdf tide option - Written 12/2018 -""" -from __future__ import print_function - -import sys -import os -import re -import logging -import argparse -import warnings -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.spatial -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: read ICESat ice sheet HDF5 elevation data (GLAH12) from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat(tide_dir, INPUT_FILE, - TIDE_MODEL=None, - ATLAS_FORMAT=None, - GZIP=True, - DEFINITION_FILE=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=None, - APPLY_FLEXURE=False, - VERBOSE=False, - MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # get directory from INPUT_FILE - logger.info('{0} -->'.format(INPUT_FILE)) - DIRECTORY = os.path.dirname(INPUT_FILE) - # flexure flag if being applied - flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' - # compile regular expression operator for extracting information from file - rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_' - r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE) - # extract parameters from ICESat/GLAS HDF5 file name - # PRD: Product number (01, 05, 06, 12, 13, 14, or 15) - # RL: Release number for process that created the product = 634 - # RGTP: Repeat ground-track phase (1=8-day, 2=91-day, 3=transfer orbit) - # ORB: Reference orbit number (starts at 1 and increments each time a - # new reference orbit ground track file is obtained.) 
- # INST: Instance number (increments every time the satellite enters a - # different reference orbit) - # CYCL: Cycle of reference orbit for this phase - # TRK: Track within reference orbit - # SEG: Segment of orbit - # GRAN: Granule version number - # TYPE: File type - try: - PRD,RL,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE = rx.findall(INPUT_FILE).pop() - except: - # output tide HDF5 file (generic) - fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,flexure_flag,fileExtension) - OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) - else: - # output tide HDF5 file for NSIDC granules - args = (PRD,RL,model.name,flexure_flag,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) - file_format = 'GLAH{0}_{1}_{2}{3}_TIDES_{4}{5}{6}_{7}_{8}_{9}_{10}_{11}.h5' - OUTPUT_FILE = file_format.format(*args) - - # read GLAH12 HDF5 file - fileID = h5py.File(INPUT_FILE,'r') - n_40HZ, = fileID['Data_40HZ']['Time']['i_rec_ndx'].shape - # get variables and attributes - rec_ndx_40HZ = fileID['Data_40HZ']['Time']['i_rec_ndx'][:].copy() - # seconds since 2000-01-01 12:00:00 UTC (J2000) - DS_UTCTime_40HZ = fileID['Data_40HZ']['DS_UTCTime_40'][:].copy() - # Latitude (degrees North) - lat_TPX = fileID['Data_40HZ']['Geolocation']['d_lat'][:].copy() - # Longitude (degrees East) - lon_40HZ = fileID['Data_40HZ']['Geolocation']['d_lon'][:].copy() - # Elevation (height above TOPEX/Poseidon ellipsoid in meters) - elev_TPX = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'][:].copy() - fv = fileID['Data_40HZ']['Elevation_Surfaces']['d_elev'].attrs['_FillValue'] - - # semimajor axis (a) and flattening (f) for TP and WGS84 ellipsoids - atop,ftop = (6378136.3,1.0/298.257) - awgs,fwgs = (6378137.0,1.0/298.257223563) - # convert from Topex/Poseidon to WGS84 Ellipsoids - lat_40HZ,elev_40HZ = pyTMD.spatial.convert_ellipsoid(lat_TPX, elev_TPX, - atop, ftop, awgs, fwgs, eps=1e-12, itmax=10) - - # convert time from J2000 to days relative to Jan 1, 1992 (48622mjd) - # J2000: seconds since 2000-01-01 12:00:00 UTC - tide_time = pyTMD.time.convert_delta_time(DS_UTCTime_40HZ, - epoch1=(2000,1,1,12,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) - # delta time (TT - UT1) file - delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) - # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS','ESR'): - amp,ph,D,c = extract_tidal_constants(lon_40HZ, lat_40HZ, - model.grid_file, model.model_file, model.projection, - type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE) - deltat = np.zeros_like(tide_time) - elif (model.format == 'netcdf'): - amp,ph,D,c = extract_netcdf_constants(lon_40HZ, lat_40HZ, - model.grid_file, model.model_file, type=model.type, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - deltat = np.zeros_like(tide_time) - elif (model.format == 'GOT'): - amp,ph,c = extract_GOT_constants(lon_40HZ, lat_40HZ, - model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - elif (model.format == 'FES'): - amp,ph = extract_FES_constants(lon_40HZ, lat_40HZ, - model.model_file, type=model.type, version=model.version, - method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - scale=model.scale, compressed=model.compressed) - # available model constituents - c = 
model.constituents - # interpolate delta times from calendar dates to tide time - deltat = calc_delta_time(delta_file, tide_time) - - # calculate complex phase in radians for Euler's - cph = -1j*ph*np.pi/180.0 - # calculate constituent oscillation - hc = amp*np.exp(cph) - - # predict tidal elevations at time and infer minor corrections - tide = np.ma.empty((n_40HZ),fill_value=fv) - tide.mask = np.any(hc.mask,axis=1) - tide.data[:] = predict_tide_drift(tide_time, hc, c, - deltat=deltat, corrections=model.format) - minor = infer_minor_corrections(tide_time, hc, c, - deltat=deltat, corrections=model.format) - tide.data[:] += minor.data[:] - # replace masked and nan values with fill value - invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) - tide.data[invalid] = tide.fill_value - tide.mask[invalid] = True - - # copy variables for outputting to HDF5 file - IS_gla12_tide = dict(Data_40HZ={}) - IS_gla12_fill = dict(Data_40HZ={}) - IS_gla12_tide_attrs = dict(Data_40HZ={}) - - # copy global file attributes of interest - global_attribute_list = ['featureType','title','comment','summary','license', - 'references','AccessConstraints','CitationforExternalPublication', - 'contributor_role','contributor_name','creator_name','creator_email', - 'publisher_name','publisher_email','publisher_url','platform','instrument', - 'processing_level','date_created','spatial_coverage_type','history', - 'keywords','keywords_vocabulary','naming_authority','project','time_type', - 'date_type','time_coverage_start','time_coverage_end', - 'time_coverage_duration','source','HDFVersion','identifier_product_type', - 'identifier_product_format_version','Conventions','institution', - 'ReprocessingPlanned','ReprocessingActual','LocalGranuleID', - 'ProductionDateTime','LocalVersionID','PGEVersion','OrbitNumber', - 'StartOrbitNumber','StopOrbitNumber','EquatorCrossingLongitude', - 'EquatorCrossingTime','EquatorCrossingDate','ShortName','VersionID', - 'InputPointer','RangeBeginningTime','RangeEndingTime','RangeBeginningDate', - 'RangeEndingDate','PercentGroundHit','OrbitQuality','Cycle','Track', - 'Instrument_State','Timing_Bias','ReferenceOrbit','SP_ICE_PATH_NO', - 'SP_ICE_GLAS_StartBlock','SP_ICE_GLAS_EndBlock','Instance','Range_Bias', - 'Instrument_State_Date','Instrument_State_Time','Range_Bias_Date', - 'Range_Bias_Time','Timing_Bias_Date','Timing_Bias_Time', - 'identifier_product_doi','identifier_file_uuid', - 'identifier_product_doi_authority'] - for att in global_attribute_list: - IS_gla12_tide_attrs[att] = fileID.attrs[att] - # copy ICESat campaign name from ancillary data - IS_gla12_tide_attrs['Campaign'] = fileID['ANCILLARY_DATA'].attrs['Campaign'] - - # add attributes for input GLA12 file - IS_gla12_tide_attrs['input_files'] = os.path.basename(INPUT_FILE) - # update geospatial ranges for ellipsoid - IS_gla12_tide_attrs['geospatial_lat_min'] = np.min(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lat_max'] = np.max(lat_40HZ) - IS_gla12_tide_attrs['geospatial_lon_min'] = np.min(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lon_max'] = np.max(lon_40HZ) - IS_gla12_tide_attrs['geospatial_lat_units'] = "degrees_north" - IS_gla12_tide_attrs['geospatial_lon_units'] = "degrees_east" - IS_gla12_tide_attrs['geospatial_ellipsoid'] = "WGS84" - - # copy 40Hz group attributes - for att_name,att_val in fileID['Data_40HZ'].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][att_name] = att_val - # copy attributes for time, geolocation and geophysical groups - for var in ['Time','Geolocation','Geophysical']: - 
IS_gla12_tide['Data_40HZ'][var] = {} - IS_gla12_fill['Data_40HZ'][var] = {} - IS_gla12_tide_attrs['Data_40HZ'][var] = {} - for att_name,att_val in fileID['Data_40HZ'][var].attrs.items(): - IS_gla12_tide_attrs['Data_40HZ'][var][att_name] = att_val - - # J2000 time - IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] = DS_UTCTime_40HZ - IS_gla12_fill['Data_40HZ']['DS_UTCTime_40'] = None - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'] = {} - for att_name,att_val in fileID['Data_40HZ']['DS_UTCTime_40'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['DS_UTCTime_40'][att_name] = att_val - # record - IS_gla12_tide['Data_40HZ']['Time']['i_rec_ndx'] = rec_ndx_40HZ - IS_gla12_fill['Data_40HZ']['Time']['i_rec_ndx'] = None - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx']['coordinates'] = \ - "../DS_UTCTime_40" - for att_name,att_val in fileID['Data_40HZ']['Time']['i_rec_ndx'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Time']['i_rec_ndx'][att_name] = att_val - # latitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lat'] = lat_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lat'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat']['coordinates'] = \ - "../DS_UTCTime_40" - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lat'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lat'][att_name] = att_val - # longitude - IS_gla12_tide['Data_40HZ']['Geolocation']['d_lon'] = lon_40HZ - IS_gla12_fill['Data_40HZ']['Geolocation']['d_lon'] = None - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon']['coordinates'] = \ - "../DS_UTCTime_40" - for att_name,att_val in fileID['Data_40HZ']['Geolocation']['d_lon'].attrs.items(): - if att_name not in ('DIMENSION_LIST','CLASS','NAME'): - IS_gla12_tide_attrs['Data_40HZ']['Geolocation']['d_lon'][att_name] = att_val - - # geophysical variables - # computed tide - IS_gla12_tide['Data_40HZ']['Geophysical'][model.gla12] = tide - IS_gla12_fill['Data_40HZ']['Geophysical'][model.gla12] = tide.fill_value - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12] = {} - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['units'] = "meters" - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['long_name'] = model.long_name - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['description'] = model.description - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['source'] = model.name - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['reference'] = model.reference - IS_gla12_tide_attrs['Data_40HZ']['Geophysical'][model.gla12]['coordinates'] = \ - "../DS_UTCTime_40" - - # close the input HDF5 file - fileID.close() - - # print file information - logger.info('\t{0}'.format(OUTPUT_FILE)) - HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_tide_attrs, - FILENAME=os.path.join(DIRECTORY,OUTPUT_FILE), - FILL_VALUE=IS_gla12_fill, CLOBBER=True) - # change the permissions mode - os.chmod(os.path.join(DIRECTORY,OUTPUT_FILE), MODE) - -# PURPOSE: outputting the tide values for ICESat data to HDF5 -def HDF5_GLA12_tide_write(IS_gla12_tide, IS_gla12_attrs, - FILENAME='', FILL_VALUE=None, CLOBBER=False): - # setting HDF5 clobber 
attribute - if CLOBBER: - clobber = 'w' - else: - clobber = 'w-' - - # open output HDF5 file - fileID = h5py.File(os.path.expanduser(FILENAME), clobber) - # create 40HZ HDF5 records - h5 = dict(Data_40HZ={}) - - # add HDF5 file attributes - attrs = {a:v for a,v in IS_gla12_attrs.items() if not isinstance(v,dict)} - for att_name,att_val in attrs.items(): - fileID.attrs[att_name] = att_val - - # create Data_40HZ group - fileID.create_group('Data_40HZ') - # add HDF5 40HZ group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'].items(): - if att_name not in ('DS_UTCTime_40',) and not isinstance(att_val,dict): - fileID['Data_40HZ'].attrs[att_name] = att_val - - # add 40HZ time variable - val = IS_gla12_tide['Data_40HZ']['DS_UTCTime_40'] - attrs = IS_gla12_attrs['Data_40HZ']['DS_UTCTime_40'] - # Defining the HDF5 dataset variables - var = '{0}/{1}'.format('Data_40HZ','DS_UTCTime_40') - h5['Data_40HZ']['DS_UTCTime_40'] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, compression='gzip') - # make dimension - h5['Data_40HZ']['DS_UTCTime_40'].make_scale('DS_UTCTime_40') - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ']['DS_UTCTime_40'].attrs[att_name] = att_val - - # for each variable group - for group in ['Time','Geolocation','Geophysical']: - # add group to dict - h5['Data_40HZ'][group] = {} - # create Data_40HZ group - fileID.create_group('Data_40HZ/{0}'.format(group)) - # add HDF5 group attributes - for att_name,att_val in IS_gla12_attrs['Data_40HZ'][group].items(): - if not isinstance(att_val,dict): - fileID['Data_40HZ'][group].attrs[att_name] = att_val - # for each variable in the group - for key,val in IS_gla12_tide['Data_40HZ'][group].items(): - fillvalue = FILL_VALUE['Data_40HZ'][group][key] - attrs = IS_gla12_attrs['Data_40HZ'][group][key] - # Defining the HDF5 dataset variables - var = '{0}/{1}/{2}'.format('Data_40HZ',group,key) - # use variable compression if containing fill values - if fillvalue: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - fillvalue=fillvalue, compression='gzip') - else: - h5['Data_40HZ'][group][key] = fileID.create_dataset(var, - np.shape(val), data=val, dtype=val.dtype, - compression='gzip') - # attach dimensions - for i,dim in enumerate(['DS_UTCTime_40']): - h5['Data_40HZ'][group][key].dims[i].attach_scale( - h5['Data_40HZ'][dim]) - # add HDF5 variable attributes - for att_name,att_val in attrs.items(): - h5['Data_40HZ'][group][key].attrs[att_name] = att_val - - # Closing the HDF5 file - fileID.close() - -# PURPOSE: create argument parser -def arguments(): - parser = argparse.ArgumentParser( - description="""Calculates tidal elevations for correcting ICESat/GLAS - L2 GLA12 Antarctic and Greenland Ice Sheet elevation data - """, - fromfile_prefix_chars="@" - ) - parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args - # command line parameters - group = parser.add_mutually_exclusive_group(required=True) - # input ICESat GLAS files - parser.add_argument('infile', - type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', - help='ICESat GLA12 file to run') - # directory with tide data - parser.add_argument('--directory','-D', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - default=os.getcwd(), - help='Working data directory') - # tide model to use - choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation()) - group.add_argument('--tide','-T', - metavar='TIDE', type=str, - 
choices=choices, - help='Tide model to use in correction') - parser.add_argument('--atlas-format', - type=str, choices=('OTIS','netcdf'), default='netcdf', - help='ATLAS tide model format') - parser.add_argument('--gzip','-G', - default=False, action='store_true', - help='Tide model files are gzip compressed') - # tide model definition file to set an undefined model - group.add_argument('--definition-file', - type=lambda p: os.path.abspath(os.path.expanduser(p)), - help='Tide model definition file for use as correction') - # interpolation method - parser.add_argument('--interpolate','-I', - metavar='METHOD', type=str, default='spline', - choices=('spline','linear','nearest','bilinear'), - help='Spatial interpolation method') - # extrapolate with nearest-neighbors - parser.add_argument('--extrapolate','-E', - default=False, action='store_true', - help='Extrapolate with nearest-neighbors') - # extrapolation cutoff in kilometers - # set to inf to extrapolate over all points - parser.add_argument('--cutoff','-c', - type=np.float64, default=10.0, - help='Extrapolation cutoff in kilometers') - # apply flexure scaling factors to height constituents - parser.add_argument('--apply-flexure', - default=False, action='store_true', - help='Apply ice flexure scaling factor to height constituents') - # verbosity settings - # verbose will output information about each output file - parser.add_argument('--verbose','-V', - default=False, action='store_true', - help='Output information about each created file') - # permissions mode of the local files (number in octal) - parser.add_argument('--mode','-M', - type=lambda x: int(x,base=8), default=0o775, - help='Permission mode of directories and files created') - # return the parser - return parser - -# This is the main part of the program that calls the individual functions -def main(): - # Read the system arguments listed after the program - parser = arguments() - args,_ = parser.parse_known_args() - - # run for each input GLA12 file - for FILE in args.infile: - compute_tides_ICESat(args.directory, FILE, - TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, - METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, - CUTOFF=args.cutoff, - APPLY_FLEXURE=args.apply_flexure, - VERBOSE=args.verbose, - MODE=args.mode) - -# run main program -if __name__ == '__main__': - main() diff --git a/scripts/compute_tides_icebridge_data.py b/scripts/compute_tides_icebridge_data.py deleted file mode 100644 index 19ac81d4..00000000 --- a/scripts/compute_tides_icebridge_data.py +++ /dev/null @@ -1,770 +0,0 @@ -#!/usr/bin/env python -u""" -compute_tides_icebridge_data.py -Written by Tyler Sutterley (07/2022) -Calculates tidal elevations for correcting Operation IceBridge elevation data - -Uses OTIS format tidal solutions provided by Ohio State University and ESR - http://volkov.oce.orst.edu/tides/region.html - https://www.esr.org/research/polar-tide-models/list-of-polar-tide-models/ - ftp://ftp.esr.org/pub/datasets/tmd/ -Global Tide Model (GOT) solutions provided by Richard Ray at GSFC -or Finite Element Solution (FES) models provided by AVISO - -INPUTS: - ATM1B, ATM icessn or LVIS file from NSIDC - -COMMAND LINE OPTIONS: - -D X, --directory X: Working data directory - -T X, --tide X: Tide model to use in correction - --atlas-format X: ATLAS tide model format (OTIS, netcdf) - --gzip, -G: Tide model files are gzip compressed - --definition-file X: Model definition file for use as correction - -I X, --interpolate X: 
Interpolation method - spline - linear - nearest - bilinear - -E X, --extrapolate X: Extrapolate with nearest-neighbors - -c X, --cutoff X: Extrapolation cutoff in kilometers - set to inf to extrapolate for all points - --apply-flexure: Apply ice flexure scaling factor to height constituents - Only valid for models containing flexure fields - -M X, --mode X: Permission mode of directories and files created - -V, --verbose: Output information about each created file - -PYTHON DEPENDENCIES: - numpy: Scientific Computing Tools For Python - https://numpy.org - https://numpy.org/doc/stable/user/numpy-for-matlab-users.html - scipy: Scientific Tools for Python - https://docs.scipy.org/doc/ - h5py: Python interface for Hierarchical Data Format 5 (HDF5) - https://www.h5py.org/ - netCDF4: Python interface to the netCDF C library - https://unidata.github.io/netcdf4-python/netCDF4/index.html - pyproj: Python interface to PROJ library - https://pypi.org/project/pyproj/ - -PROGRAM DEPENDENCIES: - time.py: utilities for calculating time operations - model.py: retrieves tide model parameters for named tide models - utilities.py: download and management utilities for syncing files - calc_astrol_longitudes.py: computes the basic astronomical mean longitudes - calc_delta_time.py: calculates difference between universal and dynamic time - convert_ll_xy.py: convert lat/lon points to and from projected coordinates - infer_minor_corrections.py: return corrections for minor constituents - load_constituent.py: loads parameters for a given tidal constituent - load_nodal_corrections.py: load the nodal corrections for tidal constituents - read_tide_model.py: extract tidal harmonic constants from OTIS tide models - read_netcdf_model.py: extract tidal harmonic constants from netcdf models - read_GOT_model.py: extract tidal harmonic constants from GSFC GOT models - read_FES_model.py: extract tidal harmonic constants from FES tide models - bilinear_interp.py: bilinear interpolation of data to coordinates - nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates - predict_tide_drift.py: predict tidal elevations using harmonic constants - read_ATM1b_QFIT_binary.py: read ATM1b QFIT binary files (NSIDC version 1) - -UPDATE HISTORY: - Updated 07/2022: update imports of ATM1b QFIT functions to released version - place some imports within try/except statements - Updated 05/2022: added ESR netCDF4 formats to list of model types - updated keyword arguments to read tide model programs - added command line option to apply flexure for applicable models - Updated 04/2022: include utf-8 encoding in reads to be windows compliant - use argparse descriptions within sphinx documentation - Updated 03/2022: using static decorators to define available models - Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models - Updated 12/2021: added TPXO9-atlas-v5 to list of available tide models - Updated 10/2021: using python logging for handling verbose output - using collections to store attributes in order of creation - Updated 09/2021: refactor to use model class for files and attributes - Updated 07/2021: can use prefix files to define command line arguments - Updated 06/2021: added new Gr1km-v2 1km Greenland model from ESR - Updated 05/2021: added option for extrapolation cutoff in kilometers - modified import of ATM1b QFIT reader - Updated 03/2021: added TPXO9-atlas-v4 in binary OTIS format - simplified netcdf inputs to be similar to binary OTIS read program - replaced numpy bool/int to prevent deprecation warnings
- Updated 12/2020: added valid data extrapolation with nearest_extrap - merged time conversion routines into module - Updated 11/2020: added model constituents from TPXO9-atlas-v3 - Updated 10/2020: using argparse to set command line parameters - Updated 09/2020: output ocean and load tide as tide_ocean and tide_load - Updated 08/2020: using builtin time operations. python3 regular expressions - Updated 07/2020: added FES2014 and FES2014_load. use merged delta times - Updated 06/2020: added version 2 of TPXO9-atlas (TPXO9-atlas-v2) - Updated 03/2020: use read_ATM1b_QFIT_binary from repository - Updated 02/2020: changed CATS2008 grid to match version on U.S. Antarctic - Program Data Center http://www.usap-dc.org/view/dataset/601235 - Updated 11/2019: added AOTIM-5-2018 tide model (2018 update to 2004 model) - Updated 09/2019: added TPXO9_atlas reading from netcdf4 tide files - Updated 05/2019: added option interpolate to choose the interpolation method - Updated 02/2019: using range for python3 compatibility - Updated 10/2018: updated GPS time calculation for calculating leap seconds - Updated 07/2018: added GSFC Global Ocean Tides (GOT) models - Written 06/2018 -""" -from __future__ import print_function - -import sys -import os -import re -import time -import logging -import argparse -import warnings -import collections -import numpy as np -import pyTMD.time -import pyTMD.model -import pyTMD.utilities -from pyTMD.calc_delta_time import calc_delta_time -from pyTMD.infer_minor_corrections import infer_minor_corrections -from pyTMD.predict_tide_drift import predict_tide_drift -from pyTMD.read_tide_model import extract_tidal_constants -from pyTMD.read_netcdf_model import extract_netcdf_constants -from pyTMD.read_GOT_model import extract_GOT_constants -from pyTMD.read_FES_model import extract_FES_constants -# attempt imports -try: - import h5py -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("h5py not available") -try: - import ATM1b_QFIT.read_ATM1b_QFIT_binary -except (ImportError, ModuleNotFoundError) as e: - warnings.filterwarnings("always") - warnings.warn("ATM1b_QFIT not available") -# ignore warnings -warnings.filterwarnings("ignore") - -# PURPOSE: reading the number of file lines removing commented lines -def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): - # subset the data to indices if specified - if input_subsetter: - file_lines = len(input_subsetter) - elif HDF5: - # read the size of an input variable within a HDF5 file - with h5py.File(input_file,'r') as fileID: - file_lines, = fileID[HDF5].shape - elif QFIT: - # read the size of a QFIT binary file - file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) - else: - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - i = [i for i in f.readlines() if re.match(r'^(?!\#|\n)',i)] - file_lines = len(i) - # return the number of lines - return file_lines - -# PURPOSE: read the ATM Level-1b data file for variables of interest -def read_ATM_qfit_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - mission_flag = r'(BLATM1B|ILATM1B|ILNSA1B)' - regex_pattern = r'{0}_(\d+)_(\d+)(.*?).(qi|TXT|h5)'.format(mission_flag) - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millennia (e.g.
93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # output python dictionary with variables - ATM_L1b_input = {} - # Version 1 of ATM QFIT files (ascii) - # output text file from qi2txt with proper filename format - # do not use the shortened output format from qi2txt - if (SFX == 'TXT'): - # compile regular expression operator for reading lines - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' - rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # create output variables with length equal to the number of lines - ATM_L1b_input['lat'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['lon'] = np.zeros_like(file_contents,dtype=np.float64) - ATM_L1b_input['data'] = np.zeros_like(file_contents,dtype=np.float64) - hour = np.zeros_like(file_contents,dtype=np.float64) - minute = np.zeros_like(file_contents,dtype=np.float64) - second = np.zeros_like(file_contents,dtype=np.float64) - # for each line within the file - for i,line in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line) - ATM_L1b_input['lat'][i] = np.float64(line_contents[1]) - ATM_L1b_input['lon'][i] = np.float64(line_contents[2]) - ATM_L1b_input['data'][i] = np.float64(line_contents[3]) - hour[i] = np.float64(line_contents[-1][:2]) - minute[i] = np.float64(line_contents[-1][2:4]) - second[i] = np.float64(line_contents[-1][4:]) - # Version 1 of ATM QFIT files (binary) - elif (SFX == 'qi'): - # read input QFIT data file and subset if specified - fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,QFIT=True) - ATM_L1b_input['lat'] = fid['latitude'][:] - ATM_L1b_input['lon'] = fid['longitude'][:] - ATM_L1b_input['data'] = fid['elevation'][:] - time_hhmmss = fid['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # Version 2 of ATM QFIT files (HDF5) - elif (SFX == 'h5'): - # Open the HDF5 file for reading - fileID = h5py.File(os.path.expanduser(input_file), 'r') - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter,HDF5='elevation') - # create output variables with length equal to input elevation - ATM_L1b_input['lat'] = fileID['latitude'][:] - ATM_L1b_input['lon'] = fileID['longitude'][:] - ATM_L1b_input['data'] = fileID['elevation'][:] - time_hhmmss = fileID['instrument_parameters']['time_hhmmss'][:] - # extract hour, minute and second from time_hhmmss - hour = 
np.zeros_like(time_hhmmss,dtype=np.float64) - minute = np.zeros_like(time_hhmmss,dtype=np.float64) - second = np.zeros_like(time_hhmmss,dtype=np.float64) - # for each line within the file - for i,packed_time in enumerate(time_hhmmss): - # convert to zero-padded string with 3 decimal points - line_contents = '{0:010.3f}'.format(packed_time) - hour[i] = np.float64(line_contents[:2]) - minute[i] = np.float64(line_contents[2:4]) - second[i] = np.float64(line_contents[4:]) - # close the input HDF5 file - fileID.close() - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - # calculation of Julian day taking into account leap seconds - # converting to J2000 seconds - ATM_L1b_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0,0),scale=86400.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L1b_input.items(): - ATM_L1b_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L1b_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L1b_input['lat'] < 0.0) - # determine the hemisphere containing the shots in the file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L1b_input,file_lines,HEM - -# PURPOSE: read the ATM Level-2 data file for variables of interest -def read_ATM_icessn_file(input_file, input_subsetter): - # regular expression pattern for extracting parameters - regex_pattern=r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - # extract mission and other parameters from filename - MISSION,YYMMDD,HHMMSS,AUX,SFX = re.findall(regex_pattern,input_file).pop() - # early date strings omitted century and millennia (e.g. 93 for 1993) - if (len(YYMMDD) == 6): - ypre,month,day = np.array([YYMMDD[:2],YYMMDD[2:4],YYMMDD[4:]],dtype='i') - year = (ypre + 1900.0) if (ypre >= 90) else (ypre + 2000.0) - elif (len(YYMMDD) == 8): - year,month,day = np.array([YYMMDD[:4],YYMMDD[4:6],YYMMDD[6:]],dtype='i') - # input file column names for variables of interest with column indices - # variables not used: (SNslope:4, WEslope:5, npt_used:7, npt_edit:8, d:9) - file_dtype = {'seconds':0, 'lat':1, 'lon':2, 'data':3, 'RMS':6, 'track':-1} - # compile regular expression operator for reading lines (extracts numbers) - regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
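# a minimal doctest-style sketch (not part of the original script; the record
# values below are hypothetical) showing what the pattern above extracts from
# a whitespace-delimited icessn record: every signed integer or floating-point
# number, with an optional exponent, returned as strings in column order:
#     >>> import re
#     >>> rx = re.compile(r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?')
#     >>> rx.findall('43022.50 -70.501222 291.34824 1864.955 12')
#     ['43022.50', '-70.501222', '291.34824', '1864.955', '12']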
- rx = re.compile(regex_pattern, re.VERBOSE) - # read the input file, split at lines and remove all commented lines - with open(input_file, mode='r', encoding='utf8') as f: - file_contents = [i for i in f.read().splitlines() if - re.match(r'^(?!\#|\n)',i)] - # number of lines of data within file - file_lines = file_length(input_file,input_subsetter) - # output python dictionary with variables - ATM_L2_input = {} - # create output variables with length equal to the number of file lines - for key in file_dtype.keys(): - ATM_L2_input[key] = np.zeros_like(file_contents, dtype=np.float64) - # for each line within the file - for line_number,line_entries in enumerate(file_contents): - # find numerical instances within the line - line_contents = rx.findall(line_entries) - # for each variable of interest: save to dinput as float - for key,val in file_dtype.items(): - ATM_L2_input[key][line_number] = np.float64(line_contents[val]) - # convert shot time (seconds of day) to J2000 - hour = np.floor(ATM_L2_input['seconds']/3600.0) - minute = np.floor((ATM_L2_input['seconds'] % 3600)/60.0) - second = ATM_L2_input['seconds'] % 60.0 - # First column in Pre-IceBridge and ICESSN Version 1 files is GPS time - if (MISSION == 'BLATM2') or (SFX != 'csv'): - # calculate the number of leap seconds between GPS time (seconds - # since Jan 6, 1980 00:00:00) and UTC - gps_seconds = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second, - epoch=(1980,1,6,0,0,0),scale=86400.0) - leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds) - else: - leap_seconds = 0.0 - # calculation of Julian day - # converting to J2000 seconds - ATM_L2_input['time'] = pyTMD.time.convert_calendar_dates(year,month,day, - hour=hour,minute=minute,second=second-leap_seconds, - epoch=(2000,1,1,12,0,0,0),scale=86400.0) - # convert RMS from centimeters to meters - ATM_L2_input['error'] = ATM_L2_input['RMS']/100.0 - # subset the data to indices if specified - if input_subsetter: - for key,val in ATM_L2_input.items(): - ATM_L2_input[key] = val[input_subsetter] - # hemispheric shot count - count = {} - count['N'] = np.count_nonzero(ATM_L2_input['lat'] >= 0.0) - count['S'] = np.count_nonzero(ATM_L2_input['lat'] < 0.0) - # determine the hemisphere containing the shots in the file - HEM, = [key for key, val in count.items() if val] - # return the output variables - return ATM_L2_input,file_lines,HEM - -# PURPOSE: read the LVIS Level-2 data file for variables of interest -def read_LVIS_HDF5_file(input_file, input_subsetter): - # LVIS region flags: GL for Greenland and AQ for Antarctica - lvis_flag = {'GL':'N','AQ':'S'} - # regular expression pattern for extracting parameters from HDF5 files - # computed in read_icebridge_lvis.py - mission_flag = '(BLVIS2|BVLIS2|ILVIS2|ILVGH2)' - regex_pattern = r'{0}_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5'.format(mission_flag) - # extract mission, region and other parameters from filename - MISSION,REGION,YY,MMDD,RLD,SS = re.findall(regex_pattern,input_file).pop() - LDS_VERSION = '2.0.2' if (int(RLD[1:3]) >= 18) else '1.04' - # input and output python dictionaries with variables - file_input = {} - LVIS_L2_input = {} - fileID = h5py.File(input_file,'r') - # create output variables with length equal to input shot number - file_lines = file_length(input_file,input_subsetter,HDF5='Shot_Number') - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS104.html - # https://lvis.gsfc.nasa.gov/Data/Data_Structure/DataStructure_LDS202.html - if (LDS_VERSION == '1.04'): - # elevation surfaces - 
file_input['elev'] = fileID['Elevation_Surfaces/Elevation_Centroid'][:] - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # latitude - file_input['lat'] = fileID['Geolocation/Latitude_Centroid'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon'] = fileID['Geolocation/Longitude_Centroid'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - elif (LDS_VERSION == '2.0.2'): - # elevation surfaces - file_input['elev_low'] = fileID['Elevation_Surfaces/Elevation_Low'][:] - file_input['elev_high'] = fileID['Elevation_Surfaces/Elevation_High'][:] - # heights above lowest detected mode - file_input['RH50'] = fileID['Waveform/RH50'][:] - file_input['RH100'] = fileID['Waveform/RH100'][:] - # calculate centroidal elevation using 50% of waveform energy - file_input['elev'] = file_input['elev_low'] + file_input['RH50'] - # latitude - file_input['lat_top'] = fileID['Geolocation/Latitude_Top'][:] - file_input['lat_low'] = fileID['Geolocation/Latitude_Low'][:] - # longitude - file_input['lon_top'] = fileID['Geolocation/Longitude_Top'][:] - file_input['lon_low'] = fileID['Geolocation/Longitude_Low'][:] - # linearly interpolate latitude and longitude to RH50 - file_input['lat'] = file_input['lat_low'] + file_input['RH50'] * \ - (file_input['lat_top'] - file_input['lat_low'])/file_input['RH100'] - file_input['lon'] = file_input['lon_low'] + file_input['RH50'] * \ - (file_input['lon_top'] - file_input['lon_low'])/file_input['RH100'] - # J2000 seconds - LVIS_L2_input['time'] = fileID['Time/J2000'][:] - # close the input HDF5 file - fileID.close() - # output combined variables - LVIS_L2_input['data'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lon'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['lat'] = np.zeros_like(file_input['elev'],dtype=np.float64) - LVIS_L2_input['error'] = np.zeros_like(file_input['elev'],dtype=np.float64) - # find where elev high is equal to elev low - # see note about using LVIS centroid elevation product - # http://lvis.gsfc.nasa.gov/OIBDataStructure.html - ii = np.nonzero(file_input['elev_low'] == file_input['elev_high']) - jj = np.nonzero(file_input['elev_low'] != file_input['elev_high']) - # where lowest point of waveform is equal to highest point --> - # using the elev_low elevation - LVIS_L2_input['data'][ii] = file_input['elev_low'][ii] - # for other locations use the centroid elevation - # as the centroid is a useful product over rough terrain - # when you are calculating ice volume change - LVIS_L2_input['data'][jj] = file_input['elev'][jj] - # latitude and longitude for each case - # elevation low == elevation high - LVIS_L2_input['lon'][ii] = file_input['lon_low'][ii] - LVIS_L2_input['lat'][ii] = file_input['lat_low'][ii] - # centroid elevations - LVIS_L2_input['lon'][jj] = file_input['lon'][jj] - LVIS_L2_input['lat'][jj] = file_input['lat'][jj] - # estimated uncertainty for both cases - LVIS_variance_low = (file_input['elev_low'] - file_input['elev'])**2 - LVIS_variance_high = (file_input['elev_high'] - file_input['elev'])**2 - LVIS_L2_input['error']=np.sqrt((LVIS_variance_low + LVIS_variance_high)/2.0) - # subset the data to indices if specified - if input_subsetter: - for key,val in LVIS_L2_input.items(): - LVIS_L2_input[key] = val[input_subsetter] - # return the output variables - return LVIS_L2_input,file_lines,lvis_flag[REGION] - -# PURPOSE: read Operation 
IceBridge data from NSIDC -# compute tides at points and times using tidal model driver algorithms -def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, - ATLAS_FORMAT=None, - GZIP=True, - DEFINITION_FILE=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=None, - APPLY_FLEXURE=False, - VERBOSE=False, - MODE=0o775): - - # create logger for verbosity level - loglevel = logging.INFO if VERBOSE else logging.CRITICAL - logger = pyTMD.utilities.build_logger('pytmd',level=loglevel) - - # get parameters for tide model - if DEFINITION_FILE is not None: - model = pyTMD.model(tide_dir).from_file(DEFINITION_FILE) - else: - model = pyTMD.model(tide_dir, format=ATLAS_FORMAT, - compressed=GZIP).elevation(TIDE_MODEL) - - # extract file name and subsetter indices lists - match_object = re.match(r'(.*?)(\[(.*?)\])?$',arg) - input_file = os.path.expanduser(match_object.group(1)) - # subset input file to indices - if match_object.group(2): - # decompress ranges and add to list - input_subsetter = [] - for i in re.findall(r'((\d+)-(\d+)|(\d+))',match_object.group(3)): - input_subsetter.append(int(i[3])) if i[3] else \ - input_subsetter.extend(range(int(i[1]),int(i[2])+1)) - else: - input_subsetter = None - - # output directory for input_file - DIRECTORY = os.path.dirname(input_file) - # calculate if input files are from ATM or LVIS (+GH) - regex = {} - regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$' - regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$' - regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$' - for key,val in regex.items(): - if re.match(val, os.path.basename(input_file)): - OIB = key - - # HDF5 file attributes - attrib = collections.OrderedDict() - # time - attrib['time'] = {} - attrib['time']['long_name'] = 'Time' - attrib['time']['description'] = ('Time_corresponding_to_the_measurement_' - 'position') - attrib['time']['units'] = 'Days since 1992-01-01T00:00:00' - attrib['time']['standard_name'] = 'time' - attrib['time']['calendar'] = 'standard' - # latitude - attrib['lat'] = {} - attrib['lat']['long_name'] = 'Latitude_of_measurement' - attrib['lat']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lat']['units'] = 'Degrees_North' - # longitude - attrib['lon'] = {} - attrib['lon']['long_name'] = 'Longitude_of_measurement' - attrib['lon']['description'] = ('Corresponding_to_the_measurement_' - 'position_at_the_acquisition_time') - attrib['lon']['units'] = 'Degrees_East' - # tides - attrib[model.variable] = {} - attrib[model.variable]['description'] = model.description - attrib[model.variable]['reference'] = model.reference - attrib[model.variable]['model'] = model.name - attrib[model.variable]['units'] = 'meters' - attrib[model.variable]['long_name'] = model.long_name - - # extract information from first input file - # acquisition year, month and day - # number of points - # instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS) - if OIB in ('ATM','ATM1b'): - M1,YYMMDD1,HHMMSS1,AX1,SF1 = re.findall(regex[OIB], input_file).pop() - # early date strings omitted century and millennia (e.g.
-    # output directory for input_file
-    DIRECTORY = os.path.dirname(input_file)
-    # determine if input files are from ATM or LVIS (+GH)
-    regex = {}
-    regex['ATM'] = r'(BLATM2|ILATM2)_(\d+)_(\d+)_smooth_nadir(.*?)(csv|seg|pt)$'
-    regex['ATM1b'] = r'(BLATM1b|ILATM1b)_(\d+)_(\d+)(.*?).(qi|TXT|h5)$'
-    regex['LVIS'] = r'(BLVIS2|BVLIS2|ILVIS2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
-    regex['LVGH'] = r'(ILVGH2)_(.*?)(\d+)_(\d+)_(R\d+)_(\d+).H5$'
-    for key,val in regex.items():
-        if re.match(val, os.path.basename(input_file)):
-            OIB = key
-
-    # HDF5 file attributes
-    attrib = collections.OrderedDict()
-    # time
-    attrib['time'] = {}
-    attrib['time']['long_name'] = 'Time'
-    attrib['time']['description'] = ('Time_corresponding_to_the_measurement_'
-        'position')
-    attrib['time']['units'] = 'Days since 1992-01-01T00:00:00'
-    attrib['time']['standard_name'] = 'time'
-    attrib['time']['calendar'] = 'standard'
-    # latitude
-    attrib['lat'] = {}
-    attrib['lat']['long_name'] = 'Latitude_of_measurement'
-    attrib['lat']['description'] = ('Corresponding_to_the_measurement_'
-        'position_at_the_acquisition_time')
-    attrib['lat']['units'] = 'Degrees_North'
-    # longitude
-    attrib['lon'] = {}
-    attrib['lon']['long_name'] = 'Longitude_of_measurement'
-    attrib['lon']['description'] = ('Corresponding_to_the_measurement_'
-        'position_at_the_acquisition_time')
-    attrib['lon']['units'] = 'Degrees_East'
-    # tides
-    attrib[model.variable] = {}
-    attrib[model.variable]['description'] = model.description
-    attrib[model.variable]['reference'] = model.reference
-    attrib[model.variable]['model'] = model.name
-    attrib[model.variable]['units'] = 'meters'
-    attrib[model.variable]['long_name'] = model.long_name
-
-    # extract information from first input file
-    # acquisition year, month and day
-    # number of points
-    # instrument (PRE-OIB ATM or LVIS, OIB ATM or LVIS)
-    if OIB in ('ATM','ATM1b'):
-        M1,YYMMDD1,HHMMSS1,AX1,SF1 = re.findall(regex[OIB], input_file).pop()
-        # early date strings omitted century and millennia (e.g. 93 for 1993)
-        if (len(YYMMDD1) == 6):
-            ypre,MM1,DD1 = YYMMDD1[:2],YYMMDD1[2:4],YYMMDD1[4:]
-            if (np.float64(ypre) >= 90):
-                YY1 = '{0:4.0f}'.format(np.float64(ypre) + 1900.0)
-            else:
-                YY1 = '{0:4.0f}'.format(np.float64(ypre) + 2000.0)
-        elif (len(YYMMDD1) == 8):
-            YY1,MM1,DD1 = YYMMDD1[:4],YYMMDD1[4:6],YYMMDD1[6:]
-    elif OIB in ('LVIS','LVGH'):
-        M1,RG1,YY1,MMDD1,RLD1,SS1 = re.findall(regex[OIB], input_file).pop()
-        MM1,DD1 = MMDD1[:2],MMDD1[2:]
-
-    # read data from input_file
-    logger.info('{0} -->'.format(input_file))
-    if (OIB == 'ATM'):
-        # load IceBridge ATM data from input_file
-        dinput,file_lines,HEM = read_ATM_icessn_file(input_file,input_subsetter)
-    elif (OIB == 'ATM1b'):
-        # load IceBridge Level-1b ATM data from input_file
-        dinput,file_lines,HEM = read_ATM_qfit_file(input_file,input_subsetter)
-    elif OIB in ('LVIS','LVGH'):
-        # load IceBridge LVIS data from input_file
-        dinput,file_lines,HEM = read_LVIS_HDF5_file(input_file,input_subsetter)
-
-    # convert time from J2000 to days relative to Jan 1, 1992 (MJD 48622)
-    # J2000: seconds since 2000-01-01 12:00:00 UTC
-    t = pyTMD.time.convert_delta_time(dinput['time'],
-        epoch1=(2000,1,1,12,0,0), epoch2=(1992,1,1,0,0,0),
-        scale=1.0/86400.0)
-    # delta time (TT - UT1) file
-    delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])
-
-    # read tidal constants and interpolate to grid points
-    if model.format in ('OTIS','ATLAS','ESR'):
-        amp,ph,D,c = extract_tidal_constants(dinput['lon'], dinput['lat'],
-            model.grid_file, model.model_file, model.projection,
-            type=model.type, method=METHOD, extrapolate=EXTRAPOLATE,
-            cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE)
-        deltat = np.zeros_like(t)
-    elif model.format in ('netcdf',):
-        amp,ph,D,c = extract_netcdf_constants(dinput['lon'], dinput['lat'],
-            model.grid_file, model.model_file, type=model.type, method=METHOD,
-            extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale,
-            compressed=model.compressed)
-        deltat = np.zeros_like(t)
-    elif (model.format == 'GOT'):
-        amp,ph,c = extract_GOT_constants(dinput['lon'], dinput['lat'],
-            model.model_file, method=METHOD, extrapolate=EXTRAPOLATE,
-            cutoff=CUTOFF, scale=model.scale, compressed=model.compressed)
-        # interpolate delta times from calendar dates to tide time
-        deltat = calc_delta_time(delta_file, t)
-    elif (model.format == 'FES'):
-        amp,ph = extract_FES_constants(dinput['lon'], dinput['lat'],
-            model.model_file, type=model.type, version=model.version,
-            method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF,
-            scale=model.scale, compressed=model.compressed)
-        # available model constituents
-        c = model.constituents
-        # interpolate delta times from calendar dates to tide time
-        deltat = calc_delta_time(delta_file, t)
-
-    # calculate complex phase in radians for Euler's formula
-    cph = -1j*ph*np.pi/180.0
-    # calculate constituent oscillation
-    hc = amp*np.exp(cph)
-
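Whatever the model format, the extracted amplitude and Greenwich phase lag of each constituent are folded into one complex harmonic constant, so the prediction step can rotate each constituent with a single complex multiply. A toy numpy check of the conversion used above, on made-up constituent values:

    import numpy as np
    amp = np.array([1.50, 0.40])    # hypothetical amplitudes (m)
    ph = np.array([30.0, 210.0])    # hypothetical phase lags (degrees)
    # complex phase in radians for Euler's formula
    cph = -1j*ph*np.pi/180.0
    hc = amp*np.exp(cph)
    # the modulus recovers the amplitude; the angle is the negated phase lag
    assert np.allclose(np.abs(hc), amp)
    assert np.allclose(np.angle(hc, deg=True), [-30.0, 150.0])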
-    # output tidal HDF5 file
-    # form: rg_NASA_model_TIDES_WGS84_fl1yyyymmddjjjjj.H5
-    # where rg is the hemisphere flag (GR or AN) for the region
-    # model is the tidal model name flag (e.g. CATS0201)
-    # fl1 is the data flag (ATM, LVIS)
-    # yyyymmddjjjjj is the year, month, day and second of the input file
-    # output region flags: GR for Greenland and AN for Antarctica
-    hem_flag = {'N':'GR','S':'AN'}
-    # use starting second to distinguish between files for the day
-    JJ1 = np.min(dinput['time']) % 86400
-    # flexure flag if being applied
-    flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else ''
-    # output file format
-    args = (hem_flag[HEM],model.name,flexure_flag,OIB,YY1,MM1,DD1,JJ1)
-    FILENAME = '{0}_NASA_{1}{2}_TIDES_WGS84_{3}{4}{5}{6}{7:05.0f}.H5'.format(*args)
-    # print file information
-    logger.info('\t{0}'.format(FILENAME))
-
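As a quick hedged check of the naming convention described in the comments above, with entirely hypothetical inputs (an Antarctic ATM flight on 2009-10-20 whose first shot falls one second into the day, CATS2008 model, no flexure):

    args = ('AN', 'CATS2008', '', 'ATM', '2009', '10', '20', 1.0)
    FILENAME = '{0}_NASA_{1}{2}_TIDES_WGS84_{3}{4}{5}{6}{7:05.0f}.H5'.format(*args)
    # FILENAME == 'AN_NASA_CATS2008_TIDES_WGS84_ATM2009102000001.H5'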
-    # open output HDF5 file
-    fid = h5py.File(os.path.join(DIRECTORY,FILENAME), 'w')
-
-    # predict tidal elevations at time and infer minor corrections
-    fill_value = -9999.0
-    tide = np.ma.empty((file_lines),fill_value=fill_value)
-    tide.mask = np.any(hc.mask,axis=1)
-    tide.data[:] = predict_tide_drift(t, hc, c,
-        deltat=deltat, corrections=model.format)
-    minor = infer_minor_corrections(t, hc, c,
-        deltat=deltat, corrections=model.format)
-    tide.data[:] += minor.data[:]
-    # replace invalid values with fill value
-    tide.data[tide.mask] = tide.fill_value
-    # copy tide to output variable
-    dinput[model.variable] = tide.copy()
-
-    # output dictionary with HDF5 variables
-    h5 = {}
-    # add variables to output file
-    for key,attributes in attrib.items():
-        # Defining the HDF5 dataset variables for lat/lon
-        h5[key] = fid.create_dataset(key, (file_lines,),
-            data=dinput[key][:], dtype=dinput[key].dtype,
-            compression='gzip')
-        # add HDF5 variable attributes
-        for att_name,att_val in attributes.items():
-            h5[key].attrs[att_name] = att_val
-        # attach dimensions
-        if key not in ('time',):
-            for i,dim in enumerate(['time']):
-                h5[key].dims[i].label = 'RECORD_SIZE'
-                h5[key].dims[i].attach_scale(h5[dim])
-
-    # HDF5 file attributes
-    fid.attrs['featureType'] = 'trajectory'
-    fid.attrs['title'] = 'Tidal_correction_for_elevation_measurements'
-    fid.attrs['summary'] = ('Tidal_correction_computed_at_elevation_'
-        'measurements_using_a_tidal_model_driver.')
-    fid.attrs['project'] = 'NASA_Operation_IceBridge'
-    fid.attrs['processing_level'] = '4'
-    fid.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime())
-    # add attributes for input file
-    fid.attrs['elevation_file'] = os.path.basename(input_file)
-    fid.attrs['tide_model'] = model.name
-    # add geospatial and temporal attributes
-    fid.attrs['geospatial_lat_min'] = dinput['lat'].min()
-    fid.attrs['geospatial_lat_max'] = dinput['lat'].max()
-    fid.attrs['geospatial_lon_min'] = dinput['lon'].min()
-    fid.attrs['geospatial_lon_max'] = dinput['lon'].max()
-    fid.attrs['geospatial_lat_units'] = "degrees_north"
-    fid.attrs['geospatial_lon_units'] = "degrees_east"
-    fid.attrs['geospatial_ellipsoid'] = "WGS84"
-    fid.attrs['time_type'] = 'UTC'
-    # convert start/end time from days since 1992-01-01 into Julian days
-    time_range = np.array([np.min(t),np.max(t)])
-    time_julian = 2400000.5 + pyTMD.time.convert_delta_time(time_range,
-        epoch1=(1992,1,1,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0)
-    # convert to calendar date
-    cal = pyTMD.time.convert_julian(time_julian,astype=int)
-    # add attributes with measurement date start, end and duration
-    args = (cal['hour'][0],cal['minute'][0],cal['second'][0])
-    fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
-    args = (cal['hour'][-1],cal['minute'][-1],cal['second'][-1])
-    fid.attrs['RangeEndingTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args)
-    args = (cal['year'][0],cal['month'][0],cal['day'][0])
-    fid.attrs['RangeBeginningDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
-    args = (cal['year'][-1],cal['month'][-1],cal['day'][-1])
-    fid.attrs['RangeEndingDate'] = '{0:4d}-{1:02d}-{2:02d}'.format(*args)
-    duration = np.round(time_julian[-1]*86400.0 - time_julian[0]*86400.0)
-    fid.attrs['DurationTimeSeconds'] = '{0:0.0f}'.format(duration)
-    # close the output HDF5 dataset
-    fid.close()
-    # change the permissions level to MODE
-    os.chmod(os.path.join(DIRECTORY,FILENAME), MODE)
-
-# PURPOSE: create argument parser
-def arguments():
-    parser = argparse.ArgumentParser(
-        description="""Calculates tidal elevations for correcting Operation
-            IceBridge elevation data
-            """,
-        fromfile_prefix_chars="@"
-    )
-    parser.convert_arg_line_to_args = pyTMD.utilities.convert_arg_line_to_args
-    # command line parameters
-    group = parser.add_mutually_exclusive_group(required=True)
-    # input operation icebridge files
-    parser.add_argument('infile',
-        type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+',
-        help='Input Operation IceBridge file to run')
-    # directory with tide data
-    parser.add_argument('--directory','-D',
-        type=lambda p: os.path.abspath(os.path.expanduser(p)),
-        default=os.getcwd(),
-        help='Working data directory')
-    # tide model to use
-    choices = sorted(pyTMD.model.ocean_elevation() + pyTMD.model.load_elevation())
-    group.add_argument('--tide','-T',
-        metavar='TIDE', type=str,
-        choices=choices,
-        help='Tide model to use in correction')
-    parser.add_argument('--atlas-format',
-        type=str, choices=('OTIS','netcdf'), default='netcdf',
-        help='ATLAS tide model format')
-    parser.add_argument('--gzip','-G',
-        default=False, action='store_true',
-        help='Tide model files are gzip compressed')
-    # tide model definition file to set an undefined model
-    group.add_argument('--definition-file',
-        type=lambda p: os.path.abspath(os.path.expanduser(p)),
-        help='Tide model definition file for use as correction')
-    # interpolation method
-    parser.add_argument('--interpolate','-I',
-        metavar='METHOD', type=str, default='spline',
-        choices=('spline','linear','nearest','bilinear'),
-        help='Spatial interpolation method')
-    # extrapolate with nearest-neighbors
-    parser.add_argument('--extrapolate','-E',
-        default=False, action='store_true',
-        help='Extrapolate with nearest-neighbors')
-    # extrapolation cutoff in kilometers
-    # set to inf to extrapolate over all points
-    parser.add_argument('--cutoff','-c',
-        type=np.float64, default=10.0,
-        help='Extrapolation cutoff in kilometers')
-    # apply flexure scaling factors to height constituents
-    parser.add_argument('--apply-flexure',
-        default=False, action='store_true',
-        help='Apply ice flexure scaling factor to height constituents')
-    # verbosity settings
-    # verbose will output information about each output file
-    parser.add_argument('--verbose','-V',
-        default=False, action='store_true',
-        help='Output information about each created file')
-    # permissions mode of the local files (number in octal)
-    parser.add_argument('--mode','-M',
-        type=lambda x: int(x,base=8), default=0o775,
-        help='Permission mode of directories and files created')
-    # return the parser
-    return parser
-
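Note that the parser reads permission modes as octal strings via int(x, base=8). A hedged usage sketch, assuming the deleted script's context (the model name and input file name are illustrative only, and --tide must name a model known to pyTMD.model):

    parser = arguments()
    # hypothetical invocation: CATS2008 tides for a single ATM icessn file
    args,_ = parser.parse_known_args(['--directory','/data/tide_models',
        '--tide','CATS2008','--mode','775','--verbose',
        'ILATM2_20091020_143636_smooth_nadir3seg_50pt.csv'])
    # args.mode == 0o775 == 509, from int('775', base=8)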
-# This is the main part of the program that calls the individual functions
-def main():
-    # Read the system arguments listed after the program
-    parser = arguments()
-    args,_ = parser.parse_known_args()
-
-    # run for each input Operation IceBridge file
-    for arg in args.infile:
-        compute_tides_icebridge_data(args.directory, arg,
-            TIDE_MODEL=args.tide,
-            ATLAS_FORMAT=args.atlas_format,
-            GZIP=args.gzip,
-            DEFINITION_FILE=args.definition_file,
-            METHOD=args.interpolate,
-            EXTRAPOLATE=args.extrapolate,
-            CUTOFF=args.cutoff,
-            APPLY_FLEXURE=args.apply_flexure,
-            VERBOSE=args.verbose,
-            MODE=args.mode)
-
-# run main program
-if __name__ == '__main__':
-    main()
diff --git a/test/test_equilibrium_tides.py b/test/test_equilibrium_tides.py
deleted file mode 100644
index fa2c038a..00000000
--- a/test/test_equilibrium_tides.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-u"""
-test_equilibrium_tides.py (08/2020)
-Download an ATL03 and ATL07 file from NSIDC and compare equilibrium tide values
-"""
-import os
-import pytest
-import warnings
-import numpy as np
-import pyTMD.time
-import pyTMD.utilities
-import pyTMD.calc_delta_time
-import pyTMD.compute_equilibrium_tide
-import icesat2_toolkit.utilities
-from icesat2_toolkit.read_ICESat2_ATL03 import read_HDF5_ATL03
-from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07
-
-# PURPOSE: Download an ATL03 file from NSIDC and compare equilibrium tides
-def test_ATL03_equilibrium_tides(username,password):
-    # path to an ATL03 file from NSIDC
-    HOST = ['https://n5eil01u.ecs.nsidc.org','ATLAS','ATL03.005','2018.10.14',
-        'ATL03_20181014000347_02350101_005_01.h5']
-    # only download ATL03 file if not currently existing
-    if not os.access(HOST[-1], os.F_OK):
-        # download an ATL03 file from NSIDC
-        icesat2_toolkit.utilities.from_nsidc(HOST,username=username,
-            password=password,local=HOST[-1],verbose=True)
-    # read ATL03 file using HDF5 reader
-    IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams = read_HDF5_ATL03(HOST[-1],
-        ATTRIBUTES=True, VERBOSE=True)
-    # verify that data is imported correctly
-    assert all(gtx in IS2_atl03_mds.keys() for gtx in IS2_atl03_beams)
-    # number of GPS seconds between the GPS epoch
-    # and ATLAS Standard Data Product (SDP) epoch
-    atlas_sdp_gps_epoch = IS2_atl03_mds['ancillary_data']['atlas_sdp_gps_epoch']
-    # for each beam
-    for gtx in IS2_atl03_beams:
-        # read ICESat-2 delta time and latitude
-        nref = len(IS2_atl03_mds[gtx]['geolocation']['segment_id'])
-        delta_time = IS2_atl03_mds[gtx]['geophys_corr']['delta_time']
-        latitude = IS2_atl03_mds[gtx]['geolocation']['reference_photon_lat']
-        # read ASAS predicted long-period equilibrium tides
-        fv = IS2_atl03_attrs[gtx]['geophys_corr']['tide_equilibrium']['_FillValue']
-        tide_equilibrium = IS2_atl03_mds[gtx]['geophys_corr']['tide_equilibrium']
-        # calculate tide time for beam
-        gps_seconds = atlas_sdp_gps_epoch + delta_time
-        leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
-        tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds,
-            epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0)
-        # interpolate delta times from calendar dates to tide time
-        delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])
-        deltat = pyTMD.calc_delta_time(delta_file, tide_time)
-        # calculate long-period equilibrium tides
-        lpet = pyTMD.compute_equilibrium_tide(tide_time+deltat, latitude)
-        # calculate differences between computed and data versions
-        difference = np.ma.zeros((nref))
-        difference.data[:] = lpet - tide_equilibrium
-        difference.mask = (tide_equilibrium == fv)
-        # will verify differences between outputs are within tolerance
-        eps = np.finfo(np.float16).eps
-        if not np.all(difference.mask):
-            assert np.all(np.abs(difference) < eps)
-
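The test above (and the ATL07 test that follows) accepts the computed long-period equilibrium tide only if it matches the ASAS value stored in the product to within half-precision machine epsilon, i.e. at the sub-millimeter level:

    import numpy as np
    eps = np.finfo(np.float16).eps
    # eps == 2**-10 == 0.0009765625, so agreement is required to < 1 mm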
-# PURPOSE: Download an ATL07 file from NSIDC and compare equilibrium tides
-def test_ATL07_equilibrium_tides(username,password):
-    # path to an ATL07 file from NSIDC
-    HOST = ['https://n5eil01u.ecs.nsidc.org','ATLAS','ATL07.005','2018.10.14',
-        'ATL07-01_20181014000347_02350101_005_03.h5']
-    # only download ATL07 file if not currently existing
-    if not os.access(HOST[-1], os.F_OK):
-        # download an ATL07 file from NSIDC
-        icesat2_toolkit.utilities.from_nsidc(HOST,username=username,
-            password=password,local=HOST[-1],verbose=True)
-    # read ATL07 file using HDF5 reader
-    IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(HOST[-1],
-        ATTRIBUTES=True, VERBOSE=True)
-    # verify that data is imported correctly
-    assert all(gtx in IS2_atl07_mds.keys() for gtx in IS2_atl07_beams)
-    # number of GPS seconds between the GPS epoch
-    # and ATLAS Standard Data Product (SDP) epoch
-    atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch']
-    # for each beam
-    for gtx in IS2_atl07_beams:
-        # read ICESat-2 sea ice delta time and latitude
-        nseg = len(IS2_atl07_mds[gtx]['sea_ice_segments']['height_segment_id'])
-        val = IS2_atl07_mds[gtx]['sea_ice_segments']
-        attrs = IS2_atl07_attrs[gtx]['sea_ice_segments']
-        delta_time = val['delta_time']
-        latitude = val['latitude']
-        # read ASAS predicted long-period equilibrium tides
-        fv = attrs['geophysical']['height_segment_lpe']['_FillValue']
-        tide_equilibrium = val['geophysical']['height_segment_lpe'][:]
-        # calculate tide time for beam
-        gps_seconds = atlas_sdp_gps_epoch + delta_time
-        leap_seconds = pyTMD.time.count_leap_seconds(gps_seconds)
-        tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds,
-            epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0)
-        # interpolate delta times from calendar dates to tide time
-        delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])
-        deltat = pyTMD.calc_delta_time(delta_file, tide_time)
-        # calculate long-period equilibrium tides
-        lpet = pyTMD.compute_equilibrium_tide(tide_time+deltat, latitude)
-        # calculate differences between computed and data versions
-        difference = np.ma.zeros((nseg))
-        difference.data[:] = lpet - tide_equilibrium
-        difference.mask = (tide_equilibrium == fv)
-        # will verify differences between outputs are within tolerance
-        eps = np.finfo(np.float16).eps
-        if not np.all(difference.mask):
-            assert np.all(np.abs(difference) < eps)
diff --git a/version.txt b/version.txt
index b0f3d96f..66c4c226 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-1.0.8
+1.0.9