Skip to content

Commit

Permalink
Merge pull request #42 from GeoscienceAustralia/tests
Browse files Browse the repository at this point in the history
Add first iteration of tests to repo
  • Loading branch information
vnewey authored Dec 21, 2023
2 parents fa29eda + 06ac857 commit b29dd4b
Show file tree
Hide file tree
Showing 13 changed files with 145 additions and 42 deletions.
24 changes: 23 additions & 1 deletion .github/workflows/dea-intertidal-image.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: DEA Intertidal Image Push
name: Image build and test

env:
IMAGE_NAME: geoscienceaustralia/dea-intertidal
Expand All @@ -11,11 +11,13 @@ on:
paths:
- 'intertidal/**'
- 'data/**'
- 'tests/**'
- '.github/workflows/dea-intertidal-image.yml'
- 'Dockerfile'
- 'requirements.in'
- 'setup.py'
- 'docker-compose.yml'
- 'codecov.yaml'

release:
types: [created, edited, published]
Expand All @@ -31,13 +33,33 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 0

- name: Build DEA Intertidal image
timeout-minutes: 20
shell: bash
run: |
docker-compose build
- name: Run tests
run: |
# Download tide modelling files and unzip
# TODO: Replace with S3 sync from dea-non-public-data
wget --no-verbose https://www.dropbox.com/s/uemd8ib2vfw5nad/tide_models.zip?dl=1 -O tide_models.zip
unzip -q tide_models.zip
# Run integration tests using Docker
docker compose run dea_intertidal pytest -v --cov=intertidal --cov-report=xml tests
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

# - name: Setup upterm session
# uses: lhotari/action-upterm@v1

push_ecr:
needs: [test]
runs-on: ubuntu-latest
Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@

[![DOI](https://img.shields.io/badge/DOI-10.1016/j.ecss.2019.03.006-0e7fbf.svg)](https://doi.org/10.1016/j.ecss.2019.03.006)
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
[![codecov](https://codecov.io/gh/GeoscienceAustralia/dea-intertidal/branch/main/graph/badge.svg?token=7HXSIPGT5I)](https://codecov.io/gh/GeoscienceAustralia/dea-intertidal)
[![example workflow](https://github.com/GeoscienceAustralia/dea-intertidal/actions/workflows/dea-intertidal-image.yml/badge.svg)](https://github.com/GeoscienceAustralia/dea-intertidal/actions/workflows/dea-intertidal-image.yml)

**License:** The code in this repository is licensed under the [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0). Digital Earth Australia data is licensed under the [Creative Commons by Attribution 4.0 license](https://creativecommons.org/licenses/by/4.0/).

Expand Down
8 changes: 8 additions & 0 deletions codecov.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
ignore:
- "intertidal/validation.py"
- "intertidal/composites.py"

coverage:
precision: 1
round: down
range: "50...90"
6 changes: 5 additions & 1 deletion intertidal/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,8 @@
!*.ipynb
!*.sh
!*.yaml
!*.gitignore
!*.yml
!*.in
!*.gitignore
!*.dockerignore
!*Dockerfile
70 changes: 41 additions & 29 deletions intertidal/elevation.py
Original file line number Diff line number Diff line change
Expand Up @@ -820,7 +820,7 @@ def pixel_dem(interval_ds, ndwi_thresh=0.1, interp_intervals=200, smooth_radius=
smoothed_ds = interval_ds.rolling(
interval=smooth_radius,
center=False,
min_periods=1, #int(smooth_radius / 2.0),
min_periods=1, # int(smooth_radius / 2.0),
).mean()
else:
smoothed_ds = interval_ds
Expand All @@ -844,6 +844,7 @@ def pixel_dem(interval_ds, ndwi_thresh=0.1, interp_intervals=200, smooth_radius=
# Export as xr.Dataset
return dem_flat.to_dataset(name="elevation")


def pixel_dem_debug(
x,
y,
Expand Down Expand Up @@ -1113,7 +1114,7 @@ def clean_edge_pixels(ds):
effects mean that modelled elevations are likely to be inaccurate.
This function uses binary dilation to identify the edges of
intertidal elevation data with greater than 0 elevation. The
intertidal elevation data with greater than 0 elevation. The
resulting mask is applied to the elevation dataset to remove upper
intertidal edge pixels from both elevation and uncertainty datasets.
Expand Down Expand Up @@ -1247,7 +1248,7 @@ def elevation(
ancillary_points="data/raw/tide_correlations_2017-2019.geojson",
# ancillary_points="data/raw/tide_correlation_points_spearmanndwi_nt.geojson",
top_n=3,
reduce_method='mean',
reduce_method="mean",
resolution=3000,
)

Expand Down Expand Up @@ -1347,14 +1348,14 @@ def elevation(
@click.option(
"--start_date",
type=str,
default="2020",
default="2019",
help="The start date of satellite data to load from the "
"datacube. This can be any date format accepted by datacube. ",
)
@click.option(
"--end_date",
type=str,
default="2022",
default="2021",
help="The end date of satellite data to load from the "
"datacube. This can be any date format accepted by datacube. ",
)
Expand Down Expand Up @@ -1494,44 +1495,55 @@ def intertidal_cli(
configure_s3_access(cloud_defaults=True, aws_unsigned=aws_unsigned)

# Create output folder. If it doesn't exist, create it
output_dir = f"data/interim/{study_area}/{start_date}-{end_date}-spearman"
output_dir = f"data/interim/{study_area}/{start_date}-{end_date}"
os.makedirs(output_dir, exist_ok=True)

try:
log.info(f"Study area {study_area}: Loading satellite data")

# Connect to datacube to load data
dc = datacube.Datacube(app="Intertidal_CLI")


# Create local dask cluster to improve data load time
client = create_local_dask_cluster(return_client=True)

satellite_ds = load_data(
dc=dc,
study_area=study_area,
time_range=(start_date, end_date),
resolution=resolution,
crs="EPSG:3577",
include_s2=True,
include_ls=True,
filter_gqa=True,
max_cloudcover=90,
skip_broken_datasets=True,
)
if study_area == "testing":
log.info(f"Running in testing mode")
import pickle

# Load data
satellite_ds.load()
with open("tests/data/satellite_ds.pickle", "rb") as handle:
satellite_ds = pickle.load(handle)
valid_mask = None

# Load data from GA's Australian Bathymetry and Topography Grid 2009
topobathy_ds = load_topobathy(
dc, satellite_ds, product="ga_multi_ausbath_0", resampling="bilinear"
)
else:

# Connect to datacube to load data
dc = datacube.Datacube(app="Intertidal_CLI")

satellite_ds = load_data(
dc=dc,
study_area=study_area,
time_range=(start_date, end_date),
resolution=resolution,
crs="EPSG:3577",
include_s2=True,
include_ls=True,
filter_gqa=True,
max_cloudcover=90,
skip_broken_datasets=True,
)

# Load data
satellite_ds.load()

# Load data from GA's Australian Bathymetry and Topography Grid 2009
topobathy_ds = load_topobathy(
dc, satellite_ds, product="ga_multi_ausbath_0", resampling="bilinear"
)
valid_mask = topobathy_ds.height_depth > -20

# Calculate elevation
log.info(f"Study area {study_area}: Calculating Intertidal Elevation")
ds, ds_aux, tide_m = elevation(
satellite_ds,
valid_mask=topobathy_ds.height_depth > -20,
valid_mask=valid_mask,
ndwi_thresh=ndwi_thresh,
min_freq=min_freq,
max_freq=max_freq,
Expand Down
2 changes: 0 additions & 2 deletions intertidal/exposure.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@
# from pyproj import Transformer
# from scipy.signal import argrelmax, argrelmin

from scipy.interpolate import interp1d

from dea_tools.coastal import pixel_tides, model_tides
from intertidal.tide_modelling import pixel_tides_ensemble
# from intertidal.elevation import pixel_tides_ensemble
Expand Down
5 changes: 1 addition & 4 deletions intertidal/extents.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def extents(
freq,
dem,
corr,
land_use_mask="/gdata1/data/land_use/ABARES_CLUM/geotiff_clum_50m1220m/clum_50m1220m.tif",
land_use_mask="https://dea-public-data-dev.s3-ap-southeast-2.amazonaws.com/abares_clum_2020/clum_50m1220m.tiff",
):
"""
Classify coastal ecosystems into broad classes based
Expand Down Expand Up @@ -147,9 +147,6 @@ def extents(
crosses zero)
"""
## Connect to datacube to load `ocean_da`
dc = datacube.Datacube(app="ocean_masking")

# Load the land use dataset to mask out misclassified extents classes caused by urban land class
landuse_da = load_reproject(
path=land_use_mask,
Expand Down
6 changes: 3 additions & 3 deletions notebooks/Intertidal_CLI.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 4,
"id": "58c0e260-cd76-45d9-9dc3-5e4294be31c6",
"metadata": {
"tags": []
Expand All @@ -92,7 +92,7 @@
"tide_model_dir = \"/gdata1/data/tide_models_clipped\" # Directory containing tide model files\n",
"\n",
"# Intertidal Exposure params\n",
"modelled_freq = \"30min\"\n",
"modelled_freq = \"3h\"\n",
"\n",
"# Intertidal Composites params\n",
"threshold_lowtide = 0.2\n",
Expand All @@ -102,7 +102,7 @@
"# for testing elevation code, and outputting optional auxiliary outputs\n",
"# for debugging (defaults are to calculate exposure/offsets, and not\n",
"# output auxiliary files.\n",
"exposure_offsets = \"--no-exposure_offsets\"\n",
"exposure_offsets = \"\" #\"--no-exposure_offsets\"\n",
"output_auxiliaries = \"--output_auxiliaries\""
]
},
Expand Down
4 changes: 4 additions & 0 deletions requirements.in
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,11 @@ pyTMD==2.0.5
pandas==1.5.3
pygeos==0.14
pyproj==3.4.1
pytest
pytest-dependency
pytest-cov
pytz==2023.3
rioxarray
rasterio==1.3.4
seaborn==0.13.0
scikit_image==0.19.3
Expand Down
7 changes: 5 additions & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@
"pygeos",
"pyproj",
"pyTMD>=2.0.0",
"pytest",
"pytest-dependency",
"pytest-cov",
"pytz",
"rasterio",
"setuptools-scm",
Expand All @@ -43,8 +46,8 @@
NAME = "dea_intertidal"
DESCRIPTION = "Tools for running Digital Earth Australia Intertidal"
URL = "https://github.com/GeoscienceAustralia/dea-intertidal"
EMAIL = "Robbi.BishopTaylor@ga.gov.au"
AUTHOR = "Robbi Bishop-Taylor"
EMAIL = "earth.observation@ga.gov.au"
AUTHOR = "Geoscience Australia"
REQUIRES_PYTHON = ">=3.8.0"

# Setup kwargs
Expand Down
Empty file added tests/__init__.py
Empty file.
Binary file added tests/data/satellite_ds.pickle
Binary file not shown.
53 changes: 53 additions & 0 deletions tests/test_intertidal.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import pytest
from click.testing import CliRunner
from intertidal.elevation import intertidal_cli, elevation
import pickle


@pytest.fixture()
def satellite_ds():
    """Return a cached satellite dataset, unpickled from the test data folder.

    The pickle holds a pre-loaded satellite time series so tests can run
    without a datacube connection.
    """
    with open("tests/data/satellite_ds.pickle", "rb") as f:
        return pickle.load(f)


def test_intertidal_cli():
    """Run the Intertidal CLI end-to-end in "testing" mode.

    Invokes `intertidal_cli` through Click's `CliRunner` with the special
    "testing" study area (which loads cached satellite data rather than
    querying the datacube) and verifies the command completes successfully.
    """
    runner = CliRunner()
    result = runner.invoke(
        intertidal_cli,
        [
            "--study_area",
            "testing",
            "--start_date",
            "2020",
            "--end_date",
            "2022",
            "--modelled_freq",
            "3h",
        ],
    )
    # Include the captured CLI output in the assertion message so a
    # non-zero exit code is diagnosable directly from the pytest report,
    # instead of only reporting "0 != 1".
    assert result.exit_code == 0, result.output


def test_elevation(satellite_ds):
    """Run the elevation model on cached satellite data and check its outputs."""
    outputs = elevation(
        satellite_ds,
        valid_mask=None,
        ndwi_thresh=0.1,
        min_freq=0.01,
        max_freq=0.99,
        min_correlation=0.15,
        windows_n=20,
        window_prop_tide=0.15,
        max_workers=None,
        tide_model="FES2014",
        tide_model_dir="/var/share/tide_models",
        study_area=None,
        log=None,
    )
    ds, ds_aux, tide_m = outputs

    # The main output dataset must carry both elevation layers
    for expected_var in ("elevation", "elevation_uncertainty"):
        assert expected_var in ds.data_vars

    # The output should be reduced to a single (time-less) layer
    assert "time" not in ds.dims

0 comments on commit b29dd4b

Please sign in to comment.