Skip to content

Commit

Permalink
Merge pull request #163 from Open-EO/hv_orgainze_test
Browse files Browse the repository at this point in the history
split up unit and integration tests
  • Loading branch information
HansVRP authored Sep 27, 2024
2 parents e5db130 + 7b654f5 commit 55249f9
Show file tree
Hide file tree
Showing 75 changed files with 49 additions and 31 deletions.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,9 @@ def test_patch_feature_udf(backend: Backend):
connection = BACKEND_CONNECTIONS[backend]()
backend_context = BackendContext(backend=backend)

output_path = Path(__file__).parent / f"results/patch_features_{backend.value}.nc/"
output_path = (
Path(__file__).parent.parent / f"results/patch_features_{backend.value}.nc/"
)

bands_to_extract = ["S2-L2A-B04", "S2-L2A-B03", "S2-L2A-B02"]

Expand Down Expand Up @@ -140,7 +142,8 @@ def test_s1_rescale(backend: Backend):
connection = BACKEND_CONNECTIONS[backend]()
backend_context = BackendContext(backend=backend)
output_path = (
Path(__file__).parent / f"results/s1_rescaled_features_{backend.value}.nc"
Path(__file__).parent.parent
/ f"results/s1_rescaled_features_{backend.value}.nc"
)

REDUCED_TEMPORAL_CONTEXT = TemporalContext(
Expand Down Expand Up @@ -183,7 +186,9 @@ def test_s1_rescale(backend: Backend):
def test_latlon_extractor(backend: Backend):
connection = BACKEND_CONNECTIONS[backend]()
backend_context = BackendContext(backend=backend)
output_path = Path(__file__).parent / f"results/latlon_features_{backend.value}.nc"
output_path = (
Path(__file__).parent.parent / f"results/latlon_features_{backend.value}.nc"
)

REDUCED_TEMPORAL_CONTEXT = TemporalContext(
start_date="2023-06-01", end_date="2023-06-30"
Expand Down Expand Up @@ -225,7 +230,7 @@ def test_latlon_extractor(backend: Backend):

# TODO: will local processing be part of the API?
def test_patch_feature_local():
input_path = Path(__file__).parent / "resources/test_optical_cube.nc"
input_path = Path(__file__).parent.parent / "resources/test_optical_cube.nc"

inds = (
xr.open_dataset(input_path)
Expand All @@ -239,6 +244,6 @@ def test_patch_feature_local():
DummyPatchExtractor, inds, parameters={"GEO-EPSG": 32631}
)

features.to_netcdf(Path(__file__).parent / "results/patch_features_local.nc")
features.to_netcdf(Path(__file__).parent.parent / "results/patch_features_local.nc")

assert set(features.bands.values) == set(["red", "green", "blue"])
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
import rioxarray
import xarray as xr

# TODO to centralize
# Retrieve the test parameters from the s2 fetcher tests
from test_s2_fetchers import POINT_EXTRACTION_DF, test_backends, test_configurations

from openeo_gfmap import Backend, BackendContext, SpatialContext, TemporalContext
from openeo_gfmap.backend import BACKEND_CONNECTIONS
from openeo_gfmap.fetching import (
Expand All @@ -25,9 +29,6 @@
select_sar_bands,
)

# Retrieve the test parameters from the s2 fetcher tests
from .test_s2_fetchers import POINT_EXTRACTION_DF, test_backends, test_configurations


# integration test checks if the output S1 cube has the correct band names;
class TestS1Extractors:
Expand Down Expand Up @@ -66,7 +67,7 @@ def sentinel1_grd(
cube = compress_backscatter_uint16(context, cube)

output_file = (
Path(__file__).parent / f"results/{backend.value}_sentinel1_grd.nc"
Path(__file__).parent.parent / f"results/{backend.value}_sentinel1_grd.nc"
)

job = cube.create_job(
Expand Down Expand Up @@ -101,7 +102,8 @@ def compare_sentinel1_tiles():
loaded_tiles = []
for backend in backend_types:
tile_path = (
Path(__file__).parent / f"results/{backend.value}_sentinel1_grd.nc"
Path(__file__).parent.parent
/ f"results/{backend.value}_sentinel1_grd.nc"
)
loaded_tiles.append(xr.open_dataset(tile_path))

Expand Down Expand Up @@ -172,7 +174,8 @@ def sentinel1_grd_point_based(
cube = cube.aggregate_spatial(spatial_context, reducer="mean")

output_file = (
Path(__file__).parent / f"results/points_{backend.value}_sentinel1_grd.nc"
Path(__file__).parent.parent
/ f"results/points_{backend.value}_sentinel1_grd.nc"
)

cube.download(output_file, format="JSON")
Expand Down Expand Up @@ -223,7 +226,9 @@ def sentinel1_grd_polygon_based(
cube = extractor.get_cube(connection, spatial_context, temporal_context)
cube = compress_backscatter_uint16(context, cube)

output_folder = Path(__file__).parent / f"results/polygons_s1_{backend.value}/"
output_folder = (
Path(__file__).parent.parent / f"results/polygons_s1_{backend.value}/"
)
output_folder.mkdir(exist_ok=True, parents=True)

job = cube.create_job(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,12 @@

# Dataset of polygons for POINT based extraction
POINT_EXTRACTION_DF = (
Path(__file__).parent / "resources/malawi_extraction_polygons.gpkg"
Path(__file__).parent.parent / "resources/malawi_extraction_polygons.gpkg"
)

# Dataset of polygons for POLYGON based extraction
POLYGON_EXTRACTION_DF = (
Path(__file__).parent / "resources/puglia_extraction_polygons.gpkg"
Path(__file__).parent.parent / "resources/puglia_extraction_polygons.gpkg"
)

# test_backends = [Backend.TERRASCOPE, Backend.CDSE]
Expand Down Expand Up @@ -102,7 +102,7 @@ def sentinel2_l2a(
cube = extractor.get_cube(connection, spatial_extent, temporal_extent)

output_file = (
Path(__file__).parent / f"results/{backend.value}_sentinel2_l2a.nc"
Path(__file__).parent.parent / f"results/{backend.value}_sentinel2_l2a.nc"
)

cube.download(output_file, format="NetCDF")
Expand All @@ -127,7 +127,8 @@ def compare_sentinel2_tiles():
if backend == Backend.EODC: # TODO fix EDOC backend first
continue
tile_path = (
Path(__file__).parent / f"results/{backend.value}_sentinel2_l2a.nc"
Path(__file__).parent.parent
/ f"results/{backend.value}_sentinel2_l2a.nc"
)
loaded_tiles.append(xr.open_dataset(tile_path))

Expand Down Expand Up @@ -190,7 +191,8 @@ def sentinel2_l2a_point_based(
cube = cube.aggregate_spatial(spatial_context, reducer="mean")

output_file = (
Path(__file__).parent / f"results/points_{backend.value}_sentinel2_l2a.json"
Path(__file__).parent.parent
/ f"results/points_{backend.value}_sentinel2_l2a.json"
)

cube.download(output_file, format="JSON")
Expand Down Expand Up @@ -231,7 +233,9 @@ def sentinel2_l2a_polygon_based(

cube = extractor.get_cube(connection, spatial_context, temporal_context)

output_folder = Path(__file__).parent / f"results/polygons_s2_{backend.value}/"
output_folder = (
Path(__file__).parent.parent / f"results/polygons_s2_{backend.value}/"
)
output_folder.mkdir(exist_ok=True, parents=True)

job = cube.create_job(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,7 @@
)
from openeo_gfmap.preprocessing.cloudmasking import mask_scl_dilation
from openeo_gfmap.preprocessing.compositing import median_compositing

from .utils import load_dataarray_url
from tests.utils.helpers import load_dataarray_url

spatial_context = BoundingBoxExtent(
west=5.0, south=51.2, east=5.025, north=51.225, epsg=4326
Expand Down Expand Up @@ -61,7 +60,8 @@ def test_onnx_inference_local():
assert output.shape == (1, 256, 256)
assert len(np.unique(output.values)) == 3

output_path = Path(__file__).parent / "results/test_onnx_inference_local.nc"
output_path = Path(__file__).parent.parent / "results/test_onnx_inference_local.nc"
print(output_path)
output.to_netcdf(output_path)


Expand Down Expand Up @@ -113,7 +113,7 @@ def test_onnx_inference():
],
)

output_path = Path(__file__).parent / "results/test_onnx_inference.tif"
output_path = Path(__file__).parent.parent / "results/test_onnx_inference.tif"

# Download the results as tif file.
job = cube.create_job(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def test_bap_score(backend: Backend):
for asset in job.get_results().get_assets():
if asset.metadata["type"].startswith("application/x-netcdf"):
asset.download(
Path(__file__).parent / f"results/bap_score_{backend.value}.nc"
Path(__file__).parent.parent / f"results/bap_score_{backend.value}.nc"
)


Expand Down Expand Up @@ -122,7 +122,8 @@ def test_bap_masking(backend: Backend):
for asset in job.get_results().get_assets():
if asset.metadata["type"].startswith("application/x-netcdf"):
asset.download(
Path(__file__).parent / f"results/bap_composited_{backend.value}.nc"
Path(__file__).parent.parent
/ f"results/bap_composited_{backend.value}.nc"
)


Expand Down Expand Up @@ -213,5 +214,5 @@ def test_bap_quintad(backend: Backend):
for asset in job.get_results().get_assets():
if asset.metadata["type"].startswith("application/x-netcdf"):
asset.download(
Path(__file__).parent / f"results/bap_quintad_{backend.value}.nc"
Path(__file__).parent.parent / f"results/bap_quintad_{backend.value}.nc"
)

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.

Large diffs are not rendered by default.

Binary file not shown.

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ def mock_query_cdse_catalogue(
hash_value = hashlib.sha256(combined_arguments.encode()).hexdigest()

src_path = (
Path(__file__).parent / f"resources/{hash_value[:8]}_query_cdse_results.json"
Path(__file__).parent.parent
/ f"resources/{hash_value[:8]}_query_cdse_results.json"
)

if not src_path.exists():
Expand Down
Empty file added tests/tests_unit/__init__.py
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,7 @@
_get_s2_l2a_default_fetcher,
_get_s2_l2a_default_processor,
)

from .utils import create_test_datacube
from tests.utils.helpers import create_test_datacube

# Mock constants for the tests
BANDS = ["S2-L2A-B01", "S2-L2A-B02"]
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Test the job splitters and managers of OpenEO GFMAP."""


import geopandas as gpd
from shapely.geometry import Point, Polygon

Expand Down
Binary file added tests/tests_unit/results/bap_composited_cdse.nc
Binary file not shown.
Binary file added tests/tests_unit/results/bap_quintad_cdse.nc
Binary file not shown.
Binary file added tests/tests_unit/results/bap_score_cdse.nc
Binary file not shown.
Binary file added tests/tests_unit/results/cdse_sentinel1_grd.nc
Binary file not shown.
Binary file added tests/tests_unit/results/cdse_sentinel2_l2a.nc
Binary file not shown.
Binary file added tests/tests_unit/results/latlon_features_cdse.nc
Binary file not shown.
Binary file added tests/tests_unit/results/patch_features_cdse.nc
Binary file not shown.
Binary file added tests/tests_unit/results/patch_features_local.nc
Binary file not shown.
Binary file not shown.
1 change: 1 addition & 0 deletions tests/tests_unit/results/points_cdse_sentinel2_l2a.json

Large diffs are not rendered by default.

Binary file not shown.
1 change: 1 addition & 0 deletions tests/tests_unit/results/polygons_s2_cdse/job-results.json

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added tests/tests_unit/results/test_onnx_inference.tif
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@
BASE_SENTINEL2_L2A_MAPPING,
ELEMENT84_SENTINEL2_L2A_MAPPING,
)

from .utils import create_test_datacube
from tests.utils.helpers import create_test_datacube

# band names

Expand Down
File renamed without changes.
Empty file added tests/utils/__init__.py
Empty file.
File renamed without changes.

0 comments on commit 55249f9

Please sign in to comment.