Regression #87

GitHub Actions / Regression test results for ops failed Jun 25, 2024 in 0s

44 fail, 530 pass in 44m 53s

574 tests in 1 suite (1 file): 530 ✅ passed, 0 💤 skipped, 44 ❌ failed, in 44m 53s ⏱️

Results for commit 2ac3719.

Annotations

test_spatial_subset[C2832224417-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 23s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2832224417-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832224417-POCLOUD', 'concept-id': 'G3071779083-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
>       lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)

verify_collection.py:398: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

dataset = <xarray.Dataset> Size: 232B
Dimensions:                 (ydim_grid: 1, xdim_grid: 1, look: 1,
                        ...                                 -0.43
    history_json:                                       [{"date_time": "2024-...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69127672_RSS_SMAP_SSS_L2C_r49943_20240607T102958_2024159_NRT_V06.0_001.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]

    def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
        # Try getting it from UMM-Var first
        lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
        lat_var_name = get_variable_name_from_umm_json(lat_var_json)
        lon_var_name = get_variable_name_from_umm_json(lon_var_json)
    
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
    
        logging.warning("Unable to find lat/lon vars in UMM-Var")
    
        # If that doesn't work, try using cf-xarray to infer lat/lon variable names
        try:
            latitude = [lat for lat in dataset.cf.coordinates['latitude']
                             if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
            longitude = [lon for lon in dataset.cf.coordinates['longitude']
                             if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
            return latitude, longitude
        except:
            logging.warning("Unable to find lat/lon vars using cf_xarray")
    
        # If that still doesn't work, try using l2ss-py directly
        try:
            # file not able to be flattened unless locally downloaded
            shutil.copy(file_to_subset, 'my_copy_file.nc')
            nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
            # flatten the dataset
            nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
    
            args = {
                    'decode_coords': False,
                    'mask_and_scale': False,
                    'decode_times': False
                    }
    
            with xarray.open_dataset(
                xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
                **args
                ) as flat_dataset:
                    # use l2ss-py to find lat and lon names
                    lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
    
            os.remove('my_copy_file.nc')
            if lat_var_names and lon_var_names:
                lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
                lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
                return lat_var_name, lon_var_name
    
        except ValueError:
            logging.warning("Unable to find lat/lon vars using l2ss-py")
    
        # Still no dice, try using the 'units' variable attribute
        for coord_name, coord in dataset.coords.items():
            if 'units' not in coord.attrs:
                continue
            if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
                lat_var_name = coord_name
            if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
                lon_var_name = coord_name
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
        else:
            logging.warning("Unable to find lat/lon vars using 'units' attribute")
    
        # Out of options, fail the test because we couldn't determine lat/lon variables
>       pytest.fail(f"Unable to find latitude and longitude variables.")
E       Failed: Unable to find latitude and longitude variables.

verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3071779083-POCLOUD for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832224417-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.28062499999999%3A82.08362499999998%29&subset=lon%284.504874999999998%3A175.500125%29&granuleId=G3071779083-POCLOUD
INFO     root:verify_collection.py:385 Submitted harmony job 3eaccd87-678c-4f8b-aa73-37b788ce609a
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2832224410/69127672_RSS_SMAP_SSS_L2C_r49943_20240607T102958_2024159_NRT_V06.0_001.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING  root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING  root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute
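
A note on this failure mode: all four lookups (UMM-Var, cf_xarray, the l2ss-py flattening fallback, and the 'units' attribute scan) came up empty, and the dataset repr above shows only root-level dimensions (ydim_grid, xdim_grid, look), which suggests the coordinates live inside netCDF groups that the root-level xarray.open_dataset call never sees. A minimal debugging sketch, assuming only the netCDF4 API; the path is a placeholder, not taken from this run:

    import netCDF4

    def find_geo_vars(path):
        """Walk every group and print variables whose units look geographic."""
        def walk(group, prefix=''):
            for name, var in group.variables.items():
                if getattr(var, 'units', '') in ('degrees_north', 'degrees_east'):
                    print(f"{prefix}/{name}: units={var.units}")
            for child_name, child in group.groups.items():
                walk(child, f"{prefix}/{child_name}")
        with netCDF4.Dataset(path) as ds:
            walk(ds)

    find_geo_vars('subsetted_granule.nc4')  # placeholder path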

test_spatial_subset[C1918210023-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 51s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f7cdb366240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f7cdf923eb0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1918210023-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918210023-GES_DISC', 'concept-id': 'G3130184867-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918210023-GES_DISC'}]}, 'meta': {'association-details': {'collect...RL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/qa_value', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:432: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:423: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f7cdb366240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f7cdf923eb0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3130184867-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918210023-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.42954999999999%3A-59.996449999999996%29&subset=lon%28-159.497975%3A127.364975%29&granuleId=G3130184867-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 98df1aef-124e-4fc8-9c10-b0d6f960a44d
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020/69127677_S5P_OFFL_L2_HCHO_20240624T031300_20240624T045430_34699_03_020601_20240625T190508_subsetted.nc4
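
The '/METADATA/PRODUCT' path in this error is consistent with a bug visible in group_walk above: group_list is shared across recursive calls, so after descending into METADATA (which lacks the latitude variable) the list still contains 'METADATA' when the sibling PRODUCT group is found at the root level, and the joined path becomes '/METADATA/PRODUCT', a group that does not exist. A minimal sketch of a walk that threads the path through the recursion instead; this is an illustration, not the suite's actual fix:

    def find_lat_group(nc_d, lat_var_name, current_path=''):
        """Return the slash-separated group path containing lat_var_name, or None."""
        if lat_var_name in nc_d.variables:
            return current_path or '/'
        for name, child in nc_d.groups.items():
            found = find_lat_group(child, lat_var_name, f'{current_path}/{name}')
            if found is not None:
                return found
        return None

    # Usage against the test's open netCDF4.Dataset, e.g.:
    # group = find_lat_group(f, lat_var_name)
    # subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=group, decode_times=False)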

test_spatial_subset[C2087131083-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 31s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0e68a99640>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0e712f97e0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C2087131083-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087131083-GES_DISC', 'concept-id': 'G3130317459-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087131083-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:432: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:423: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0e68a99640>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0e712f97e0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3130317459-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087131083-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.41945%3A-59.41255%29&subset=lon%2880.11795000000001%3A154.33004999999997%29&granuleId=G3130317459-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 2dea2b36-1273-422a-b493-ea1ced1113bf
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C2087131080/69127708_S5P_OFFL_L2_AER_AI_20240624T063600_20240624T081730_34701_03_020600_20240625T202645_subsetted.nc4

test_spatial_subset[C2832221740-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 43s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2832221740-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832221740-POCLOUD', 'concept-id': 'G3059748792-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832221740-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2832221740')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7faa29a36830>
job_id = 'ff27470e-668f-4f9d-83c7-fc75d24e19d9', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3059748792-POCLOUD for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2832221740-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.29042474999999%3A82.03317474999999%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3059748792-POCLOUD
INFO     root:verify_collection.py:385 Submitted harmony job ff27470e-668f-4f9d-83c7-fc75d24e19d9
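
The service reports only "an unknown error", so there is little to act on from the client side; the traceback shows wait_for_processing raising as soon as progress() reports a failed status. A hedged sketch of how the test could log the job's final status before failing, using only the progress() call already visible in the wait loop above (the surrounding names follow the test code; nothing here is confirmed project practice):

    from harmony.harmony import ProcessingFailedException

    try:
        harmony_client.wait_for_processing(job_id, show_progress=True)
    except ProcessingFailedException:
        # progress() returns (percent, status, message), as seen in the wait loop above
        _, status, message = harmony_client.progress(job_id)
        logging.error("Harmony job %s ended with status=%s: %s", job_id, status, message)
        raise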

test_spatial_subset[C2936721448-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 23s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2936721448-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2936721448-POCLOUD', 'concept-id': 'G3062447313-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
>       lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)

verify_collection.py:398: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

dataset = <xarray.Dataset> Size: 240B
Dimensions:                 (ydim_grid: 1, xdim_grid: 1, look: 1,
                        ...                                 -0.43
    history_json:                                           [{"date_time": "2...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440/69127848_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2936721448-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]

    def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict]):
        # Try getting it from UMM-Var first
        lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
        lat_var_name = get_variable_name_from_umm_json(lat_var_json)
        lon_var_name = get_variable_name_from_umm_json(lon_var_json)
    
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
    
        logging.warning("Unable to find lat/lon vars in UMM-Var")
    
        # If that doesn't work, try using cf-xarray to infer lat/lon variable names
        try:
            latitude = [lat for lat in dataset.cf.coordinates['latitude']
                             if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
            longitude = [lon for lon in dataset.cf.coordinates['longitude']
                             if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
            return latitude, longitude
        except:
            logging.warning("Unable to find lat/lon vars using cf_xarray")
    
        # If that still doesn't work, try using l2ss-py directly
        try:
            # file not able to be flattened unless locally downloaded
            shutil.copy(file_to_subset, 'my_copy_file.nc')
            nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
            # flatten the dataset
            nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, 'my_copy_file.nc')
    
            args = {
                    'decode_coords': False,
                    'mask_and_scale': False,
                    'decode_times': False
                    }
    
            with xarray.open_dataset(
                xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
                **args
                ) as flat_dataset:
                    # use l2ss-py to find lat and lon names
                    lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
    
            os.remove('my_copy_file.nc')
            if lat_var_names and lon_var_names:
                lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
                lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
                return lat_var_name, lon_var_name
    
        except ValueError:
            logging.warning("Unable to find lat/lon vars using l2ss-py")
    
        # Still no dice, try using the 'units' variable attribute
        for coord_name, coord in dataset.coords.items():
            if 'units' not in coord.attrs:
                continue
            if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
                lat_var_name = coord_name
            if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
                lon_var_name = coord_name
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
        else:
            logging.warning("Unable to find lat/lon vars using 'units' attribute")
    
        # Out of options, fail the test because we couldn't determine lat/lon variables
>       pytest.fail(f"Unable to find latitude and longitude variables.")
E       Failed: Unable to find latitude and longitude variables.

verify_collection.py:358: Failed
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3062447313-POCLOUD for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2936721448-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.29044999999999%3A82.08044999999998%29&subset=lon%284.51755%3A175.50045%29&granuleId=G3062447313-POCLOUD
INFO     root:verify_collection.py:385 Submitted harmony job d4997e1d-83f0-4f2c-a0e1-a041a5110a4d
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2936721440/69127848_RSS_SMAP_SSS_L2C_r47700_20240106T014035_2024006_FNL_V05.3.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING  root:verify_collection.py:342 Unable to find lat/lon vars using l2ss-py
WARNING  root:verify_collection.py:355 Unable to find lat/lon vars using 'units' attribute

test_spatial_subset[C2799465526-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 49s]
Raw output
RuntimeError: NetCDF: Can't open HDF5 attribute
collection_concept_id = 'C2799465526-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2799465526-POCLOUD', 'concept-id': 'G3127099964-POCLOUD', 'concept-type': 'granul...DateTime': '2024-06-24T11:13:37.759Z'}, 'GranuleUR': 'SWOT_IPN_2PfP017_138_20240623_230318_20240623_235445_swot', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2799465526-POCLOUD'}]}, 'meta': {'association-details': {'collecti...a: off nadir angle from Ku band', 'Dimensions': [{'Name': 'time', 'Size': 2690, 'Type': 'TIME_DIMENSION'}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2799465520')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
>       lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)

verify_collection.py:398: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:318: in get_lat_lon_var_names
    nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
src/netCDF4/_netCDF4.pyx:2495: in netCDF4._netCDF4.Dataset.__init__
    ???
src/netCDF4/_netCDF4.pyx:1891: in netCDF4._netCDF4._get_grps
    ???
src/netCDF4/_netCDF4.pyx:3616: in netCDF4._netCDF4.Group.__init__
    ???
src/netCDF4/_netCDF4.pyx:1927: in netCDF4._netCDF4._get_vars
    ???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

>   ???
E   RuntimeError: NetCDF: Can't open HDF5 attribute

src/netCDF4/_netCDF4.pyx:2034: RuntimeError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3127099964-POCLOUD for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2799465526-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%2867.742205%3A77.420995%29&subset=lon%28124.586265%3A178.579135%29&granuleId=G3127099964-POCLOUD
INFO     root:verify_collection.py:385 Submitted harmony job 1b889b2a-a4b3-4c47-b268-b0f064d46e74
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2799465520/69127852_SWOT_IPN_2PfP017_138_20240623_230318_20240623_235445_subsetted.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
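
This failure, like the C1251101457-GES_DISC one below that hits "NetCDF: HDF error" on the same file, occurs while reopening the fixed relative path 'my_copy_file.nc'. The tmp paths throughout this report show pytest-xdist workers (popen-gw0 through popen-gw8) running in parallel, and every test copies its granule to that same name in a shared working directory, so concurrent workers can overwrite the copy mid-open; that is one plausible explanation for these HDF5-level errors, not a confirmed diagnosis. A sketch of a worker-safe variant, with the tempfile handling as an assumption rather than the suite's actual fix:

    import os
    import shutil
    import tempfile

    import netCDF4

    def open_private_copy(file_to_subset):
        """Copy the granule to a unique temp path before opening it read/write."""
        fd, copy_path = tempfile.mkstemp(suffix='.nc')
        os.close(fd)  # netCDF4 reopens the file by path
        shutil.copy(file_to_subset, copy_path)
        return netCDF4.Dataset(copy_path, mode='r+'), copy_path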

test_spatial_subset[C1251101457-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 22s]
Raw output
OSError: [Errno -101] NetCDF: HDF error: 'my_copy_file.nc'
collection_concept_id = 'C1251101457-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1251101457-GES_DISC', 'concept-id': 'G3056242015-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1251101457-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -999.989990234375}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1251101450')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
>       lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)

verify_collection.py:398: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:318: in get_lat_lon_var_names
    nc_dataset = netCDF4.Dataset('my_copy_file.nc', mode='r+')
src/netCDF4/_netCDF4.pyx:2469: in netCDF4._netCDF4.Dataset.__init__
    ???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

>   ???
E   OSError: [Errno -101] NetCDF: HDF error: 'my_copy_file.nc'

src/netCDF4/_netCDF4.pyx:2028: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3056242015-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1251101457-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3056242015-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 47511a19-be88-4d10-884c-a581135a41e7
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1251101450/69127854_MLS-Aura_L2GP-HCN_v04-25-c01_2024d152_subsetted.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray

test_spatial_subset[C1627516300-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 37s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0e68986d40>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0e712f97e0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1627516300-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516300-GES_DISC', 'concept-id': 'G1902371249-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516300-GES_DISC'}]}, 'meta': {'association-details': {'collect...asa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ozone_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1627516300')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:432: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:423: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0e68986d40>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0e712f97e0>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G1902371249-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516300-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371249-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job fbc5152a-4f33-4502-aa27-4c3cfd3557ea
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1627516300/69127942_S5P_OFFL_L2_O3_20200712T224601_20200713T002730_14238_01_010108_20200715T122623_subsetted.nc4
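
The traceback above points at shared state in group_walk: when the walk descends into /METADATA and finds no latitude, 'METADATA' stays behind in group_list, so a later hit on the root-level PRODUCT group joins to '/METADATA/PRODUCT', a path that does not exist (the same failure recurs for C2087216100-GES_DISC and C1442068505-GES_DISC below). A minimal per-branch rewrite, sketched against the same netCDF4/xarray objects the test already holds (subsetted_filepath, lat_var_name):

    def find_group_with_var(nc_group, var_name, path=""):
        # Depth-first search for the first group containing var_name.
        # The path is rebuilt per branch instead of kept in a shared
        # list, so a failed descent into one sibling (e.g. /METADATA)
        # cannot leak into the path of another (e.g. /PRODUCT).
        if var_name in nc_group.variables:
            return path or "/"
        for name, child in nc_group.groups.items():
            found = find_group_with_var(child, var_name, f"{path}/{name}")
            if found:
                return found
        return None

    with netCDF4.Dataset(subsetted_filepath) as f:
        group = find_group_with_var(f, lat_var_name)
    if group is not None:
        subsetted_ds_new = xarray.open_dataset(subsetted_filepath,
                                               group=group,
                                               decode_times=False)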

test_spatial_subset[C1918209846-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 38s]
Raw output
ValueError: did not find a match in any of xarray's currently installed IO backends ['netcdf4']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:
https://docs.xarray.dev/en/stable/getting-started-guide/installing.html
https://docs.xarray.dev/en/stable/user-guide/io.html
collection_concept_id = 'C1918209846-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918209846-GES_DISC', 'concept-id': 'G3130461149-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918209846-GES_DISC'}]}, 'meta': {'association-details': {'collect...tracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': 9.969209968386869e+36}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1918209840')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
>       subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)

verify_collection.py:395: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:552: in open_dataset
    engine = plugins.guess_engine(filename_or_obj)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

store_spec = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1918209840/69128024_S5P_OFFL_L2_O3_20240623T235001_20240624T013130_34697_03_020601_20240625T154916_subsetted.nc4')

    def guess_engine(
        store_spec: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
    ) -> str | type[BackendEntrypoint]:
        engines = list_engines()
    
        for engine, backend in engines.items():
            try:
                if backend.guess_can_open(store_spec):
                    return engine
            except PermissionError:
                raise
            except Exception:
                warnings.warn(f"{engine!r} fails while guessing", RuntimeWarning)
    
        compatible_engines = []
        for engine, (_, backend_cls) in BACKEND_ENTRYPOINTS.items():
            try:
                backend = backend_cls()
                if backend.guess_can_open(store_spec):
                    compatible_engines.append(engine)
            except Exception:
                warnings.warn(f"{engine!r} fails while guessing", RuntimeWarning)
    
        installed_engines = [k for k in engines if k != "store"]
        if not compatible_engines:
            if installed_engines:
                error_msg = (
                    "did not find a match in any of xarray's currently installed IO "
                    f"backends {installed_engines}. Consider explicitly selecting one of the "
                    "installed engines via the ``engine`` parameter, or installing "
                    "additional IO dependencies, see:\n"
                    "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html\n"
                    "https://docs.xarray.dev/en/stable/user-guide/io.html"
                )
            else:
                error_msg = (
                    "xarray is unable to open this file because it has no currently "
                    "installed IO backends. Xarray's read/write support requires "
                    "installing optional IO dependencies, see:\n"
                    "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html\n"
                    "https://docs.xarray.dev/en/stable/user-guide/io"
                )
        else:
            error_msg = (
                "found the following matches with the input file in xarray's IO "
                f"backends: {compatible_engines}. But their dependencies may not be installed, see:\n"
                "https://docs.xarray.dev/en/stable/user-guide/io.html \n"
                "https://docs.xarray.dev/en/stable/getting-started-guide/installing.html"
            )
    
>       raise ValueError(error_msg)
E       ValueError: did not find a match in any of xarray's currently installed IO backends ['netcdf4']. Consider explicitly selecting one of the installed engines via the ``engine`` parameter, or installing additional IO dependencies, see:
E       https://docs.xarray.dev/en/stable/getting-started-guide/installing.html
E       https://docs.xarray.dev/en/stable/user-guide/io.html

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/plugins.py:197: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3130461149-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1918209846-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.430375%3A-59.420625%29&subset=lon%28-165.206125%3A170.823125%29&granuleId=G3130461149-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 6818adb1-5fa5-4dbe-9f89-ee5c2f7cbf2b
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1918209840/69128024_S5P_OFFL_L2_O3_20240623T235001_20240624T013130_34697_03_020601_20240625T154916_subsetted.nc4
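
xarray's engine guessing only fails like this when no installed backend recognizes the file, which for a fresh Harmony download usually means the body is not a netCDF/HDF5 container at all (an empty file, or an error page saved under the .nc4 name). A cheap signature check before open_dataset would turn that into a clearer assertion; a sketch assuming only the pathlib path the test already has:

    import pathlib

    def looks_like_netcdf(path: pathlib.Path) -> bool:
        # netCDF-4 files are HDF5 containers (magic \x89HDF\r\n\x1a\n);
        # classic netCDF starts with b'CDF'. Anything else will make
        # xarray's guess_engine raise exactly the ValueError above.
        with path.open("rb") as fh:
            magic = fh.read(8)
        return magic.startswith(b"\x89HDF") or magic.startswith(b"CDF")

    assert looks_like_netcdf(subsetted_filepath), \
        f"{subsetted_filepath} is not a netCDF/HDF5 file"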

test_spatial_subset[C2251464495-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 10m 0s]
Raw output
Failed: Timeout >600.0s
collection_concept_id = 'C2251464495-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2251464495-POCLOUD', 'concept-id': 'G3123215106-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2251464495-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ame': 'ddm', 'Size': 5, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2251464490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7faa278fe380>
job_id = '7e4b270b-f6bb-40b9-9142-7b79cffdb062', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
                        raise ProcessingFailedException(job_id, message)
                    if status == 'canceled':
                        print('Job has been canceled.')
                        break
                    if status == 'paused':
                        print('\nJob has been paused. Call `resume()` to resume.', file=sys.stderr)
                        break
                    if (not running_w_errors_logged and status == 'running_with_errors'):
                        print('\nJob is running with errors.', file=sys.stderr)
                        running_w_errors_logged = True
    
                    # This gets around an issue with progressbar. If we update() with 0, the
                    # output shows up as "N/A". If we update with, e.g. 0.1, it rounds down or
                    # truncates to 0 but, importantly, actually displays that.
                    if progress == 0:
                        progress = 0.1
    
                    for _ in range(intervals):
                        bar.update(progress)  # causes spinner to rotate even when no data change
                        sys.stdout.flush()  # ensures correct behavior in Jupyter notebooks
                        if progress >= 100:
                            break
                        else:
>                           time.sleep(ui_update_interval)
E                           Failed: Timeout >600.0s

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:1009: Failed
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3123215106-POCLOUD for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2251464495-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-38.0%3A38.0%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3123215106-POCLOUD
INFO     root:verify_collection.py:385 Submitted harmony job 7e4b270b-f6bb-40b9-9142-7b79cffdb062
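
Here pytest's @pytest.mark.timeout(600) kills the test from outside while wait_for_processing is mid-sleep, so the report carries only "Timeout >600.0s" and no job state. Polling with an explicit wall-clock budget below the pytest limit would fail with the last known progress instead; a sketch built on Client.progress(), which the traceback above shows returning (progress, status, message):

    import time

    def wait_with_budget(harmony_client, job_id, budget_s=540, poll_s=10):
        # Poll the job ourselves and give up before pytest's 600 s
        # timeout fires, so the failure message carries the job state.
        progress, status = 0, 'running'
        deadline = time.monotonic() + budget_s
        while time.monotonic() < deadline:
            progress, status, message = harmony_client.progress(job_id)
            if status == 'failed':
                raise RuntimeError(f'job {job_id} failed: {message}')
            if progress >= 100:
                return
            time.sleep(poll_s)
        raise TimeoutError(
            f'job {job_id} still {status} at {progress}% after {budget_s}s')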

test_spatial_subset[C2087216100-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 1m 3s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe90a5a2f40>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7fe90ee56950>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C2087216100-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216100-GES_DISC', 'concept-id': 'G3130460866-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216100-GES_DISC'}]}, 'meta': {'association-details': {'collect...m/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_mid_pressure_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2087216100')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:432: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:423: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe90a5a2f40>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7fe90ee56950>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G3130460866-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216100-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.4551%3A-59.9669%29&subset=lon%28-159.4374%3A127.6754%29&granuleId=G3130460866-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 9497fa2c-327e-4740-83a0-99da7aae0801
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2087216100/69128064_S5P_OFFL_L2_AER_LH_20240624T031300_20240624T045430_34699_03_020600_20240625T190510_subsetted.nc4

test_temporal_subset[C2068529568-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 35s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2068529568-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2068529568-POCLOUD', 'concept-id': 'G2586738585-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2068529568-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 4193, 'Type': 'ALONG_TRACK_DIMENSION'}, {'Name': 'ni', 'Size': 243, 'Type': 'CROSS_TRACK_DIMENSION'}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_temporal_subset_C206852950')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f456f85a860>
job_id = 'b075414e-5686-4a2a-9996-79eac2efd639', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2586738585-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2068529568-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222023-01-11T10%3A04%3A49%22%3A%222023-01-11T10%3A54%3A15%22%29&granuleId=G2586738585-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job b075414e-5686-4a2a-9996-79eac2efd639
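
"Service request failed with an unknown error" is the generic message Harmony surfaces when a work item dies without a mapped error; the same message appears for C2832221740-POCLOUD and C2036877509-POCLOUD below. The job record itself may carry more detail. A hedged sketch for pulling it (the /jobs/{id} endpoint and the shape of its 'errors' field are assumptions, not confirmed by this log) using the same bearer token the test fixtures provide:

    import requests

    def fetch_job_errors(job_id, token,
                         root='https://harmony.earthdata.nasa.gov'):
        # Assumed endpoint: GET {root}/jobs/{job_id} with an EDL bearer
        # token, returning job JSON with an 'errors' list on failure.
        resp = requests.get(f'{root}/jobs/{job_id}',
                            headers={'Authorization': f'Bearer {token}'},
                            timeout=30)
        resp.raise_for_status()
        return resp.json().get('errors', [])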

test_temporal_subset[C2724057189-LARC_CLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 3s]
Raw output
ValueError: time data '2023-12-30T23:24:23+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C2724057189-LARC_CLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2724057189-LARC_CLOUD', 'concept-id': 'G2829371222-LARC_CLOUD', 'concept-type': '... 'ProductionDateTime': '2023-12-31T03:28:32+00:00'}, 'GranuleUR': 'TEMPO_NO2_L2_V01_20231230T232423Z_S011G06.nc', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2724057189-LARC_CLOUD'}]}, 'meta': {'association-details': {'colle...acted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -1.0000000150474662e+30}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_temporal_subset_C272405710')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:515: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2023-12-30T23:24:23+00:00', format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2023-12-30T23:24:23+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2829371222-LARC_CLOUD for test
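
get_half_temporal_extent pins strptime to '%Y-%m-%dT%H:%M:%S.%fZ', but UMM-G BeginningDateTime values also arrive as '2023-12-30T23:24:23+00:00' (no fractional seconds, numeric offset). Parsing with fromisoformat handles both flavors; a sketch of the helper under that assumption (the middle-half arithmetic here is hypothetical, inferred from the subset=time ranges logged by the other temporal tests):

    from datetime import datetime

    def parse_umm_datetime(value: str) -> datetime:
        # UMM-G datetimes arrive in several ISO-8601 flavors; map the
        # trailing 'Z' that fromisoformat rejects on Python < 3.11.
        return datetime.fromisoformat(value.replace('Z', '+00:00'))

    def get_half_temporal_extent(start: str, end: str) -> dict:
        # Keep the middle half of the granule's temporal range.
        start_dt = parse_umm_datetime(start)
        end_dt = parse_umm_datetime(end)
        quarter = (end_dt - start_dt) / 4
        return {'start': start_dt + quarter, 'stop': end_dt - quarter}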

test_spatial_subset[C1442068505-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 34s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe90a507040>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7fe90ee56950>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1442068505-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068505-GES_DISC', 'concept-id': 'G1628685470-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068505-GES_DISC'}]}, 'meta': {'association-details': {'collect...hdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/methane_mixing_ratio_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1442068500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:432: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:423: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fe90a507040>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7fe90ee56950>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:365 Using granule G1628685470-GES_DISC for test
INFO     root:verify_collection.py:381 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068505-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.468625%3A-64.100375%29&subset=lon%28-112.00605%3A163.26405%29&granuleId=G1628685470-GES_DISC
INFO     root:verify_collection.py:385 Submitted harmony job 62fae8ea-8939-4193-a914-7f1592db5466
INFO     root:verify_collection.py:391 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1442068500/69128218_S5P_OFFL_L2_CH4_20190806T003836_20190806T022006_09387_01_010302_20190812T015759_subsetted.nc4

test_temporal_subset[C2832221740-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 51s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2832221740-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832221740-POCLOUD', 'concept-id': 'G3059748792-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832221740-POCLOUD'}]}, 'meta': {'association-details': {'collecti...rization_2', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_temporal_subset_C283222170')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f0e687f5e70>
job_id = 'cad06a99-4273-4177-826a-aaf59a36ca2a', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3059748792-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2832221740-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-05-29T21%3A47%3A47.250000%22%3A%222024-05-29T22%3A38%3A39.750000%22%29&granuleId=G3059748792-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job cad06a99-4273-4177-826a-aaf59a36ca2a

test_temporal_subset[C2036877509-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 33s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2036877509-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2036877509-POCLOUD', 'concept-id': 'G3130431005-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2036877509-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ize': 409, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_temporal_subset_C203687750')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f2c1ac7e8c0>
job_id = 'a15868a0-6522-4a39-8c46-2a07f7c465df', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3130431005-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2036877509-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-06-25T19%3A59%3A48.500000%22%3A%222024-06-25T20%3A50%3A27.500000%22%29&granuleId=G3130431005-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job a15868a0-6522-4a39-8c46-2a07f7c465df
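
Note: get_half_temporal_extent itself is not shown in this report, but the later
traceback (verify_collection.py:167) shows it parsing with
'%Y-%m-%dT%H:%M:%S.%fZ', and the .250000/.750000 fractional seconds in the
logged request URLs are consistent with trimming a quarter of the range from
each end. A hedged sketch under those assumptions:

    from datetime import datetime

    def get_half_temporal_extent(start: str, end: str) -> dict:
        """Return a window half as long as [start, end], centred on its midpoint."""
        start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
        end_dt = datetime.strptime(end, '%Y-%m-%dT%H:%M:%S.%fZ')
        quarter = (end_dt - start_dt) / 4
        # The 'start'/'stop' dict shape is what harmony.Request(temporal=...)
        # accepts in the tests above.
        return {'start': start_dt + quarter, 'stop': end_dt - quarter}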


test_temporal_subset[C2847232536-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 33s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2847232536-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2847232536-POCLOUD', 'concept-id': 'G3130420622-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2847232536-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3200, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_temporal_subset_C284723250')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f456f61a4d0>
job_id = 'ea525174-d720-4c7a-81c3-689feac9cf5f', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3130420622-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2847232536-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-06-25T20%3A37%3A32.250000%22%3A%222024-06-25T20%3A38%3A14.750000%22%29&granuleId=G3130420622-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job ea525174-d720-4c7a-81c3-689feac9cf5f


test_temporal_subset[C2205618339-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 36s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2205618339-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2205618339-POCLOUD', 'concept-id': 'G2214736338-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2205618339-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 6922, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_temporal_subset_C220561830')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f456f6c5b40>
job_id = '8eb05572-2ca0-4079-93a4-de67ef366398', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2214736338-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2205618339-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222020-06-17T11%3A13%3A21.500000%22%3A%222020-06-17T11%3A41%3A06.500000%22%29&granuleId=G2214736338-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job 8eb05572-2ca0-4079-93a4-de67ef366398
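
Note: the request URLs logged above are OGC API - Coverages rangeset URLs (as
rendered by harmony_client.request_as_url) with the temporal window
percent-encoded. Decoding one with only the standard library recovers the
subset=time(...) parameter:

    from urllib.parse import unquote

    encoded = ('subset=time%28%222020-06-17T11%3A13%3A21.500000%22%3A'
               '%222020-06-17T11%3A41%3A06.500000%22%29')
    print(unquote(encoded))
    # subset=time("2020-06-17T11:13:21.500000":"2020-06-17T11:41:06.500000")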


test_temporal_subset[C2596986276-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 10m 0s]
Raw output
Failed: Timeout >600.0s
collection_concept_id = 'C2596986276-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2596986276-POCLOUD', 'concept-id': 'G3130301193-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2596986276-POCLOUD'}]}, 'meta': {'association-details': {'collecti...: 243, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -32768.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_temporal_subset_C259698620')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:984: in wait_for_processing
    progress, status, message = self.progress(job_id)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:955: in progress
    response = session.get(self._status_url(job_id))
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:602: in get
    return self.request("GET", url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
    resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
    r = adapter.send(request, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589: in send
    resp = conn.urlopen(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
    response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
    response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
    httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
    response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
    version, status, reason = self._read_status()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:279: in _read_status
    line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/socket.py:705: in readinto
    return self._sock.recv_into(b)
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1307: in recv_into
    return self.read(nbytes, buffer)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
len = 8192, buffer = <memory at 0x7fbcba67bf40>

    def read(self, len=1024, buffer=None):
        """Read up to LEN bytes and return them.
        Return zero-length string on EOF."""
    
        self._checkClosed()
        if self._sslobj is None:
            raise ValueError("Read on closed or unwrapped SSL socket.")
        try:
            if buffer is not None:
>               return self._sslobj.read(len, buffer)
E               Failed: Timeout >600.0s

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1163: Failed
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3130301193-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2596986276-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-06-25T16%3A58%3A44.500000%22%3A%222024-06-25T17%3A22%3A19.500000%22%29&granuleId=G3130301193-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job 9f86afd6-df16-4e2d-8c98-6049ab0bcd30
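
Note: unlike the ProcessingFailedException cases above, this failure is a pytest
timeout: the full 600 s budget was spent blocked in an SSL read while polling
job status. A hedged sketch of a bounded polling loop built from Client.progress
(the method shown in the tracebacks above); the 300 s budget and 10 s poll
interval are assumptions, and a single hung HTTP request would still need a
socket-level timeout inside harmony-py itself:

    import time

    deadline = time.monotonic() + 300
    while True:
        progress, status, message = harmony_client.progress(job_id)
        if status == 'failed':
            raise AssertionError(f"Harmony job {job_id} failed: {message}")
        if progress >= 100:
            break  # job finished; proceed to download
        if time.monotonic() > deadline:
            raise AssertionError(f"Harmony job {job_id} still running after 300 s")
        time.sleep(10)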


test_temporal_subset[C2499940517-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 36s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size
collection_concept_id = 'C2499940517-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2499940517-POCLOUD', 'concept-id': 'G2529918741-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2499940517-POCLOUD'}]}, 'meta': {'association-details': {'collecti..._TRACK_DIMENSION'}, {'Name': 'ni', 'Size': 120, 'Type': 'CROSS_TRACK_DIMENSION'}], 'LongName': 'longitude', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_temporal_subset_C249994050')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7fed3e0ca6b0>
job_id = 'ccdac7c6-32de-409c-9312-ec7b94dbd01e', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2529918741-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2499940517-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222016-02-23T04%3A21%3A40%22%3A%222016-02-23T04%3A23%3A08%22%29&granuleId=G2529918741-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job ccdac7c6-32de-409c-9312-ec7b94dbd01e
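
Note: this WorkItem message ("Could not get result item file size") differs from
the l2ss-py "unknown error" failures above and points at result staging rather
than the subsetter itself. A small hedged helper for bucketing this run's
failures by message; the function name is made up for illustration, and the
strings are verbatim from this report:

    def classify_harmony_failure(message: str) -> str:
        # Buckets derived from the failure messages observed in this run.
        if 'Service request failed with an unknown error' in message:
            return 'l2ss-py service error'
        if 'Could not get result item file size' in message:
            return 'result staging error'
        return 'other harmony failure'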


test_temporal_subset[C2596983413-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 31s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2596983413-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2596983413-POCLOUD', 'concept-id': 'G3130301128-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2596983413-POCLOUD'}]}, 'meta': {'association-details': {'collecti...: 243, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -32768.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_temporal_subset_C259698340')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f456f871990>
job_id = '27a8cc14-1049-4f04-b2c7-71bf69728a17', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3130301128-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2596983413-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-06-25T10%3A36%3A07%22%3A%222024-06-25T11%3A25%3A33%22%29&granuleId=G3130301128-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job 27a8cc14-1049-4f04-b2c7-71bf69728a17


test_temporal_subset[C2499940520-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 36s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2499940520-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2499940520-POCLOUD', 'concept-id': 'G2521659785-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2499940520-POCLOUD'}]}, 'meta': {'association-details': {'collecti... 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -3.4028235e+38}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_temporal_subset_C249994050')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f0e68a90ac0>
job_id = '1ade3ccf-08cd-41c0-b689-0cbc5f180492', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2521659785-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2499940520-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222015-12-04T05%3A38%3A52.500000%22%3A%222015-12-04T05%3A52%3A37.500000%22%29&granuleId=G2521659785-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job 1ade3ccf-08cd-41c0-b689-0cbc5f180492


test_temporal_subset[C1996881807-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 32s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C1996881807-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1996881807-POCLOUD', 'concept-id': 'G2011686539-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1996881807-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3200, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_temporal_subset_C199688180')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f9decb40b80>
job_id = 'ebe550d7-e547-42e4-b516-ede67d4ea687', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2011686539-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C1996881807-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222016-02-25T18%3A20%3A46%22%3A%222016-02-25T18%3A21%3A28%22%29&granuleId=G2011686539-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job ebe550d7-e547-42e4-b516-ede67d4ea687


test_temporal_subset[C2036878029-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 45s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error
collection_concept_id = 'C2036878029-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2036878029-POCLOUD', 'concept-id': 'G3130476207-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2036878029-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3712, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_temporal_subset_C203687800')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
    
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:530: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7fbb1c7928f0>
job_id = '057b91e8-d6f3-4333-87f0-2fc4b35d52ca', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G3130476207-POCLOUD for test
INFO     root:verify_collection.py:524 Sending harmony request https://harmony.earthdata.nasa.gov/C2036878029-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=time%28%222024-06-25T21%3A33%3A05.500000%22%3A%222024-06-25T21%3A39%3A16.500000%22%29&granuleId=G3130476207-POCLOUD
INFO     root:verify_collection.py:528 Submitted harmony job 057b91e8-d6f3-4333-87f0-2fc4b35d52ca
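
Note: every "unknown error" WorkItem failure in this run came from a single
submission attempt. If these turn out to be transient, one retry would keep
service hiccups out of the regression report; a hedged sketch using only names
from the tests above (the retry policy itself is an assumption, not project
practice):

    from harmony.harmony import ProcessingFailedException

    def submit_and_wait(client, request, retries=1):
        # Re-submit once after a failed WorkItem; re-raise on a second failure.
        for attempt in range(retries + 1):
            job_id = client.submit(request)
            try:
                client.wait_for_processing(job_id, show_progress=True)
                return job_id
            except ProcessingFailedException:
                if attempt == retries:
                    raise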


test_temporal_subset[C2724037909-LARC_CLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 5s]
Raw output
ValueError: time data '2023-12-30T23:24:23+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C2724037909-LARC_CLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2724037909-LARC_CLOUD', 'concept-id': 'G2829370114-LARC_CLOUD', 'concept-type': '...ProductionDateTime': '2023-12-31T02:12:29+00:00'}, 'GranuleUR': 'TEMPO_CLDO4_L2_V01_20231230T232423Z_S011G06.nc', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2724037909-LARC_CLOUD'}]}, 'meta': {'association-details': {'colle...acted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -1.0000000150474662e+30}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_temporal_subset_C272403790')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...M1SxaPfRwaV4PnJa7zbVFlZLcvBhRvf4vqzuF5YM7NH6FVZLvYlGKaafF6MKL_It0xU_qyGVtYqXMQZPxtw3-X8U0FJk7UpNs1KArBsF0dKaHHXtrfp8lA'

    @pytest.mark.timeout(600)
    def test_temporal_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_temporal_subset.__doc__ = f"Verify temporal subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:515: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2023-12-30T23:24:23+00:00', format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2023-12-30T23:24:23+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:511 Using granule G2829370114-LARC_CLOUD for test
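
Note: the root cause here is verify_collection.py:167, where
get_half_temporal_extent hard-codes '%Y-%m-%dT%H:%M:%S.%fZ', while this
LARC_CLOUD granule reports '2023-12-30T23:24:23+00:00' (offset form, no
fractional seconds). A hedged sketch of a tolerant parser covering both forms
seen in this run (the helper name is for illustration):

    from datetime import datetime

    def parse_umm_datetime(value: str) -> datetime:
        # Python 3.10's fromisoformat rejects a trailing 'Z', so normalise it
        # to an explicit UTC offset first; '+00:00' inputs pass through as-is.
        return datetime.fromisoformat(value.replace('Z', '+00:00'))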