Remove PR old association #63

GitHub Actions / Regression test results for ops: failed Sep 16, 2024 in 0s

12 errors, 54 fail, 522 pass in 38m 20s

588 tests in 1 suite (1 file), 38m 20s ⏱️
522 ✅ passed   0 💤 skipped   54 ❌ failed   12 🔥 errors

Results for commit facdaab.

Annotations

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C2087132178-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 54s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087132178-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087132178-GES_DISC', 'concept-id': 'G3239945906-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087132178-GES_DISC'}]}, 'meta': {'association-details': {'collect...'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/corner', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087132170')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff50d690440>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7ff50ff6fe40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7ff50ff6fd40>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239945906-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2087132178-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.032125%3A-67.86087500000001%29&subset=lon%28-161.582475%3A84.86747499999998%29&granuleId=G3239945906-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job a8bd05d3-510a-4347-9d20-979df1748629
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C2087132170/77185582_S5P_OFFL_L2_CO_20240914T141932_20240914T160101_35869_03_020701_20240916T040528_subsetted.nc4
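This failure, and the identical ones that follow, break at the same spot: once group_walk finds the group holding latitude/longitude, it opens a sibling data group and unconditionally takes its first variable via list(subsetted_ds_data.variables.keys())[0]. When that sibling group contains no variables after subsetting, the list is empty and the [0] lookup raises the IndexError reported above. Below is a minimal sketch of a guard, assuming the same xarray/netCDF4 layout as the test; pick_science_variable is a hypothetical helper for illustration, not the repository's fix.

# A minimal sketch, assuming the same group layout as the traceback above;
# pick_science_variable is a hypothetical name, not code from verify_collection.py.
import xarray


def pick_science_variable(subsetted_filepath, lat_group, candidate_groups):
    """Return (group_path, variable_name) for the first non-time sibling group
    that still contains at least one variable, or None if every candidate is empty."""
    for data_group in candidate_groups:
        if 'time' in str(data_group).lower():
            continue  # the original list comprehension filters out time groups the same way
        g_data = lat_group + '/' + data_group
        ds = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
        if len(ds.variables) > 0:  # the failing code indexed [0] without this check
            return g_data, list(ds.variables)[0]
    return None

With a guard like this, group_walk would attach a science_test variable only when a non-empty sibling group exists and would otherwise keep just the lat/lon dataset instead of raising.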

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C1627516287-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 38s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516287-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516287-GES_DISC', 'concept-id': 'G2084463561-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516287-GES_DISC'}]}, 'meta': {'association-details': {'collect...'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/corner', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd751903640>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd751903140>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd751903040>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2084463561-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516287-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2084463561-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 14583945-af74-4d26-839b-e1fd71fdbbf2
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1627516280/77185594_S5P_OFFL_L2_CO_20210701T170324_20210701T184453_19257_01_010400_20210703T065107_subsetted.nc4

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C1442068491-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 30s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068491-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068491-GES_DISC', 'concept-id': 'G1642673899-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068491-GES_DISC'}]}, 'meta': {'association-details': {'collect...s://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/aerosol_mid_height', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068490')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f879a00dc40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f879a00d740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f879a00d640>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1642673899-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068491-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.32457500000001%3A-63.840424999999996%29&subset=lon%28-112.0001%3A163.1541%29&granuleId=G1642673899-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job ef37ca8b-a606-40b7-a8fc-d5ac651a83a8
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068490/77185598_S5P_OFFL_L2_AER_LH_20190806T003836_20190806T022006_09387_01_010302_20190812T015801_subsetted.nc4

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C1442068511-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 44s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068511-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068511-GES_DISC', 'concept-id': 'G1629705055-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068511-GES_DISC'}]}, 'meta': {'association-details': {'collect...e/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_tropospheric_column_pdf_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1442068510')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42965e6b40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42965e6640>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f42965e6540>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1629705055-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068511-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1629705055-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 25f7fbf1-c1b0-4976-b7d5-e9dfcd31949d
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1442068510/77185612_S5P_OFFL_L2_NO2_20190806T003836_20190806T022006_09387_01_010302_20190812T015802_subsetted.nc4

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C2089270961-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 56s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2089270961-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2089270961-GES_DISC', 'concept-id': 'G3239555041-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2089270961-GES_DISC'}]}, 'meta': {'association-details': {'collect....0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_tropospheric_column_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2089270960')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4b6673b640>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f4b649e4940>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f4b649e4840>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239555041-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2089270961-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-83.3412%3A-68.3768%29&subset=lon%28-164.7099%3A88.6019%29&granuleId=G3239555041-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job cae3b06b-bda6-439d-b12f-d4564e55c4b6
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C2089270960/77190836_S5P_OFFL_L2_NO2_20240907T213749_20240907T231918_35774_03_020600_20240909T133909_subsetted.nc4

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C2087216530-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 32s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087216530-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216530-GES_DISC', 'concept-id': 'G3239945710-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216530-GES_DISC'}]}, 'meta': {'association-details': {'collect...Var', 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/layer'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2087216530')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f108d410640>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f108d3bae40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f108d3bad40>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239945710-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216530-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-84.9538%3A-67.9222%29&subset=lon%28-110.66492499999998%3A91.04192499999999%29&granuleId=G3239945710-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job e8b0732c-b166-41dd-8c3e-044eb177dca7
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2087216530/77190857_S5P_OFFL_L2_CH4_20240914T022905_20240914T041035_35862_03_020701_20240916T040004_subsetted.nc4

Check warning on line 0 in tests.verify_collection

test_spatial_subset[C2087216100-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 31s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087216100-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087216100-GES_DISC', 'concept-id': 'G3239945700-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087216100-GES_DISC'}]}, 'meta': {'association-details': {'collect...m/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_mid_pressure_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2087216100')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42965e7e40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42965e6b40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f42965e6c40>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239945700-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2087216100-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-84.830775%3A-67.760225%29&subset=lon%28-110.97979999999998%3A92.17580000000001%29&granuleId=G3239945700-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 5234e77a-4b97-4118-9774-57a470acc596
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C2087216100/77190869_S5P_OFFL_L2_AER_LH_20240914T022905_20240914T041035_35862_03_020701_20240916T040007_subsetted.nc4
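
Before changing the walk itself, it may help to confirm what the subsetter actually left in the file; the S5P AER_LH output above apparently contains a group whose variables were all dropped. A small standalone inspection sketch (not part of verify_collection.py) that prints the variable count per group:

    import netCDF4

    def report_group_sizes(path):
        # Hedged diagnostic sketch: walk every group in the downloaded .nc4 and
        # print how many variables each group still contains after subsetting.
        def walk(group, prefix=""):
            print(f"{prefix or '/'}: {len(group.variables)} variables")
            for name, child in group.groups.items():
                walk(child, f"{prefix}/{name}")
        with netCDF4.Dataset(path) as ds:
            walk(ds)

    # usage: report_group_sizes(str(subsetted_filepath))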

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1918210023-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 1m 0s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918210023-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918210023-GES_DISC', 'concept-id': 'G3239977331-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918210023-GES_DISC'}]}, 'meta': {'association-details': {'collect...RL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/qa_value', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b8fe240>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b8fdb40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd42b8fda40>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239977331-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1918210023-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.043275%3A-67.801725%29&subset=lon%28-134.414675%3A57.38367499999999%29&granuleId=G3239977331-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 7d2f09e7-ab40-4fa5-a24a-e963d1b81d3f
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1918210020/77192897_S5P_OFFL_L2_HCHO_20240914T041035_20240914T055204_35863_03_020601_20240916T053125_subsetted.nc4

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1627516285-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 35s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0ece8f4240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0ed3159e10>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1627516285-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516285-GES_DISC', 'concept-id': 'G2084435970-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516285-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:431: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0ece8f4240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f0ed3159e10>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2084435970-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516285-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.82889999999999%3A-59.7251%29&subset=lon%28-77.22455%3A-1.6634499999999974%29&granuleId=G2084435970-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 6071797d-9311-4eb7-8d11-1d6d5812d644
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516280/77194903_S5P_OFFL_L2_AER_AI_20210701T170324_20210701T184453_19257_01_010400_20210703T065109_subsetted.nc4
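
This failure is slightly different: group_walk assembled the path '/METADATA/PRODUCT', which does not exist in the file, because group_list is shared across recursive calls and keeps the 'METADATA' entry that was appended while descending into a sibling branch. A sketch that threads the path through the recursion instead of mutating a shared list (function name is an assumption) avoids that leakage:

    import netCDF4

    def find_lat_group(nc_d, lat_var_name, path=""):
        # Hedged sketch: return the slash-joined path of the first group that
        # contains lat_var_name; the path is passed down explicitly, so entries
        # from sibling branches such as METADATA cannot leak into the result.
        if lat_var_name in nc_d.variables:
            return path or "/"
        for name, child in nc_d.groups.items():
            found = find_lat_group(child, lat_var_name, f"{path}/{name}")
            if found is not None:
                return found
        return None

    # usage: with netCDF4.Dataset(subsetted_filepath) as f:
    #            lat_group = find_lat_group(f, lat_var_name)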

Check failure on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1251101828-GES_DISC] (tests.verify_collection) with error

test-results/ops_test_report.xml [took 3s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7ff50d786cb0>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2981785644-GES_DISC&concept_id%5B%5D=V2981786871-GES_DISC&concept_id%5B%...81783910-GES_DISC&concept_id%5B%5D=V2981784959-GES_DISC&concept_id%5B%5D=V2981784369-GES_DISC&page_size=40&page_num=32'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V29817856...C&concept_id%5B%5D=V2981784959-GES_DISC&concept_id%5B%5D=V2981784369-GES_DISC&page_size=40&page_num=32', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param bool preload_content:
            If True, the response's body will be preloaded into memory.
    
        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.
    
        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = preload_content
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
    
            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Make the request on the HTTPConnection object
>           response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
    response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
    httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
    response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
    version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7ff50c2e0790>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           http.client.RemoteDisconnected: Remote end closed connection without response

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7ff50c4e3eb0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
>           resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
    retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
    raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
    raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
    response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
    response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
    httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
    response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
    version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7ff50c2e0790>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError

During handling of the above exception, another exception occurred:

cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1251101828-GES_DISC', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.fixture(scope="function")
    def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
        collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
        variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
    
        collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
        collection_associations = collection_res.get("associations")
        variable_concept_ids = collection_associations.get("variables")
    
        if variable_concept_ids is None and env == 'uat':
            pytest.skip('There are no umm-v associated with this collection in UAT')
    
        variables = []
        for i in range(0, len(variable_concept_ids), 40):
            variables_items = variable_query \
                .concept_id(variable_concept_ids[i:i + 40]) \
                .token(bearer_token) \
                .format('umm_json') \
>               .get_all()

verify_collection.py:159: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
    return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
    response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
    return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
    return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
    resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7ff50c4e3eb0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )
    
        except (ProtocolError, OSError) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
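
The error above is a setup failure rather than a subsetting failure: CMR dropped the connection on page 32 of the paginated variable query, so the collection_variables fixture never reached the test body. A small retry wrapper around the python_cmr call (helper name, attempt count, and backoff are assumptions) would make the fixture tolerant of this kind of transient disconnect:

    import time
    import requests

    def get_all_with_retries(query, attempts=3, backoff=5):
        # Hedged sketch: retry the paginated CMR query a few times with a simple
        # linear backoff instead of failing the whole fixture on one dropped
        # connection.
        for attempt in range(1, attempts + 1):
            try:
                return query.get_all()
            except requests.exceptions.ConnectionError:
                if attempt == attempts:
                    raise
                time.sleep(backoff * attempt)

    # usage inside the fixture:
    # variables_items = get_all_with_retries(
    #     variable_query.concept_id(variable_concept_ids[i:i + 40])
    #                   .token(bearer_token)
    #                   .format('umm_json'))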

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1627516292-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 55s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516292-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516292-GES_DISC', 'concept-id': 'G1898261144-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516292-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/layer', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1627516290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b894f40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b894840>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd42b894740>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1898261144-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516292-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1898261144-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job c5075bac-bc72-4d1d-9c93-2c008bc34369
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1627516290/77207232_S5P_OFFL_L2_HCHO_20200712T224601_20200713T002730_14238_01_010108_20200715T122623_subsetted.nc4
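Note: the IndexError in this failure is raised at `sci_var = list(subsetted_ds_data.variables.keys())[0]`: the sub-group chosen to supply a science variable contains no variables in the subsetted file, so indexing the empty list fails. A minimal defensive sketch follows, reusing the names from the test above (`nc_d`, `g`, `lat_group`, `subsetted_filepath`, `subsetted_ds_new`); the skip-empty-group behaviour and the helper name are assumptions for illustration, not the repository's actual fix.

    import xarray

    def attach_science_variable(nc_d, g, lat_group, subsetted_filepath, subsetted_ds_new):
        # Candidate sibling groups that are not time-related, as in the original walk
        candidate_groups = [v for v in nc_d.groups[g].groups.keys()
                            if 'time' not in str(v).lower()]
        for data_group in candidate_groups:
            g_data = f"{lat_group}/{data_group}"
            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data,
                                                    decode_times=False)
            variables = list(subsetted_ds_data.variables.keys())
            if variables:  # guard: some subsetted sub-groups contain no variables at all
                subsetted_ds_new['science_test'] = subsetted_ds_data[variables[0]]
                break
        return subsetted_ds_new

Iterating over every candidate sub-group, rather than taking only the first one, also avoids committing to a group that the subsetter left empty.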

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1442068510-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 46s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1442068510-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068510-GES_DISC', 'concept-id': 'G1628685468-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068510-GES_DISC'}]}, 'meta': {'association-details': {'collect...mm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/averaging_kernel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068510')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f8798d8e140>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f8798d8da40>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f8798d8d940>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1628685468-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068510-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628685468-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job a7720156-6cc2-45a5-bd92-7b2d01d2c5cc
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068510/77207239_S5P_OFFL_L2_HCHO_20190806T003836_20190806T022006_09387_01_010107_20190812T015759_subsetted.nc4

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1918210292-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 1m 9s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1918210292-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1918210292-GES_DISC', 'concept-id': 'G3239977748-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1918210292-GES_DISC'}]}, 'meta': {'association-details': {'collect...v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/number_of_slant_columns_win2', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1918210290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f2c4cd22440>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f2c4cd23740>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f2c4cd23b40>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239977748-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1918210292-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-84.6996%3A-67.8124%29&subset=lon%28-35.0227%3A168.21269999999998%29&granuleId=G3239977748-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job df09ec8d-005d-4b22-9578-28f9cadd568f
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1918210290/77207247_S5P_OFFL_L2_SO2_20240913T212437_20240913T230607_35859_03_020601_20240916T054739_subsetted.nc4

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1442068508-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 1m 4s]
Raw output
OSError: [Errno group not found: PRODUCT] 'PRODUCT'
ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f8798d54240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f879e7e1e10>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
>                   ds = ds.groups[key]
E                   KeyError: 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:190: KeyError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1442068508-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1442068508-GES_DISC', 'concept-id': 'G1628710396-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1442068508-GES_DISC'}]}, 'meta': {'association-details': {'collect...v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/SUPPORT_DATA/DETAILED_RESULTS/fitted_radiance_squeeze_win3', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:431: in group_walk
    subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/api.py:571: in open_dataset
    backend_ds = backend.open_dataset(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:646: in open_dataset
    store = NetCDF4DataStore.open(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:409: in open
    return cls(manager, group=group, mode=mode, lock=lock, autoclose=autoclose)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:356: in __init__
    self.format = self.ds.data_model
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:418: in ds
    return self._acquire()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:413: in _acquire
    ds = _nc4_require_group(root, self._group, self._mode)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

ds = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f8798d54240>
group = '/METADATA/PRODUCT', mode = 'r'
create_group = <function _netcdf4_create_group at 0x7f879e7e1e10>

    def _nc4_require_group(ds, group, mode, create_group=_netcdf4_create_group):
        if group in {None, "", "/"}:
            # use the root group
            return ds
        else:
            # make sure it's a string
            if not isinstance(group, str):
                raise ValueError("group must be a string or None")
            # support path-like syntax
            path = group.strip("/").split("/")
            for key in path:
                try:
                    ds = ds.groups[key]
                except KeyError as e:
                    if mode != "r":
                        ds = create_group(ds, key)
                    else:
                        # wrap error to provide slightly more helpful message
>                       raise OSError(f"group not found: {key}", e)
E                       OSError: [Errno group not found: PRODUCT] 'PRODUCT'

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/xarray/backends/netCDF4_.py:196: OSError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1628710396-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1442068508-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-82.265975%3A-63.873025000000005%29&subset=lon%28-112.057275%3A162.74827499999998%29&granuleId=G1628710396-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 1223cb74-a7bd-4b5e-a2a7-3d70a714287d
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1442068500/77207266_S5P_OFFL_L2_SO2_20190806T003836_20190806T022006_09387_01_010107_20190812T085130_subsetted.nc4
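Note: the OSError in this failure comes from asking xarray to open the group '/METADATA/PRODUCT', a path that does not exist in the subsetted file. `group_list` is shared across recursive `group_walk` calls, so group names collected while descending one branch (METADATA) get joined onto a group found in another (PRODUCT). A minimal sketch of a walk that derives the path per call instead of mutating a shared list; the helper name `find_lat_group` is hypothetical, and this is an illustration rather than the repository's fix.

    import netCDF4

    def find_lat_group(nc_group, lat_var_name, current_path=""):
        # Return the slash-separated path of the first group holding lat_var_name
        if lat_var_name in nc_group.variables:
            return current_path or "/"
        for name, child in nc_group.groups.items():
            child_path = f"{current_path}/{name}" if current_path else name
            found = find_lat_group(child, lat_var_name, child_path)
            if found is not None:
                return found
        return None

    # Usage sketch, mirroring the test's setup:
    # with netCDF4.Dataset(subsetted_filepath) as f:
    #     lat_group = find_lat_group(f, lat_var_name)

Because each recursion level builds its own `child_path`, a match found under PRODUCT can never inherit segments collected under METADATA.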

Check failure on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1968979566-POCLOUD] (tests.verify_collection) with error

test-results/ops_test_report.xml [took 15s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f2c4cf30c10>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2054030330-POCLOUD&concept_id%5B%5D=V2054030206-POCLOUD&concept_id%5B%5D...=V2054030408-POCLOUD&concept_id%5B%5D=V2054031234-POCLOUD&concept_id%5B%5D=V2054030112-POCLOUD&page_size=15&page_num=3'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V20540303...LOUD&concept_id%5B%5D=V2054031234-POCLOUD&concept_id%5B%5D=V2054030112-POCLOUD&page_size=15&page_num=3', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param bool preload_content:
            If True, the response's body will be preloaded into memory.
    
        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.
    
        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = preload_content
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
    
            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Make the request on the HTTPConnection object
>           response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:491: in _make_request
    raise new_e
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:467: in _make_request
    self._validate_conn(conn)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:1099: in _validate_conn
    conn.connect()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:653: in connect
    sock_and_verified = _ssl_wrap_socket_and_match_hostname(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:806: in _ssl_wrap_socket_and_match_hostname
    ssl_sock = ssl_wrap_socket(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:465: in ssl_wrap_socket
    ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:509: in _ssl_wrap_socket_impl
    return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:513: in wrap_socket
    return self.sslsocket_class._create(
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1104: in _create
    self.do_handshake()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ConnectionResetError: [Errno 104] Connection reset by peer

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1375: ConnectionResetError

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7f2c4cd0a7d0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
>           resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
    retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
    raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
    raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
    response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:491: in _make_request
    raise new_e
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:467: in _make_request
    self._validate_conn(conn)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:1099: in _validate_conn
    conn.connect()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:653: in connect
    sock_and_verified = _ssl_wrap_socket_and_match_hostname(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:806: in _ssl_wrap_socket_and_match_hostname
    ssl_sock = ssl_wrap_socket(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:465: in ssl_wrap_socket
    ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:509: in _ssl_wrap_socket_impl
    return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:513: in wrap_socket
    return self.sslsocket_class._create(
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1104: in _create
    self.do_handshake()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1375: ProtocolError

During handling of the above exception, another exception occurred:

cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C1968979566-POCLOUD', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.fixture(scope="function")
    def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
        collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
        variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
    
        collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
        collection_associations = collection_res.get("associations")
        variable_concept_ids = collection_associations.get("variables")
    
        if variable_concept_ids is None and env == 'uat':
            pytest.skip('There are no umm-v associated with this collection in UAT')
    
        variables = []
        for i in range(0, len(variable_concept_ids), 40):
            variables_items = variable_query \
                .concept_id(variable_concept_ids[i:i + 40]) \
                .token(bearer_token) \
                .format('umm_json') \
>               .get_all()

verify_collection.py:159: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
    return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
    response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
    return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
    return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
    resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7f2c4cd0a7d0>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )
    
        except (ProtocolError, OSError) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
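Note: this error occurred during fixture setup, not in the test body. It is a transient network failure: the `collection_variables` fixture pages CMR variable metadata and the connection is reset during the TLS handshake, and the traceback shows `Retry(total=0, ...)`, so nothing is retried. A minimal sketch of retrying the query a few times before giving up; the wrapper function, attempt count, and backoff are assumptions for illustration, and only the chained `concept_id(...).token(...).format('umm_json').get_all()` call comes from the fixture shown above.

    import time
    import requests

    def get_variables_with_retry(variable_query, concept_ids, bearer_token,
                                 attempts=3, backoff=5):
        # Retry only on connection-level failures; other errors propagate immediately
        for attempt in range(1, attempts + 1):
            try:
                return (variable_query
                        .concept_id(concept_ids)
                        .token(bearer_token)
                        .format('umm_json')
                        .get_all())
            except requests.exceptions.ConnectionError:
                if attempt == attempts:
                    raise
                time.sleep(backoff * attempt)  # linear backoff between attempts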

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C2746966927-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 9s]
Raw output
Exception: ('Unprocessable Entity', 'Error: the requested combination of operations: spatial subsetting on C2746966927-POCLOUD is unsupported')
collection_concept_id = 'C2746966927-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2746966927-POCLOUD', 'concept-id': 'G2816914995-POCLOUD', 'concept-type': 'granul..._XOverCal_20230709T115434_20230710T082110_PIB0_01.nc', 'SWOT_GranulePolygons_Cal_20230213T142800_v05.json', ...], ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2746966927-POCLOUD'}]}, 'meta': {'association-details': {'collecti...pixels', 'Size': 69, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 2147483647}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C2746966920')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:392: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f108d305d20>
response = <Response [422]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Unprocessable Entity', 'Error: the requested combination of operations: spatial subsetting on C2746966927-POCLOUD is unsupported')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2816914995-POCLOUD for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2746966927-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G2816914995-POCLOUD
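
Editor's note: the 422 "Unprocessable Entity" above is Harmony rejecting the request up front because no service chain supports spatial subsetting for this collection; no job is ever created. A minimal sketch of how the submit step could convert that condition into a pytest skip rather than a failure is shown below. The helper name, the substring check, and the skip-instead-of-fail policy are assumptions for illustration, not part of verify_collection.py.

    import pytest

    def submit_or_skip(harmony_client, harmony_request, collection_concept_id):
        """Submit a Harmony request, skipping the test when the operation combination is unsupported."""
        try:
            return harmony_client.submit(harmony_request)
        except Exception as err:
            # harmony-py raises a bare Exception carrying (reason, description); see the traceback above.
            if any('unsupported' in str(arg).lower() for arg in err.args):
                pytest.skip(f"Spatial subsetting unsupported for {collection_concept_id}: {err}")
            raise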

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1627516290-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 23s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516290-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516290-GES_DISC', 'concept-id': 'G2897233694-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516290-GES_DISC'}]}, 'meta': {'association-details': {'collect...: 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': 9.969209968386869e+36}], ...}}]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1627516290')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42951b6d40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f42951b5340>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f42951b5040>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2897233694-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516290-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.855425%3A-59.695575%29&subset=lon%28-76.722325%3A-1.608674999999998%29&granuleId=G2897233694-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job b745c751-4b57-44aa-a104-e933e4d69f04
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1627516290/77207292_S5P_OFFL_L2_AER_LH_20210701T170324_20210701T184453_19257_01_010400_20210703T102339_subsetted.nc4
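
Editor's note: the IndexError above comes from group_walk's assumption that the first non-"time" subgroup under the lat/lon group contains at least one variable; for this S5P AER_LH granule the chosen subgroup is empty after subsetting, so list(subsetted_ds_data.variables.keys())[0] fails. A minimal guard that scans the candidate subgroups and only attaches a science variable when one actually exists is sketched below, reusing the local names from the traceback; the helper itself is illustrative and not part of the test.

    import xarray

    def pick_science_variable(nc_group, subsetted_filepath, lat_group):
        """Return (group_path, first_variable_name) for the first non-'time' subgroup that has variables, else None."""
        candidates = [g for g in nc_group.groups if 'time' not in g.lower()]
        for data_group in candidates:
            g_data = f"{lat_group}/{data_group}"
            ds = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
            variables = list(ds.variables)
            if variables:  # guard against metadata-only or emptied subgroups
                return g_data, variables[0]
        return None  # caller falls back to the lat/lon group itself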

Check failure on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C2596983413-POCLOUD] (tests.verify_collection) with error

test-results/ops_test_report.xml [took 1s]
Raw output
failed on setup with "requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))"
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f2c4cd0ab60>
method = 'GET'
url = '/search/variables.umm_json?concept_id%5B%5D=V2600354773-POCLOUD&concept_id%5B%5D=V2600354755-POCLOUD&concept_id%5B%5D...V2600354803-POCLOUD&concept_id%5B%5D=V2600354767-POCLOUD&concept_id%5B%5D=V2600354708-POCLOUD&page_size=16&page_num=12'
body = None
headers = {'User-Agent': 'python-requests/2.32.2', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-alive'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/search/variables.umm_json', query='concept_id%5B%5D=V26003547...OUD&concept_id%5B%5D=V2600354767-POCLOUD&concept_id%5B%5D=V2600354708-POCLOUD&page_size=16&page_num=12', fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param bool preload_content:
            If True, the response's body will be preloaded into memory.
    
        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.
    
        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = preload_content
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
    
            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Make the request on the HTTPConnection object
>           response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:491: in _make_request
    raise new_e
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:467: in _make_request
    self._validate_conn(conn)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:1099: in _validate_conn
    conn.connect()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:653: in connect
    sock_and_verified = _ssl_wrap_socket_and_match_hostname(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:806: in _ssl_wrap_socket_and_match_hostname
    ssl_sock = ssl_wrap_socket(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:465: in ssl_wrap_socket
    ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:509: in _ssl_wrap_socket_impl
    return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:513: in wrap_socket
    return self.sslsocket_class._create(
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1104: in _create
    self.do_handshake()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           ConnectionResetError: [Errno 104] Connection reset by peer

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1375: ConnectionResetError

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7f2c4cd0b220>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
>           resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:589: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
    retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
    raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
    raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
    response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:491: in _make_request
    raise new_e
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:467: in _make_request
    self._validate_conn(conn)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:1099: in _validate_conn
    conn.connect()
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:653: in connect
    sock_and_verified = _ssl_wrap_socket_and_match_hostname(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/connection.py:806: in _ssl_wrap_socket_and_match_hostname
    ssl_sock = ssl_wrap_socket(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:465: in ssl_wrap_socket
    ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/urllib3/util/ssl_.py:509: in _ssl_wrap_socket_impl
    return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:513: in wrap_socket
    return self.sslsocket_class._create(
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1104: in _create
    self.do_handshake()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ssl.SSLSocket [closed] fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
block = False

    @_sslcopydoc
    def do_handshake(self, block=False):
        self._check_connected()
        timeout = self.gettimeout()
        try:
            if timeout == 0.0 and block:
                self.settimeout(None)
>           self._sslobj.do_handshake()
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/ssl.py:1375: ProtocolError

During handling of the above exception, another exception occurred:

cmr_mode = 'https://cmr.earthdata.nasa.gov/search/'
collection_concept_id = 'C2596983413-POCLOUD', env = 'ops'
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.fixture(scope="function")
    def collection_variables(cmr_mode, collection_concept_id, env, bearer_token):
        collection_query = cmr.queries.CollectionQuery(mode=cmr_mode)
        variable_query = cmr.queries.VariableQuery(mode=cmr_mode)
    
        collection_res = collection_query.concept_id(collection_concept_id).token(bearer_token).get()[0]
        collection_associations = collection_res.get("associations")
        variable_concept_ids = collection_associations.get("variables")
    
        if variable_concept_ids is None and env == 'uat':
            pytest.skip('There are no umm-v associated with this collection in UAT')
    
        variables = []
        for i in range(0, len(variable_concept_ids), 40):
            variables_items = variable_query \
                .concept_id(variable_concept_ids[i:i + 40]) \
                .token(bearer_token) \
                .format('umm_json') \
>               .get_all()

verify_collection.py:159: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:127: in get_all
    return self.get(self.hits())
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/cmr/queries.py:949: in get
    response = requests.get(
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
    return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
    return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
    resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7f2c4cd0b220>
request = <PreparedRequest [GET]>, stream = False
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )
    
        except (ProtocolError, OSError) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', ConnectionResetError(104, 'Connection reset by peer'))

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/requests/adapters.py:604: ConnectionError
--------------------------------- Captured Log ---------------------------------
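
Editor's note: this is a setup error, not a subsetting failure. The collection_variables fixture pages variable metadata out of CMR (page_num=12 in the URL above) and one of those HTTPS requests was reset during the TLS handshake. A small retry wrapper around the python-cmr query, sketched below under the assumption that a linear backoff is acceptable for transient resets, would let the fixture ride out this kind of blip; the helper name and retry policy are illustrative only.

    import time
    import requests

    def get_all_with_retries(query, attempts=3, backoff_seconds=5):
        """Run query.get_all(), retrying transient connection errors with linear backoff."""
        for attempt in range(1, attempts + 1):
            try:
                return query.get_all()
            except requests.exceptions.ConnectionError:
                if attempt == attempts:
                    raise  # give up after the final attempt
                time.sleep(backoff_seconds * attempt)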

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C2746966657-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 3s]
Raw output
Exception: ('Unprocessable Entity', 'Error: the requested combination of operations: spatial subsetting on C2746966657-POCLOUD is unsupported')
collection_concept_id = 'C2746966657-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2746966657-POCLOUD', 'concept-id': 'G2816914997-POCLOUD', 'concept-type': 'granul..._XOverCal_20230709T115434_20230710T082110_PIB0_01.nc', 'SWOT_GranulePolygons_Cal_20230213T142800_v05.json', ...], ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2746966657-POCLOUD'}]}, 'meta': {'association-details': {'collecti...pixels', 'Size': 69, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 2147483647}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C2746966650')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:392: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f8798cbae60>
response = <Response [422]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Unprocessable Entity', 'Error: the requested combination of operations: spatial subsetting on C2746966657-POCLOUD is unsupported')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2816914997-POCLOUD for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2746966657-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G2816914997-POCLOUD

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C2832224417-POCLOUD] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 28s]
Raw output
Failed: Unable to find latitude and longitude variables.
collection_concept_id = 'C2832224417-POCLOUD', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2832224417-POCLOUD', 'concept-id': 'G3229857008-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2832224410')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
>       lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)

verify_collection.py:406: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

dataset = <xarray.Dataset> Size: 232B
Dimensions:                 (ydim_grid: 1, xdim_grid: 1, look: 1,
                        ...                                 -0.43
    history_json:                                       [{"date_time": "2024-...
file_to_subset = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2832224410/77207313_RSS_SMAP_SSS_L2C_r51281_20240906T221222_2024250_NRT_V06.0_001.nc4')
collection_variable_list = [{'associations': {'collections': [{'concept-id': 'C2832224417-POCLOUD'}]}, 'meta': {'association-details': {'collecti...me': 'look', 'Size': 2, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
collection_concept_id = 'C2832224417-POCLOUD'

    def get_lat_lon_var_names(dataset: xarray.Dataset, file_to_subset: str, collection_variable_list: List[Dict], collection_concept_id: str):
        # Try getting it from UMM-Var first
        lat_var_json, lon_var_json, _ = get_coordinate_vars_from_umm(collection_variable_list)
        lat_var_name = get_variable_name_from_umm_json(lat_var_json)
        lon_var_name = get_variable_name_from_umm_json(lon_var_json)
    
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
    
        logging.warning("Unable to find lat/lon vars in UMM-Var")
    
        # If that doesn't work, try using cf-xarray to infer lat/lon variable names
        try:
            latitude = [lat for lat in dataset.cf.coordinates['latitude']
                             if lat.lower() in VALID_LATITUDE_VARIABLE_NAMES][0]
            longitude = [lon for lon in dataset.cf.coordinates['longitude']
                             if lon.lower() in VALID_LONGITUDE_VARIABLE_NAMES][0]
            return latitude, longitude
        except:
            logging.warning("Unable to find lat/lon vars using cf_xarray")
    
        # If that still doesn't work, try using l2ss-py directly
        try:
            # file not able to be flattened unless locally downloaded
            filename = f'my_copy_file_{collection_concept_id}.nc'
            shutil.copy(file_to_subset, filename)
            nc_dataset = netCDF4.Dataset(filename, mode='r+')
            # flatten the dataset
            nc_dataset_flattened = podaac.subsetter.group_handling.transform_grouped_dataset(nc_dataset, filename)
    
            args = {
                    'decode_coords': False,
                    'mask_and_scale': False,
                    'decode_times': False
                    }
    
            with xarray.open_dataset(
                xarray.backends.NetCDF4DataStore(nc_dataset_flattened),
                **args
                ) as flat_dataset:
                    # use l2ss-py to find lat and lon names
                    lat_var_names, lon_var_names = podaac.subsetter.subset.compute_coordinate_variable_names(flat_dataset)
    
            os.remove(filename)
            if lat_var_names and lon_var_names:
                lat_var_name = lat_var_names.split('__')[-1] if isinstance(lat_var_names, str) else lat_var_names[0].split('__')[-1]
                lon_var_name = lon_var_names.split('__')[-1] if isinstance(lon_var_names, str) else lon_var_names[0].split('__')[-1]
                return lat_var_name, lon_var_name
    
        except ValueError:
            logging.warning("Unable to find lat/lon vars using l2ss-py")
    
        # Still no dice, try using the 'units' variable attribute
        for coord_name, coord in dataset.coords.items():
            if 'units' not in coord.attrs:
                continue
            if coord.attrs['units'] == 'degrees_north' and lat_var_name is None:
                lat_var_name = coord_name
            if coord.attrs['units'] == 'degrees_east' and lon_var_name is None:
                lon_var_name = coord_name
        if lat_var_name and lon_var_name:
            return lat_var_name, lon_var_name
        else:
            logging.warning("Unable to find lat/lon vars using 'units' attribute")
    
        # Out of options, fail the test because we couldn't determine lat/lon variables
>       pytest.fail(f"Unable to find latitude and longitude variables.")
E       Failed: Unable to find latitude and longitude variables.

verify_collection.py:359: Failed
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3229857008-POCLOUD for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2832224417-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-83.83995%3A21.117949999999993%29&subset=lon%284.515600000000006%3A175.5004%29&granuleId=G3229857008-POCLOUD
INFO     root:verify_collection.py:393 Submitted harmony job 9465a8dc-8181-4185-8daa-a3433cde10b7
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2832224410/77207313_RSS_SMAP_SSS_L2C_r51281_20240906T221222_2024250_NRT_V06.0_001.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
WARNING  root:verify_collection.py:343 Unable to find lat/lon vars using l2ss-py
WARNING  root:verify_collection.py:356 Unable to find lat/lon vars using 'units' attribute
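
Editor's note: all four strategies in get_lat_lon_var_names fall through for this RSS SMAP L2C file (UMM-Var, cf-xarray, l2ss-py flattening, and the 'units' attribute scan, per the four warnings above). One further fallback that can be worth trying is a CF standard_name scan over every variable rather than only dataset.coords; the sketch below is illustrative, not part of verify_collection.py, and assumes the coordinates live at the root of the file rather than in a nested group.

    def find_lat_lon_by_standard_name(dataset):
        """Last-ditch lookup: match CF standard_name attributes on any variable, not just coordinates."""
        lat_name = lon_name = None
        for name, var in dataset.variables.items():
            standard_name = var.attrs.get('standard_name', '')
            if standard_name == 'latitude' and lat_name is None:
                lat_name = name
            elif standard_name == 'longitude' and lon_name is None:
                lon_name = name
        return lat_name, lon_name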

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for ops

test_spatial_subset[C1729925806-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 24s]
Raw output
assert False
collection_concept_id = 'C1729925806-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1729925806-GES_DISC', 'concept-id': 'G3239893367-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1729925806-GES_DISC'}]}, 'meta': {'association-details': {'collect...': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 513}], 'LongName': 'HDFEOS/SWATHS/O3 column/Data Fields/Status', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1729925800')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
            group_walk(f.groups, f, '')
    
        assert lat_var_name and lon_var_name
    
        var_ds = None
        msk = None
    
        science_vars = get_science_vars(collection_variables)
        if science_vars:
            for var in science_vars:
                science_var_name = var['umm']['Name']
                var_ds = find_variable(subsetted_ds_new, science_var_name)
                if var_ds is not None:
                    try:
                        msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                        break
                    except Exception:
                        continue
            else:
                var_ds, msk = None, None
        else:
            for science_var_name in subsetted_ds_new.variables:
                if (str(science_var_name) not in lat_var_name and
                    str(science_var_name) not in lon_var_name and
                    'time' not in str(science_var_name)):
    
                    var_ds = find_variable(subsetted_ds_new, science_var_name)
                    if var_ds is not None:
                        try:
                            msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                            break
                        except Exception:
                            continue
            else:
                var_ds, msk = None, None
    
        if var_ds is None or msk is None:
            pytest.fail("Unable to find variable from umm-v to use as science variable.")
    
        try:
            msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
            llat = subsetted_ds_new[lat_var_name].where(msk)
            llon = subsetted_ds_new[lon_var_name].where(msk)
        except ValueError:
    
            llat = subsetted_ds_new[lat_var_name]
            llon = subsetted_ds_new[lon_var_name]
    
        lat_max = llat.max()
        lat_min = llat.min()
    
        lon_min = llon.min()
        lon_max = llon.max()
    
        lon_min = (lon_min + 180) % 360 - 180
        lon_max = (lon_max + 180) % 360 - 180
    
        lat_var_fill_value = subsetted_ds_new[lat_var_name].encoding.get('_FillValue')
        lon_var_fill_value = subsetted_ds_new[lon_var_name].encoding.get('_FillValue')
    
        if lat_var_fill_value:
            if (lat_max <= north or np.isclose(lat_max, north)) and (lat_min >= south or np.isclose(lat_min, south)):
                logging.info("Successful Latitude subsetting")
            elif np.isnan(lat_max) and np.isnan(lat_min):
                logging.info("Partial Lat Success - no Data")
            else:
                assert False
    
        if lon_var_fill_value:
            if (lon_max <= east or np.isclose(lon_max, east)) and (lon_min >= west or np.isclose(lon_min, west)):
                logging.info("Successful Longitude subsetting")
            elif np.isnan(lon_max) and np.isnan(lon_min):
                logging.info("Partial Lon Success - no Data")
            else:
>               assert False
E               assert False

verify_collection.py:522: AssertionError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239893367-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1729925806-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&granuleId=G3239893367-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job fd51361a-dd4a-42e4-b4e8-0aa4a8ba0f2e
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1729925800/77207318_MLS-Aura_L2GP-O3_v05-03-c01_2024d258_subsetted.nc4
INFO     root:verify_collection.py:510 Successful Latitude subsetting
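
The failure above trips the longitude bounds check: latitude subsetting passed, but after wrapping into [-180, 180) the subsetted longitudes still exceed the requested box of lon(-171.0:171.0). A minimal standalone sketch of that check, using illustrative values rather than data from the failing granule:

    import numpy as np

    def normalize_lon(lon):
        # Wrap a longitude into [-180, 180), mirroring the test's
        # `(lon + 180) % 360 - 180` expression.
        return (lon + 180) % 360 - 180

    def lon_within_bounds(lon_min, lon_max, west, east):
        # Same comparison the test performs after normalization, including
        # the np.isclose tolerance at the edges.
        lon_min, lon_max = normalize_lon(lon_min), normalize_lon(lon_max)
        return ((lon_max <= east or np.isclose(lon_max, east)) and
                (lon_min >= west or np.isclose(lon_min, west)))

    # Illustrative only: a swath reaching +/-179.5 degrees fails against the
    # requested box west=-171.0, east=171.0, while +/-150 degrees passes.
    print(lon_within_bounds(-179.5, 179.5, west=-171.0, east=171.0))  # False
    print(lon_within_bounds(-150.0, 150.0, west=-171.0, east=171.0))  # True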

test_spatial_subset[C1627516300-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 42s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516300-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516300-GES_DISC', 'concept-id': 'G1902371249-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516300-GES_DISC'}]}, 'meta': {'association-details': {'collect...asa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ozone_total_vertical_column_precision', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516300')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0ece685a40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0ece685440>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f0ece685340>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G1902371249-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516300-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.0453%3A-60.6907%29&subset=lon%28-164.82465%3A-84.66935000000001%29&granuleId=G1902371249-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job bbeb53ff-318f-42fd-a1a2-10c17678736f
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1627516300/77207348_S5P_OFFL_L2_O3_20200712T224601_20200713T002730_14238_01_010108_20200715T122623_subsetted.nc4
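
This IndexError comes from indexing `[0]` into an empty variable list at verify_collection.py:438: the first non-time subgroup beneath the lat/lon group opens as a dataset with no top-level variables, so there is no science variable to copy into `science_test`. One way to make that selection defensive, sketched here as a hypothetical guard rather than the project's actual fix, is to skip empty groups:

    import xarray

    def pick_science_var(subsetted_filepath, lat_group, candidate_groups):
        # Hypothetical helper: try each non-time subgroup in turn and return
        # the first dataset that actually exposes a variable, instead of
        # assuming the first candidate does.
        for data_group in candidate_groups:
            ds = xarray.open_dataset(subsetted_filepath,
                                     group=f"{lat_group}/{data_group}",
                                     decode_times=False)
            variables = list(ds.variables)
            if variables:
                return ds, variables[0]
        # Caller can fall back to variables already in the lat/lon group.
        return None, None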

test_spatial_subset[C2179081549-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 49s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2179081549-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2179081549-GES_DISC', 'concept-id': 'G3239562979-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2179081549-GES_DISC'}]}, 'meta': {'association-details': {'collect...escription': 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -9999}], ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2179081540')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'Swath': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f0ece687340>}
nc_d = <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f0ece687240>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
>                   data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
E                   IndexError: list index out of range

verify_collection.py:435: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239562979-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2179081549-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-66.24765325%3A-60.47557675%29&subset=lon%28-118.05262675%3A-87.77828325%29&granuleId=G3239562979-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 10ed61e0-318f-45f9-a1bd-45288ec7e0cb
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C2179081540/77207360_2A.GPM.DPR.GPM-SLH.20240914-S214806-E232119.059895.V07C_subsetted.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
WARNING  root:verify_collection.py:312 Unable to find lat/lon vars using cf_xarray
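
Here the filtered list itself is empty: verify_collection.py:435 drops every subgroup whose name contains 'time', and for this GPM granule nothing survives the filter, so the bare `[0]` raises before any dataset is opened. A small guard, again only a sketch of one possible approach, replaces the indexing with `next(..., None)`:

    def first_non_time_group(group_names):
        # Hypothetical helper: first subgroup whose name does not contain
        # 'time', or None when every candidate is filtered out.
        return next((g for g in group_names if 'time' not in str(g).lower()), None)

    # Possible usage inside the walk (illustrative):
    # data_group = first_non_time_group(nc_d.groups[g].groups.keys())
    # if data_group is None:
    #     # skip adding 'science_test' and rely on variables in the lat/lon group
    #     ...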

test_spatial_subset[C2087131083-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 37s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C2087131083-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C2087131083-GES_DISC', 'concept-id': 'G3239885569-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C2087131083-GES_DISC'}]}, 'meta': {'association-details': {'collect.../variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_index_354_388_histogram_bounds'}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2087131080')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b895a40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7fd42b895540>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7fd42b895640>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G3239885569-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C2087131083-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-84.81162499999999%3A-67.735375%29&subset=lon%28-160.156275%3A84.439275%29&granuleId=G3239885569-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 9b381d3e-7f6b-4b92-ba4b-902252419b18
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C2087131080/77207400_S5P_OFFL_L2_AER_AI_20240914T141932_20240914T160101_35869_03_020701_20240916T040533_subsetted.nc4

test_spatial_subset[C1627516288-GES_DISC] (tests.verify_collection) failed

test-results/ops_test_report.xml [took 37s]
Raw output
IndexError: list index out of range
collection_concept_id = 'C1627516288-GES_DISC', env = 'ops'
granule_json = {'meta': {'collection-concept-id': 'C1627516288-GES_DISC', 'concept-id': 'G2085128317-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1627516288-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/ground_pixel', ...}}, ...]
harmony_env = <Environment.PROD: 4>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516280')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfb3BzIiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...Hcf0QWqtHsCuvOtj5tczYDaCn691RlCxRjaMlZBPYm2O9z5cTN31ynn1hy4h8lXYRR_I6DfCAdmdtrIdlLaMNL-ZbKOjYgx5kEqU8ClqAQnFPDVYJL29Hw'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']])
    
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables, collection_concept_id)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        lat_group = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                        # add a science variable to the dataset if other groups are in the lat/lon group
                        # some GPM collections won't have any other variables in the same group as lat/lon
                        if len(list(nc_d.groups[g].groups.keys())) > 0:
                            data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                            g_data = lat_group+'/'+data_group
                            subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
                            sci_var = list(subsetted_ds_data.variables.keys())[0]
                            subsetted_ds_new['science_test'] = subsetted_ds_data[sci_var]
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
>           group_walk(f.groups, f, '')

verify_collection.py:448: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

groups = {'METADATA': <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f108d3b8a40>, 'PRODUCT': <[RuntimeError('NetCDF: Not a valid ID') raised in repr()] Group object at 0x7f108d3ba640>}
nc_d = <[AttributeError('NetCDF: Not a valid ID') raised in repr()] Dataset object at 0x7f108d3ba240>
current_group = ''

    def group_walk(groups, nc_d, current_group):
        global subsetted_ds_new
        subsetted_ds_new = None
        # check if the top group has lat or lon variable
        if lat_var_name in list(nc_d.variables.keys()):
            subsetted_ds_new = subsetted_ds
        else:
            # if not then we'll need to keep track of the group layers
            group_list.append(current_group)
    
        # loop through the groups in the current layer
        for g in groups:
            # end the loop if we've already found latitude
            if subsetted_ds_new:
                break
            # check if the groups have latitude, define the dataset and end the loop if found
            if lat_var_name in list(nc_d.groups[g].variables.keys()):
                group_list.append(g)
                lat_group = '/'.join(group_list)
                subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=lat_group, decode_times=False)
                # add a science variable to the dataset if other groups are in the lat/lon group
                # some GPM collections won't have any other variables in the same group as lat/lon
                if len(list(nc_d.groups[g].groups.keys())) > 0:
                    data_group = [v for v in list(nc_d.groups[g].groups.keys()) if 'time' not in str(v).lower()][0]
                    g_data = lat_group+'/'+data_group
                    subsetted_ds_data = xarray.open_dataset(subsetted_filepath, group=g_data, decode_times=False)
>                   sci_var = list(subsetted_ds_data.variables.keys())[0]
E                   IndexError: list index out of range

verify_collection.py:438: IndexError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:373 Using granule G2085128317-GES_DISC for test
INFO     root:verify_collection.py:389 Sending harmony request https://harmony.earthdata.nasa.gov/C1627516288-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.99937499999999%3A-59.951625%29&subset=lon%28-76.6214%3A-1.5866000000000042%29&granuleId=G2085128317-GES_DISC
INFO     root:verify_collection.py:393 Submitted harmony job 7c9346c6-81cf-4482-844c-67b462730309
INFO     root:verify_collection.py:399 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1627516280/77207432_S5P_OFFL_L2_CH4_20210701T170324_20210701T184453_19257_01_010400_20210703T102338_subsetted.nc4
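
The IndexError failures above all originate inside group_walk, which tracks state through a global and assumes the first non-time sibling of the lat/lon group contains at least one variable. As a closing sketch, and not the repository's implementation, the group search can also be written as a small recursive function that simply returns the path of the group holding the latitude variable, leaving science-variable selection to the caller:

    import netCDF4

    def find_lat_group(nc_path, lat_var_name):
        # Hypothetical helper: depth-first search for the group containing
        # `lat_var_name`. Returns the group path (e.g. 'PRODUCT'), '' for the
        # root group, or None when the variable is absent from the file.
        def walk(group, path):
            if lat_var_name in group.variables:
                return path
            for name, child in group.groups.items():
                found = walk(child, f"{path}/{name}" if path else name)
                if found is not None:
                    return found
            return None

        with netCDF4.Dataset(nc_path) as dataset:
            return walk(dataset, '')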