Regression

Regression #70

GitHub Actions / Regression test results for uat failed Jun 10, 2024 in 0s

73 fail, 7 skipped, 201 pass in 24m 50s

281 tests   201 ✅  24m 50s ⏱️
  1 suites    7 💤
  1 files     73 ❌

Results for commit de4b3ac.

Annotations

test_spatial_subset[C1254855648-LARC_CLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 3s]
Raw output
ValueError: time data '2024-01-23T23:33:50+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C1254855648-LARC_CLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1254855648-LARC_CLOUD', 'concept-id': 'G1261886590-LARC_CLOUD', 'concept-type': '...'ProductionDateTime': '2024-01-24T03:08:57+00:00'}, 'GranuleUR': 'TEMPO_HCHO_L2_V01_20240123T233350Z_S013G06.nc', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1254855648-LARC_CLOUD'}]}, 'meta': {'association-details': {'colle...acted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -1.0000000150474662e+30}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1254855640')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:374: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2024-01-23T23:33:50+00:00', format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2024-01-23T23:33:50+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261886590-LARC_CLOUD for test
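
Both TEMPO (LARC_CLOUD) failures in this report trace back to the same parsing gap: get_half_temporal_extent in verify_collection.py parses the UMM-G BeginningDateTime with the single format '%Y-%m-%dT%H:%M:%S.%fZ', while these granules report times with an explicit UTC offset and no fractional seconds ('2024-01-23T23:33:50+00:00'). A minimal sketch of a more tolerant parser is below; the helper name is illustrative and not part of the repository's code.

    # Sketch only: accept both timestamp styles seen in these failures,
    # e.g. '2024-01-23T23:33:50+00:00' and '2024-01-23T23:33:50.123456Z'.
    from datetime import datetime, timezone

    def parse_umm_datetime(value: str) -> datetime:
        # Python 3.10's fromisoformat() rejects a trailing 'Z', so normalize
        # it to an explicit UTC offset before parsing.
        if value.endswith('Z'):
            value = value[:-1] + '+00:00'
        parsed = datetime.fromisoformat(value)
        # Treat offset-naive strings as UTC so midpoint arithmetic stays consistent.
        return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)

Used in place of the strptime calls, a helper like this would let get_half_temporal_extent compute the midpoint for either representation.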

test_spatial_subset[C1262900002-LARC_CLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 5s]
Raw output
ValueError: time data '2024-03-29T14:36:39+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C1262900002-LARC_CLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1262900002-LARC_CLOUD', 'concept-id': 'G1264396026-LARC_CLOUD', 'concept-type': '...roductionDateTime': '2024-05-09T15:28:39+00:00'}, 'GranuleUR': 'TEMPO_O3PROF_L2_V03_20240329T143639Z_S006G09.nc', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1262900002-LARC_CLOUD'}]}, 'meta': {'association-details': {'colle...acted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -1.2676506002282294e+30}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1262900000')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:374: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2024-03-29T14:36:39+00:00', format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2024-03-29T14:36:39+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1264396026-LARC_CLOUD for test

test_spatial_subset[C1220280440-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 6s]
Raw output
Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')
collection_concept_id = 'C1220280440-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1220280440-GES_DISC', 'concept-id': 'G1256524037-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1220280440-GES_DISC'}]}, 'meta': {'association-details': {'collect.../umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/aerosol_mid_pressure_pdf_axis', ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1220280440')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7fdf5ad2be80>
response = <Response [404]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256524037-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1220280440-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-72.00995%3A-56.276050000000005%29&subset=lon%28-153.63479999999998%3A140.6828%29&subset=time%28%222019-02-10T13%3A14%3A22.250000%22%3A%2A%29&granuleId=G1256524037-GES_DISC
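
The two GES_DISC 404s in this report ('Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.') are raised by Harmony before any subsetting happens, so they point at EULA/collection access configuration for the test account rather than at l2ss-py. If that is the intended interpretation, the test could separate this condition from genuine regressions; the snippet below is only an illustration of that idea, not the repository's code.

    # Illustrative only: surface EULA/authorization errors from harmony-py's
    # submit() as a skip with a clear reason instead of a generic failure.
    try:
        job_id = harmony_client.submit(harmony_request)
    except Exception as exc:  # harmony-py raises a plain Exception for 4xx JSON errors
        if any('EULA' in str(arg) for arg in exc.args):
            pytest.skip(f"EULA not accepted/resolvable for {collection_concept_id}: {exc}")
        raise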

test_spatial_subset[C1261072645-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 47s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size
collection_concept_id = 'C1261072645-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1261072645-POCLOUD', 'concept-id': 'G1261389677-POCLOUD', 'concept-type': 'granul...121T192909_PIB0_01.nc', 'SWOT_L2_LR_PreCalSSH_WindWave_006_539_20231121T183819_20231121T192726_PIB0_01.nc', ...], ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1261072645-POCLOUD'}]}, 'meta': {'association-details': {'collecti...pixels', 'Size': 69, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 2147483647}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1261072640')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f8faed55420>
job_id = '021623ed-83d4-4501-81ef-36626a9e2101', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261389677-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1261072645-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-78.1363741592625%3A-74.34618744123749%29&subset=lon%28152.828804597025%3A179.303302681975%29&subset=time%28%222023-11-21T18%3A50%3A26.243500%22%3A%2A%29&granuleId=G1261389677-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 021623ed-83d4-4501-81ef-36626a9e2101
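
The POCLOUD and GES_DISC failures that end in harmony.harmony.ProcessingFailedException ('Could not get result item file size' and 'podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error') are reported by Harmony's work items, so the test fails inside wait_for_processing rather than in its own assertions. One way to tell transient service-side errors apart from reproducible l2ss-py failures would be a single resubmission; the wrapper below is a sketch under that assumption, not existing project code.

    # Sketch only: resubmit a Harmony job once if its work item fails, and
    # re-raise if the failure repeats (likely a real service regression).
    import logging
    from harmony.harmony import ProcessingFailedException

    def submit_and_wait(harmony_client, harmony_request, retries=1):
        for attempt in range(retries + 1):
            job_id = harmony_client.submit(harmony_request)
            logging.info("Submitted harmony job %s (attempt %d)", job_id, attempt + 1)
            try:
                harmony_client.wait_for_processing(job_id, show_progress=True)
                return job_id
            except ProcessingFailedException:
                if attempt == retries:
                    raise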

test_spatial_subset[C1240739709-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 42s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1240739709-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1240739709-POCLOUD', 'concept-id': 'G1243177196-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1240739709-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 6922, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1240739700')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f1a13280730>
job_id = 'a8bf8e1a-8d15-42a7-99d4-327fa1556bff', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1243177196-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1240739709-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&subset=time%28%222020-02-29T23%3A53%3A37.750000%22%3A%2A%29&granuleId=G1243177196-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job a8bf8e1a-8d15-42a7-99d4-327fa1556bff

test_spatial_subset[C1256507988-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 53s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size
collection_concept_id = 'C1256507988-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1256507988-POCLOUD', 'concept-id': 'G1257402445-POCLOUD', 'concept-type': 'granul...419T183559_PIA1_01.nc', 'SWOT_L2_LR_PreCalSSH_WindWave_495_028_20230419T174453_20230419T183600_PIA1_01.nc', ...], ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1256507988-POCLOUD'}]}, 'meta': {'association-details': {'collecti...pixels', 'Size': 71, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 2147483647}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1256507980')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f7bb650d030>
job_id = '2d025927-ac1e-4f69-bc45-86c7dc617642', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1257402445-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1256507988-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%2870.74622575000001%3A78.07930424999999%29&subset=lon%28132.80792325%3A178.78994675%29&subset=time%28%222023-04-19T17%3A57%3A33.138500%22%3A%2A%29&granuleId=G1257402445-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 2d025927-ac1e-4f69-bc45-86c7dc617642

test_spatial_subset[C1233154410-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 1m 1s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1233154410-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1233154410-GES_DISC', 'concept-id': 'G1241748632-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1233154410-GES_DISC'}]}, 'meta': {'association-details': {'collect...location/vertex_dim', 'Size': 4, 'Type': 'OTHER'}], 'LongName': 'Geolocation/footprint_longitude_vertices', ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1233154410')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7fd77e511930>
job_id = 'ab8d336a-9178-4e06-9804-d8afa63212df', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1241748632-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1233154410-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&subset=time%28%222021-04-01T06%3A00%3A00%22%3A%2A%29&granuleId=G1241748632-GES_DISC
INFO     root:verify_collection.py:387 Submitted harmony job ab8d336a-9178-4e06-9804-d8afa63212df

test_spatial_subset[C1240921715-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 8s]
Raw output
Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')
collection_concept_id = 'C1240921715-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1240921715-GES_DISC', 'concept-id': 'G1262646682-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1240921715-GES_DISC'}]}, 'meta': {'association-details': {'collect....nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/methane_mixing_ratio_bias_corrected', ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1240921710')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f19e5569c00>
response = <Response [404]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1262646682-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1240921715-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-77.02635%3A-59.977650000000004%29&subset=lon%28-2.3436499999999967%3A72.87164999999999%29&subset=time%28%222024-02-23T22%3A03%3A10%22%3A%2A%29&granuleId=G1262646682-GES_DISC

test_spatial_subset[C1259115177-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 54s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size
collection_concept_id = 'C1259115177-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1259115177-POCLOUD', 'concept-id': 'G1259973528-POCLOUD', 'concept-type': 'granul..._XOverCal_20230410T112911_20230412T111047_PIB0_01.nc', 'SWOT_GranulePolygons_Cal_20230213T142800_v05.json', ...], ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1259115177-POCLOUD'}]}, 'meta': {'association-details': {'collecti...pixels', 'Size': 69, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': 2147483647}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1259115170')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f19ecf2b4f0>
job_id = '6a74852d-bedb-4a31-9f97-641d37d10dd1', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: Could not get result item file size

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1259973528-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1259115177-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%2870.74622575000001%3A78.07930424999999%29&subset=lon%28132.80792325%3A178.78994675%29&subset=time%28%222023-04-11T19%3A12%3A37.071250%22%3A%2A%29&granuleId=G1259973528-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 6a74852d-bedb-4a31-9f97-641d37d10dd1

test_spatial_subset[C1234666374-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 21s]
Raw output
assert False
collection_concept_id = 'C1234666374-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1234666374-GES_DISC', 'concept-id': 'G1261572742-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1234666374-GES_DISC'}]}, 'meta': {'association-details': {'collect... 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -999.989990234375}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1234666370')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
            group_walk(f.groups, f, '')
    
        assert lat_var_name and lon_var_name
    
        var_ds = None
        msk = None
    
        if science_vars := get_science_vars(collection_variables):
            for idx, value in enumerate(science_vars):
                science_var_name = science_vars[idx]['umm']['Name']
                try:
                    var_ds = subsetted_ds_new[science_var_name]
                    msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                    break
                except Exception:
                    try:
                        # if the variable couldn't be found because the name includes a group, e.g.,
                        # `geolocation/relative_azimuth_angle`,
                        # then try to access the variable after removing the group name.
                        var_ds = subsetted_ds_new[science_var_name.rsplit("/", 1)[-1]]
                        msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                        break
                    except Exception:
                        var_ds = None
                        msk = None
    
            if var_ds is None and msk is None:
                pytest.fail(f"Unable to find variable from umm-v to use as science variable.")
    
        else:
            # Can't find a science var in UMM-V, just pick one
    
            science_var_name = next(iter([v for v in subsetted_ds_new.variables if
                                        str(v) not in lat_var_name and str(v) not in lon_var_name and 'time' not in str(v)]))
    
            var_ds = subsetted_ds_new[science_var_name]
    
        try:
            msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
            llat = subsetted_ds_new[lat_var_name].where(msk)
            llon = subsetted_ds_new[lon_var_name].where(msk)
        except ValueError:
    
            llat = subsetted_ds_new[lat_var_name]
            llon = subsetted_ds_new[lon_var_name]
    
        lat_max = llat.max()
        lat_min = llat.min()
    
        lon_min = llon.min()
        lon_max = llon.max()
    
        lon_min = (lon_min + 180) % 360 - 180
        lon_max = (lon_max + 180) % 360 - 180
    
        lat_var_fill_value = subsetted_ds_new[lat_var_name].encoding.get('_FillValue')
        lon_var_fill_value = subsetted_ds_new[lon_var_name].encoding.get('_FillValue')
    
        if lat_var_fill_value:
            if (lat_max <= north or np.isclose(lat_max, north)) and (lat_min >= south or np.isclose(lat_min, south)):
                logging.info("Successful Latitude subsetting")
            elif np.isnan(lat_max) and np.isnan(lat_min):
                logging.info("Partial Lat Success - no Data")
            else:
                assert False
    
        if lon_var_fill_value:
            if (lon_max <= east or np.isclose(lon_max, east)) and (lon_min >= west or np.isclose(lon_min, west)):
                logging.info("Successful Longitude subsetting")
            elif np.isnan(lon_max) and np.isnan(lon_min):
                logging.info("Partial Lon Success - no Data")
            else:
>               assert False
E               assert False

verify_collection.py:506: AssertionError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261572742-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1234666374-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&subset=time%28%222024-01-02T05%3A59%3A59.999750%22%3A%2A%29&granuleId=G1261572742-GES_DISC
INFO     root:verify_collection.py:387 Submitted harmony job 3b502c38-1c66-462f-89a0-d5d18691b679
INFO     root:verify_collection.py:393 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1234666370/4320727_MLS-Aura_L2GP-Temperature_v05-02-c01_2024d002_subsetted.nc4
INFO     root:verify_collection.py:494 Successful Latitude subsetting
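
The latitude check above passes while the longitude check trips the bare assert, which for a near-global MLS swath is often an antimeridian effect: after the (lon + 180) % 360 - 180 normalisation a handful of points can land just outside the requested ±171° box. A small diagnostic along these lines (a hypothetical helper, not part of verify_collection.py; llon, west and east are assumed to be the same objects as in the test) can show whether the violation is a genuine subsetting miss or a wrap-around artifact:

    import numpy as np
    import xarray as xr

    def out_of_bbox_fraction(llon: xr.DataArray, west: float, east: float) -> float:
        """Fraction of non-NaN longitudes that land outside [west, east] after wrapping."""
        # Same normalisation as the test: map longitudes into [-180, 180).
        lon = ((llon + 180) % 360 - 180).values.ravel()
        lon = lon[~np.isnan(lon)]
        if lon.size == 0:
            return 0.0
        outside = (lon < west) | (lon > east)
        return float(outside.sum()) / lon.size

If the fraction is tiny and the offending values sit near ±180°, the granule merely touches the antimeridian and the assertion, rather than the subsetter, is the likely culprit.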

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1220280439-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 8s]
Raw output
Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')
collection_concept_id = 'C1220280439-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1220280439-GES_DISC', 'concept-id': 'G1256524035-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1220280439-GES_DISC'}]}, 'meta': {'association-details': {'collect...RL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/qa_value', ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1220280430')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f068d647160>
response = <Response [404]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256524035-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1220280439-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-72.74415%3A-56.40985%29&subset=lon%2836.011775%3A98.779225%29&subset=time%28%222019-02-10T20%3A01%3A54.750000%22%3A%2A%29&granuleId=G1256524035-GES_DISC
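
This 404 is raised by harmony-py's _handle_error_response before any subsetting happens: the collection points at a EULA record that UAT cannot resolve, so submit() never returns a job. If the goal is to keep such metadata problems from counting as subsetter regressions, the submit call could be wrapped roughly as below (a sketch only; the string matching is an assumption based on the error text shown above):

    import pytest

    try:
        job_id = harmony_client.submit(harmony_request)
    except Exception as exc:  # harmony-py raises a plain Exception for JSON error bodies
        if "EULA" in str(exc) and "could not be found" in str(exc):
            pytest.skip(f"Collection references a EULA that does not exist in UAT: {exc}")
        raise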

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1238658050-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 21s]
Raw output
assert False
collection_concept_id = 'C1238658050-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1238658050-POCLOUD', 'concept-id': 'G1240841888-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1238658050-POCLOUD'}]}, 'meta': {'association-details': {'collecti... 'NUMCELLS', 'Size': 42, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -32767}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1238658050')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
        harmony_client.wait_for_processing(job_id, show_progress=True)
        subsetted_filepath = None
        for filename in [file_future.result()
                         for file_future
                         in harmony_client.download_all(job_id, directory=f'{tmp_path}', overwrite=True)]:
            logging.info(f'Downloaded: %s', filename)
            subsetted_filepath = pathlib.Path(filename)
    
        # Verify spatial subset worked
        subsetted_ds = xarray.open_dataset(subsetted_filepath, decode_times=False)
        group = None
        # Try to read group in file
        lat_var_name, lon_var_name = get_lat_lon_var_names(subsetted_ds, subsetted_filepath, collection_variables)
        lat_var_name = lat_var_name.split('/')[-1]
        lon_var_name = lon_var_name.split('/')[-1]
    
        with netCDF4.Dataset(subsetted_filepath) as f:
            group_list = []
            def group_walk(groups, nc_d, current_group):
                global subsetted_ds_new
                subsetted_ds_new = None
                # check if the top group has lat or lon variable
                if lat_var_name in list(nc_d.variables.keys()):
                    subsetted_ds_new = subsetted_ds
                else:
                    # if not then we'll need to keep track of the group layers
                    group_list.append(current_group)
    
                # loop through the groups in the current layer
                for g in groups:
                    # end the loop if we've already found latitude
                    if subsetted_ds_new:
                        break
                    # check if the groups have latitude, define the dataset and end the loop if found
                    if lat_var_name in list(nc_d.groups[g].variables.keys()):
                        group_list.append(g)
                        g = '/'.join(group_list)
                        subsetted_ds_new = xarray.open_dataset(subsetted_filepath, group=g, decode_times=False)
                        break
                    # recall the function on a group that has groups in it and didn't find latitude
                    # this is going 'deeper' into the groups
                    if len(list(nc_d.groups[g].groups.keys())) > 0:
                        group_walk(nc_d.groups[g].groups, nc_d.groups[g], g)
                    else:
                        continue
    
            group_walk(f.groups, f, '')
    
        assert lat_var_name and lon_var_name
    
        var_ds = None
        msk = None
    
        if science_vars := get_science_vars(collection_variables):
            for idx, value in enumerate(science_vars):
                science_var_name = science_vars[idx]['umm']['Name']
                try:
                    var_ds = subsetted_ds_new[science_var_name]
                    msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                    break
                except Exception:
                    try:
                        # if the variable couldn't be found because the name includes a group, e.g.,
                        # `geolocation/relative_azimuth_angle`,
                        # then try to access the variable after removing the group name.
                        var_ds = subsetted_ds_new[science_var_name.rsplit("/", 1)[-1]]
                        msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
                        break
                    except Exception:
                        var_ds = None
                        msk = None
    
            if var_ds is None and msk is None:
                pytest.fail(f"Unable to find variable from umm-v to use as science variable.")
    
        else:
            # Can't find a science var in UMM-V, just pick one
    
            science_var_name = next(iter([v for v in subsetted_ds_new.variables if
                                        str(v) not in lat_var_name and str(v) not in lon_var_name and 'time' not in str(v)]))
    
            var_ds = subsetted_ds_new[science_var_name]
    
        try:
            msk = np.logical_not(np.isnan(var_ds.data.squeeze()))
            llat = subsetted_ds_new[lat_var_name].where(msk)
            llon = subsetted_ds_new[lon_var_name].where(msk)
        except ValueError:
    
            llat = subsetted_ds_new[lat_var_name]
            llon = subsetted_ds_new[lon_var_name]
    
        lat_max = llat.max()
        lat_min = llat.min()
    
        lon_min = llon.min()
        lon_max = llon.max()
    
        lon_min = (lon_min + 180) % 360 - 180
        lon_max = (lon_max + 180) % 360 - 180
    
        lat_var_fill_value = subsetted_ds_new[lat_var_name].encoding.get('_FillValue')
        lon_var_fill_value = subsetted_ds_new[lon_var_name].encoding.get('_FillValue')
    
        if lat_var_fill_value:
            if (lat_max <= north or np.isclose(lat_max, north)) and (lat_min >= south or np.isclose(lat_min, south)):
                logging.info("Successful Latitude subsetting")
            elif np.isnan(lat_max) and np.isnan(lat_min):
                logging.info("Partial Lat Success - no Data")
            else:
>               assert False
E               assert False

verify_collection.py:498: AssertionError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1240841888-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1238658050-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%282.9020500000000027%3A86.74335%29&subset=lon%28-175.9757025%3A-23.052397499999998%29&subset=time%28%222020-02-01T23%3A01%3A29%22%3A%2A%29&granuleId=G1240841888-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 83753152-20d6-4943-a0cc-4b670173ef9f
INFO     root:verify_collection.py:393 Downloaded: /tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1238658050/4320737_ascat_20200201_223600_metopa_68950_eps_o_250_3202_ovw.l2_subsetted.nc4
WARNING  root:verify_collection.py:302 Unable to find lat/lon vars in UMM-Var
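
Two things go wrong in this ASCAT case: the UMM-Var records do not identify the coordinate variables (the warning above), and the latitude bound check then fails on whatever names the fallback selected. When UMM-Var is silent, the CF attributes in the file itself are usually enough to locate the coordinates; a sketch of such a fallback (a hypothetical helper, not the repository's get_lat_lon_var_names):

    import xarray as xr

    def guess_lat_lon(ds: xr.Dataset) -> tuple[str | None, str | None]:
        """Guess coordinate variable names from CF standard_name/units attributes."""
        lat_name = lon_name = None
        for name, var in ds.variables.items():
            std = str(var.attrs.get('standard_name', '')).lower()
            units = str(var.attrs.get('units', '')).lower()
            if lat_name is None and (std == 'latitude' or units.startswith('degrees_north')):
                lat_name = str(name)
            if lon_name is None and (std == 'longitude' or units.startswith('degrees_east')):
                lon_name = str(name)
        return lat_name, lon_name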

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1240739573-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 38s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1240739573-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1240739573-POCLOUD', 'concept-id': 'G1262742901-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1240739573-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ize': 409, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw9/test_spatial_subset_C1240739570')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f19e4276740>
job_id = '8ae93a07-88c1-4f3b-9f05-9eb3a05d2e23', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1262742901-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1240739573-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.13719999999999%3A85.1332%29&subset=lon%28-171.000025%3A170.99902500000002%29&subset=time%28%222024-03-25T16%3A40%3A21%22%3A%2A%29&granuleId=G1262742901-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 8ae93a07-88c1-4f3b-9f05-9eb3a05d2e23
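
This failure never reaches the verification logic: the l2ss-py 2.10.0rc8 worker itself dies and harmony-py surfaces it as a ProcessingFailedException carrying only "unknown error". A thin wrapper keeps the service message attached to an explicit pytest failure instead of a raw traceback (a sketch; the import path matches the exception class shown above, and harmony_client/job_id mirror the test):

    import pytest
    from harmony.harmony import ProcessingFailedException

    try:
        harmony_client.wait_for_processing(job_id, show_progress=True)
    except ProcessingFailedException as exc:
        pytest.fail(f"Harmony job {job_id} failed inside the l2ss-py service: {exc}")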

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1220280433-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 9s]
Raw output
Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')
collection_concept_id = 'C1220280433-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1220280433-GES_DISC', 'concept-id': 'G1256524039-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1220280433-GES_DISC'}]}, 'meta': {'association-details': {'collect...ov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'METADATA/QA_STATISTICS/nitrogendioxide_total_column_pdf'}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1220280430')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f1731b65270>
response = <Response [404]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256524039-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1220280433-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-72.68005%3A-56.265950000000004%29&subset=lon%28-171.89765%3A-108.42435%29&subset=time%28%222019-02-10T09%3A52%3A53.250000%22%3A%2A%29&granuleId=G1256524039-GES_DISC

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1256946216-ASDC_DEV2] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 0s]
Raw output
ValueError: time data '2021-12-31T22:53:23.666666+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C1256946216-ASDC_DEV2', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1256946216-ASDC_DEV2', 'concept-id': 'G1262213008-ASDC_DEV2', 'concept-type': 'gr...me': '2024-02-27T19:58:40+00:00'}, 'GranuleUR': 'CPF_LEO_N20_INTERCAL_RAD_L1A.STC_b001.20211231.225323.PT04M48S', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1256946216-ASDC_DEV2'}]}, 'meta': {'association-details': {'collec...scription': 'Extracted from _FillValue metadata attribute', 'Type': 'SCIENCE_FILLVALUE', 'Value': -999.0}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw2/test_spatial_subset_C1256946210')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:374: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2021-12-31T22:53:23.666666+00:00'
format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2021-12-31T22:53:23.666666+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1262213008-ASDC_DEV2 for test
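
The hard-coded '%Y-%m-%dT%H:%M:%S.%fZ' format in get_half_temporal_extent only accepts 'Z'-suffixed timestamps with fractional seconds, while this granule's UMM-G temporal strings use a numeric '+00:00' offset. A tolerant parser along these lines would accept both variants (a sketch with an assumed helper name, not the repository's fix; datetime.fromisoformat on Python 3.10 handles numeric offsets but not the 'Z' suffix, hence the normalisation):

    from datetime import datetime, timezone

    def parse_umm_datetime(value: str) -> datetime:
        """Parse ISO-8601 timestamps with either a 'Z' suffix or a numeric UTC offset."""
        if value.endswith('Z'):
            value = value[:-1] + '+00:00'
        dt = datetime.fromisoformat(value)
        # Treat naive timestamps as UTC so interval arithmetic stays consistent.
        return dt if dt.tzinfo is not None else dt.replace(tzinfo=timezone.utc)

Judging by the logged request URLs, the temporal subset starts near the midpoint of the granule's range, so once both ends parse the rest is plain datetime arithmetic, e.g. start_dt + (end_dt - start_dt) / 2.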

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1240739508-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 31s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1240739508-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1240739508-POCLOUD', 'concept-id': 'G1262749557-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1240739508-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ize': 409, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1240739500')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f1a0e6cff10>
job_id = '61c8354c-6b17-4625-9d29-c5f31a7386b0', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1262749557-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1240739508-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.39499999999998%3A79.67699999999999%29&subset=lon%28-171.0%3A171.0%29&subset=time%28%222024-03-25T18%3A03%3A01.500000%22%3A%2A%29&granuleId=G1262749557-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 61c8354c-6b17-4625-9d29-c5f31a7386b0

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1229246434-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 7s]
Raw output
Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')
collection_concept_id = 'C1229246434-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1229246434-GES_DISC', 'concept-id': 'G1256524043-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1229246434-GES_DISC'}]}, 'meta': {'association-details': {'collect...RL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': 'PRODUCT/scanline', ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw1/test_spatial_subset_C1229246431')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
>       job_id = harmony_client.submit(harmony_request)

verify_collection.py:386: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:851: in submit
    self._handle_error_response(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f1c5bb5d0c0>
response = <Response [404]>

    def _handle_error_response(self, response: Response):
        """Raises the appropriate exception based on the response
        received from Harmony. Tries to pull out an error message
        from a Harmony JSON response when possible.
    
        Args:
            response: The Response from Harmony
    
        Raises:
            Exception with a Harmony error message or a more generic
            HTTPError
        """
        if 'application/json' in response.headers.get('Content-Type', ''):
            exception_message = None
            try:
                response_json = response.json()
                if hasattr(response_json, 'get'):
                    exception_message = response_json.get('description')
                    if not exception_message:
                        exception_message = response_json.get('error')
            except JSONDecodeError:
                pass
            if exception_message:
>               raise Exception(response.reason, exception_message)
E               Exception: ('Not Found', 'Error: EULA 2f16e13a-e648-4920-9276-d578f30707c6 could not be found.')

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:784: Exception
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256524043-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1229246434-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-63.787499999999994%3A-48.530499999999996%29&subset=lon%2858.247075%3A105.673925%29&subset=time%28%222019-12-13T19%3A20%3A55.250000%22%3A%2A%29&granuleId=G1256524043-GES_DISC

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1243658323-GES_DISC] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 47s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1243658323-GES_DISC', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1243658323-GES_DISC', 'concept-id': 'G1256529750-GES_DISC', 'concept-type': 'gran...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1243658323-GES_DISC'}]}, 'meta': {'association-details': {'collect...ion': {'Name': 'UMM-Var', 'URL': 'https://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1243658320')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f068efe30a0>
job_id = '26158e7d-fa7c-4c20-8106-d36b40eff06e', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256529750-GES_DISC for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1243658323-GES_DISC/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-85.5%3A85.5%29&subset=lon%28-171.0%3A171.0%29&subset=time%28%222022-01-15T06%3A00%3A00%22%3A%2A%29&granuleId=G1256529750-GES_DISC
INFO     root:verify_collection.py:387 Submitted harmony job 26158e7d-fa7c-4c20-8106-d36b40eff06e

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1240739526-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 37s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1240739526-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1240739526-POCLOUD', 'concept-id': 'G1261584421-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1240739526-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ny_dim_1', 'Size': 812, 'Type': 'OTHER'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -9999.0}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw6/test_spatial_subset_C1240739520')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f8fac53b910>
job_id = '310d82ba-00ba-488a-b42a-52aa08751533', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261584421-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1240739526-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-81.88637500000002%3A79.295375%29&subset=lon%28-170.913275%3A171.000275%29&subset=time%28%222024-01-08T13%3A10%3A31.750000%22%3A%2A%29&granuleId=G1261584421-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 310d82ba-00ba-488a-b42a-52aa08751533

Check warning on line 0 in tests.verify_collection

@github-actions github-actions / Regression test results for uat

test_spatial_subset[C1238687546-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 35s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1238687546-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1238687546-POCLOUD', 'concept-id': 'G1241753455-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1238687546-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3200, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw5/test_spatial_subset_C1238687540')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f1a2bbdf970>
job_id = '12b10934-7284-4163-8125-893cf8ac67a2', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1241753455-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1238687546-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%2837.767744%3A47.635736%29&subset=lon%2811.666462750000004%3A48.44828724999999%29&subset=time%28%222016-01-02T23%3A59%3A48%22%3A%2A%29&granuleId=G1241753455-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 12b10934-7284-4163-8125-893cf8ac67a2
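
The POCLOUD failures in this run all surface the same opaque l2ss-py 2.10.0rc8 message ("Service request failed with an unknown error"), so the traceback alone does not say what the service objected to. When re-running one of these cases locally, the job's final status message can be logged rather than only the raised exception. The sketch below is a minimal reproduction that uses only harmony-py calls already shown in this report (Client, submit, wait_for_processing, progress) and assumes harmony_env, bearer_token and harmony_request are constructed exactly as in test_spatial_subset above.

    import logging

    import harmony
    from harmony.harmony import ProcessingFailedException

    # Assumes harmony_env, bearer_token and harmony_request are built as in
    # test_spatial_subset above.
    harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
    job_id = harmony_client.submit(harmony_request)
    logging.info("Submitted harmony job %s", job_id)

    try:
        harmony_client.wait_for_processing(job_id, show_progress=False)
    except ProcessingFailedException:
        # progress() returns (percent, status, message); the message carries the
        # per-WorkItem failure reason, e.g. the l2ss-py error quoted above.
        percent, status, message = harmony_client.progress(job_id)
        logging.error("Job %s ended with status %r: %s", job_id, status, message)
        raise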


test_spatial_subset[C1238621178-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 35s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1238621178-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1238621178-POCLOUD', 'concept-id': 'G1256099471-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1238621178-POCLOUD'}]}, 'meta': {'association-details': {'collecti... 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -3.4028235e+38}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw4/test_spatial_subset_C1238621170')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f172f1e3070>
job_id = 'b629dac4-39ff-4197-9fea-41f8259c4f83', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1256099471-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1238621178-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-17.4381%3A64.06809999999999%29&subset=lon%28-146.49374999999998%3A-3.7942500000000052%29&subset=time%28%222013-10-31T23%3A48%3A34.500000%22%3A%2A%29&granuleId=G1256099471-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job b629dac4-39ff-4197-9fea-41f8259c4f83
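
The test above shrinks the granule's native bounding box by a factor of .95 before requesting the subset, so the request stays strictly inside the granule extent. The repository's create_smaller_bounding_box helper is not reproduced in this report; a hypothetical version that scales the box toward its centre, matching the argument order used in the test, could look like the sketch below (it ignores antimeridian-crossing boxes).

    # Hypothetical illustration only -- the actual create_smaller_bounding_box in
    # verify_collection.py is not shown in this report. This version shrinks the
    # box toward its centre by `factor` and returns (east, west, north, south),
    # matching how the test unpacks the result.
    def create_smaller_bounding_box(east, west, north, south, factor):
        lon_center = (east + west) / 2.0
        lat_center = (north + south) / 2.0
        half_width = abs(east - west) * factor / 2.0
        half_height = abs(north - south) * factor / 2.0
        return (lon_center + half_width,    # east
                lon_center - half_width,    # west
                lat_center + half_height,   # north
                lat_center - half_height)   # south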


test_spatial_subset[C1238687534-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 35s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1238687534-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1238687534-POCLOUD', 'concept-id': 'G1262749578-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1238687534-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3200, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw7/test_spatial_subset_C1238687530')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f06896fc280>
job_id = '89becdc3-bf7d-423d-97c9-2bd6bfcf62ac', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1262749578-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1238687534-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-53.812239500000004%3A-43.233020499999995%29&subset=lon%2871.04933199999999%3A112.99608800000001%29&subset=time%28%222024-03-25T18%3A36%3A21%22%3A%2A%29&granuleId=G1262749578-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 89becdc3-bf7d-423d-97c9-2bd6bfcf62ac


test_spatial_subset[C1238621185-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 38s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1238621185-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1238621185-POCLOUD', 'concept-id': 'G1242349398-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1238621185-POCLOUD'}]}, 'meta': {'association-details': {'collecti...ze': 3712, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -128}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw8/test_spatial_subset_C1238621180')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7fdf53e17130>
job_id = '44041ede-1174-462b-b743-a61c94fe7177', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1242349398-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1238621185-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-76.859025%3A63.66402499999999%29&subset=lon%28-71.35319999999999%3A71.3672%29&subset=time%28%222018-01-15T23%3A48%3A05.500000%22%3A%2A%29&granuleId=G1242349398-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 44041ede-1174-462b-b743-a61c94fe7177


test_spatial_subset[C1256420925-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 38s]
Raw output
harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error
collection_concept_id = 'C1256420925-POCLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1256420925-POCLOUD', 'concept-id': 'G1261790188-POCLOUD', 'concept-type': 'granul...pecification': {'Name': 'UMM-G', 'URL': 'https://cdn.earthdata.nasa.gov/umm/granule/v1.6.6', 'Version': '1.6.6'}, ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1256420925-POCLOUD'}]}, 'meta': {'association-details': {'collecti...: 243, 'Type': 'CROSS_TRACK_DIMENSION'}], 'FillValues': [{'Type': 'SCIENCE_FILLVALUE', 'Value': -32768.0}], ...}}, ...]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw0/test_spatial_subset_C1256420920')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
        temporal_subset = get_half_temporal_extent(start_time, end_time)
    
        # Build harmony request
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        request_bbox = harmony.BBox(w=west, s=south, e=east, n=north)
        request_collection = harmony.Collection(id=collection_concept_id)
        harmony_request = harmony.Request(collection=request_collection, spatial=request_bbox,
                                          granule_id=[granule_json['meta']['concept-id']],
                                          temporal=temporal_subset)
        logging.info("Sending harmony request %s", harmony_client.request_as_url(harmony_request))
    
        # Submit harmony request and download result
        job_id = harmony_client.submit(harmony_request)
        logging.info("Submitted harmony job %s", job_id)
>       harmony_client.wait_for_processing(job_id, show_progress=True)

verify_collection.py:388: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <harmony.harmony.Client object at 0x7f0268b8c1c0>
job_id = '9ae1061c-b5b0-4e6b-8635-ad98e9a8c574', show_progress = True

    def wait_for_processing(self, job_id: str, show_progress: bool = False) -> None:
        """Retrieve a submitted job's completion status in percent.
    
        Args:
            job_id: UUID string for the job you wish to interrogate.
    
        Returns:
            The job's processing progress as a percentage.
    
        :raises
            Exception: This can happen if an invalid job_id is provided or Harmony services
            can't be reached.
        """
        # How often to refresh the screen for progress updates and animating spinners.
        ui_update_interval = 0.33  # in seconds
        running_w_errors_logged = False
    
        intervals = round(self.check_interval / ui_update_interval)
        if show_progress:
            with progressbar.ProgressBar(max_value=100, widgets=progressbar_widgets) as bar:
                progress = 0
                while progress < 100:
                    progress, status, message = self.progress(job_id)
                    if status == 'failed':
>                       raise ProcessingFailedException(job_id, message)
E                       harmony.harmony.ProcessingFailedException: WorkItem failed: podaac/l2ss-py:2.10.0rc8: Service request failed with an unknown error

../../../../.cache/pypoetry/virtualenvs/l2ss-py-autotest-iYz8Sff2-py3.10/lib/python3.10/site-packages/harmony/harmony.py:986: ProcessingFailedException
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261790188-POCLOUD for test
INFO     root:verify_collection.py:383 Sending harmony request https://harmony.uat.earthdata.nasa.gov/C1256420925-POCLOUD/ogc-api-coverages/1.0.0/collections/all/coverage/rangeset?forceAsync=true&subset=lat%28-84.8785%3A84.6585%29&subset=lon%28-170.99025%3A171.00025%29&subset=time%28%222024-01-29T15%3A59%3A23.750000%22%3A%2A%29&granuleId=G1261790188-POCLOUD
INFO     root:verify_collection.py:387 Submitted harmony job 9ae1061c-b5b0-4e6b-8635-ad98e9a8c574


test_spatial_subset[C1254854453-LARC_CLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 0s]
Raw output
ValueError: time data '2024-01-23T23:33:50+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'
collection_concept_id = 'C1254854453-LARC_CLOUD', env = 'uat'
granule_json = {'meta': {'collection-concept-id': 'C1254854453-LARC_CLOUD', 'concept-id': 'G1261886308-LARC_CLOUD', 'concept-type': '... 'ProductionDateTime': '2024-01-24T03:32:00+00:00'}, 'GranuleUR': 'TEMPO_NO2_L2_V01_20240123T233350Z_S013G06.nc', ...}}
collection_variables = [{'associations': {'collections': [{'concept-id': 'C1254854453-LARC_CLOUD'}]}, 'meta': {'association-details': {'colle...s://cdn.earthdata.nasa.gov/umm/variable/v1.9.0', 'Version': '1.9.0'}, 'Name': '/product/vertical_column_troposphere'}}]
harmony_env = <Environment.UAT: 3>
tmp_path = PosixPath('/tmp/pytest-of-runner/pytest-0/popen-gw3/test_spatial_subset_C1254854450')
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...OKzBv8m-ip7LOVDcr4m1sSkBhX2fxIHDTZN8PCoFfkbXg1EJIMMK_T_n9U_lnCAufgR5X8MuM9rNcSfQ2A1ftJLFfsWn60h7EQLQeqpea9qV1ovWdqOt5g'

    @pytest.mark.timeout(600)
    def test_spatial_subset(collection_concept_id, env, granule_json, collection_variables,
                            harmony_env, tmp_path: pathlib.Path, bearer_token):
        test_spatial_subset.__doc__ = f"Verify spatial subset for {collection_concept_id} in {env}"
    
        logging.info("Using granule %s for test", granule_json['meta']['concept-id'])
    
        # Compute a box that is smaller than the granule extent bounding box
        north, south, east, west = get_bounding_box(granule_json)
        east, west, north, south = create_smaller_bounding_box(east, west, north, south, .95)
    
        start_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["BeginningDateTime"]
        end_time = granule_json['umm']["TemporalExtent"]["RangeDateTime"]["EndingDateTime"]
>       temporal_subset = get_half_temporal_extent(start_time, end_time)

verify_collection.py:374: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:167: in get_half_temporal_extent
    start_dt = datetime.strptime(start, '%Y-%m-%dT%H:%M:%S.%fZ')
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:568: in _strptime_datetime
    tt, fraction, gmtoff_fraction = _strptime(data_string, format)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

data_string = '2024-01-23T23:33:50+00:00', format = '%Y-%m-%dT%H:%M:%S.%fZ'

    def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        """Return a 2-tuple consisting of a time struct and an int containing
        the number of microseconds based on the input string and the
        format string."""
    
        for index, arg in enumerate([data_string, format]):
            if not isinstance(arg, str):
                msg = "strptime() argument {} must be str, not {}"
                raise TypeError(msg.format(index, type(arg)))
    
        global _TimeRE_cache, _regex_cache
        with _cache_lock:
            locale_time = _TimeRE_cache.locale_time
            if (_getlang() != locale_time.lang or
                time.tzname != locale_time.tzname or
                time.daylight != locale_time.daylight):
                _TimeRE_cache = TimeRE()
                _regex_cache.clear()
                locale_time = _TimeRE_cache.locale_time
            if len(_regex_cache) > _CACHE_MAX_SIZE:
                _regex_cache.clear()
            format_regex = _regex_cache.get(format)
            if not format_regex:
                try:
                    format_regex = _TimeRE_cache.compile(format)
                # KeyError raised when a bad format is found; can be specified as
                # \\, in which case it was a stray % but with a space after it
                except KeyError as err:
                    bad_directive = err.args[0]
                    if bad_directive == "\\":
                        bad_directive = "%"
                    del err
                    raise ValueError("'%s' is a bad directive in format '%s'" %
                                        (bad_directive, format)) from None
                # IndexError only occurs when the format string is "%"
                except IndexError:
                    raise ValueError("stray %% in format '%s'" % format) from None
                _regex_cache[format] = format_regex
        found = format_regex.match(data_string)
        if not found:
>           raise ValueError("time data %r does not match format %r" %
                             (data_string, format))
E           ValueError: time data '2024-01-23T23:33:50+00:00' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/_strptime.py:349: ValueError
--------------------------------- Captured Log ---------------------------------
INFO     root:verify_collection.py:366 Using granule G1261886308-LARC_CLOUD for test
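
Unlike the POCLOUD cases above, this TEMPO failure never reaches Harmony: get_half_temporal_extent parses BeginningDateTime with the fixed format '%Y-%m-%dT%H:%M:%S.%fZ', while this granule reports '2024-01-23T23:33:50+00:00' (UTC-offset form, no fractional seconds). A more tolerant parse that accepts both forms is sketched below; the helper name mirrors verify_collection.py, but its body (returning the middle half of the range) is an assumption, not the repository's implementation.

    from datetime import datetime, timezone


    def parse_umm_datetime(value: str) -> datetime:
        try:
            # Map a trailing 'Z' to an explicit UTC offset so Python 3.10's
            # fromisoformat accepts both '...+00:00' and '...Z' styles.
            return datetime.fromisoformat(value.replace('Z', '+00:00'))
        except ValueError:
            # Fall back to the format currently hard-coded at verify_collection.py:167.
            return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=timezone.utc)


    def get_half_temporal_extent(start: str, end: str) -> dict:
        start_dt = parse_umm_datetime(start)
        end_dt = parse_umm_datetime(end)
        quarter = (end_dt - start_dt) / 4
        # Middle half of the granule's temporal range; the real helper's exact
        # output shape is an assumption here.
        return {'start': start_dt + quarter, 'stop': end_dt - quarter}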