Phase 2024 012_023790_IW1 VV 20x4 3 #3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# n+1, n+2, n+3 pairs for a given burst + year | |
name: InSAR_Timeseries | |
run-name: Phase ${{ inputs.year }} ${{ inputs.burstId }} ${{ inputs.polarization }} ${{ inputs.looks }} ${{ inputs.npairs }} | |
on: | |
workflow_dispatch: | |
inputs: | |
burstId: | |
type: string | |
required: true | |
description: ESA Burst Identifier (RelativeObit, ID, Subswath) | |
default: '012_023790_IW1' | |
polarization: | |
type: choice | |
required: true | |
description: Polarization | |
default: 'VV' | |
options: ['VV', 'VH', 'HH'] | |
looks: | |
type: choice | |
required: true | |
description: Range x Azimuth Looks | |
default: '20x4' | |
options: ['20x4','10x2','5x1'] | |
year: | |
type: string | |
required: true | |
description: Year | |
default: '2024' | |
npairs: | |
type: choice | |
required: true | |
description: Number of Pairs per Reference | |
default: '3' | |
options: ['3','2','1'] | |
# Must duplicate inputs for workflow_call (https://github.com/orgs/community/discussions/39357) | |
workflow_call: | |
inputs: | |
burstId: | |
type: string | |
required: true | |
polarization: | |
type: string | |
required: true | |
year: | |
type: string | |
required: true | |
looks: | |
type: string | |
required: true | |
npairs: | |
type: string | |
required: true | |
# Convert inputs to environment variables for all job steps | |
env: | |
burstId: ${{ inputs.burstId }} | |
Year: ${{ inputs.year }} | |
Polarization: ${{ inputs.polarization }} | |
Looks: ${{ inputs.looks }} | |
NPairs: ${{ inputs.npairs }} | |
jobs: | |
searchASF: | |
runs-on: ubuntu-latest | |
# Map a step output to a job output | |
outputs: | |
BURST_IDS: ${{ steps.asf-search.outputs.BURST_IDS }} | |
MATRIX: ${{ steps.asf-search.outputs.MATRIX_PARAMS_COMBINATIONS }} | |
defaults: | |
run: | |
shell: bash -el {0} | |
steps: | |
- name: Checkout Repository | |
uses: actions/checkout@v4 | |
- name: Install Conda environment with Micromamba | |
uses: mamba-org/setup-micromamba@v1 | |
with: | |
cache-environment: true | |
environment-file: environment.yml | |
# https://words.yuvi.in/post/python-in-github-actions/ | |
- name: Search ASF for bursts | |
id: asf-search | |
shell: bash -el -c "python -u {0}" | |
run: | | |
import asf_search as asf | |
import fsspec | |
import geopandas as gpd | |
import json | |
import os | |
# Parse Workflow inputs from environment variables | |
START = int(os.environ['Year']) | |
END = START+1 | |
POL = os.environ['Polarization'] | |
FULLBURSTID = os.environ['burstId'] | |
RELORB,BURSTID,SUBSWATH = FULLBURSTID.split('_') | |
print(RELORB,BURSTID,SUBSWATH) | |
# Get centroid of burst from database | |
url = 'https://github.com/relativeorbit/s1burstids/raw/main/burst_map_IW_000001_375887_brotli.parquet' | |
with fsspec.open(url) as file: | |
gfb = gpd.read_parquet(file, | |
filters=[('burst_id', '=', int(BURSTID)), | |
('subswath_name', '=', SUBSWATH)] | |
) | |
print(gfb) | |
# Search for SLCs | |
results = asf.search(platform=[asf.PLATFORM.SENTINEL1], | |
processingLevel=asf.PRODUCT_TYPE.SLC, | |
beamMode=asf.BEAMMODE.IW, | |
intersectsWith=gfb.iloc[0].geometry.centroid.wkt, | |
relativeOrbit=int(RELORB), | |
start=f"{START}-01-01", | |
end=f"{END}-03-01", #march to ensure we get some overlapping coverage for each year | |
) | |
gf = gpd.GeoDataFrame.from_features(results.geojson(), crs=4326) | |
print('Results:', len(gf)) | |
# For case of frame overlap, ensure SLCs contain full burst | |
def get_overlap_area(gf, gfREF): | |
frame_area = gfREF.iloc[0].geometry.area | |
overlaps = gf.geometry.map(lambda x: x.intersection(gfREF.geometry.iloc[0]).area/frame_area) | |
return overlaps | |
gf['overlap'] = get_overlap_area(gf, gfb) | |
gf = gf.query('overlap >= 0.80').reset_index(drop=True) | |
# Sort chronological ascending | |
gf['datetime'] = gpd.pd.to_datetime(gf.startTime) | |
gf = gf.sort_values(by='datetime', ignore_index=True) | |
print('Number of Acquisitions: ', len(gf)) | |
burstIDs = gf.sceneName.to_list() | |
print('\n'.join(burstIDs)) | |
# Create Matrix Job Mapping (JSON Array) | |
idx_end_of_year = gf.index[gf.datetime.dt.year == START][-1] | |
pairs = [] | |
for r in range(idx_end_of_year + 1): | |
for s in range(1, ${{ inputs.npairs }} + 1 ): | |
try: | |
ref = burstIDs[r] | |
sec = burstIDs[r+s] | |
shortname = f'{ref[17:25]}_{sec[17:25]}' | |
pairs.append({'reference': ref, 'secondary': sec, 'name':shortname}) | |
except IndexError as e: | |
print(f'ASF Search did not return a n+{s} pair for {ref}') | |
matrixJSON = f'{{"include":{json.dumps(pairs)}}}' | |
print(f'Number of Interferograms: {len(pairs)}') | |
print(matrixJSON) | |
with open(os.environ['GITHUB_OUTPUT'], 'a') as f: | |
print(f'BURST_IDS={burstIDs}', file=f) | |
print(f'MATRIX_PARAMS_COMBINATIONS={matrixJSON}', file=f) | |
hyp3-isce2: | |
needs: searchASF | |
strategy: | |
matrix: ${{ fromJson(needs.searchASF.outputs.MATRIX) }} | |
uses: ./.github/workflows/insar_pair.yml | |
with: | |
reference: ${{ matrix.reference }} | |
secondary: ${{ matrix.secondary }} | |
burstId: ${{ inputs.burstId }} | |
year: ${{ matrix.year }} | |
polarization: ${{ inputs.polarization }} | |
looks: ${{ inputs.looks }} | |
jobname: ${{ matrix.name }} | |
secrets: inherit |