Showing 11 changed files with 1,176 additions and 0 deletions.
@@ -0,0 +1,231 @@
# Pair each acquisition with the acquisition closest to ~dt years later
name: FufitersOffsets
run-name: Offsets ${{ inputs.fullBurstID }} ${{ inputs.dt }} ${{ inputs.polarization }} ${{ inputs.nlooks }}

on:
  workflow_dispatch:
    inputs:
      fullBurstID:
        type: string
        required: true
        description: ESA Burst Identifier (RelativeOrbit, ID, Subswath)
        default: '012_023790_IW1'
      polarization:
        type: choice
        required: true
        description: Polarization
        default: 'VV'
        options: ['VV', 'VH', 'HH']
      dt:
        type: string
        required: true
        description: Temporal separation of offset pairs in years (4, 6, or 8 recommended)
        default: '8'
      nlooks:
        type: choice
        required: true
        description: Range x Azimuth Looks
        default: '5x1'
        options: ['20x4', '10x2', '5x1']
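
  # Example manual trigger via the GitHub CLI (a sketch; assumes gh is
  # authenticated against this repository):
  #   gh workflow run FufitersOffsets \
  #     -f fullBurstID=012_023790_IW1 -f polarization=VV -f dt=8 -f nlooks=5x1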

  # Must duplicate inputs for workflow_call (https://github.com/orgs/community/discussions/39357)
  workflow_call:
    inputs:
      fullBurstID:
        type: string
        required: true
      polarization:
        type: string
        required: true
      dt:
        type: string
        required: true
      nlooks:
        type: string
        required: true
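
  # A caller workflow can then invoke this one as a reusable workflow, e.g.
  # (a sketch; the job name and file path are hypothetical):
  #
  #   jobs:
  #     offsets:
  #       uses: ./.github/workflows/fufitersoffsets.yml
  #       with:
  #         fullBurstID: '012_023790_IW1'
  #         polarization: 'VV'
  #         dt: '8'
  #         nlooks: '5x1'
  #       secrets: inherit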

# Convert inputs to environment variables for all job steps
env:
  FullBurstId: ${{ inputs.fullBurstID }}
  Polarization: ${{ inputs.polarization }}
  NLooks: ${{ inputs.nlooks }}
  DT: ${{ inputs.dt }}

jobs:
  searchASF:
    runs-on: ubuntu-latest
    # Map a step output to a job output
    outputs:
      BURST_IDS: ${{ steps.asf-search.outputs.BURST_IDS }}
      MATRIX: ${{ steps.asf-search.outputs.MATRIX_PARAMS_COMBINATIONS }}
    defaults:
      run:
        shell: bash -el {0}
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      # Run this step as an inline Python script (https://words.yuvi.in/post/python-in-github-actions/)
      - name: Search ASF for bursts
        id: asf-search
        shell: bash -el -c "python -u {0}"
        run: |
          import asf_search as asf
          import fsspec
          import geopandas as gpd
          import json
          import os

          # Parse workflow inputs from environment variables
          DT = int(os.environ['DT'])
          POL = os.environ['Polarization']
          FULLBURSTID = os.environ['FullBurstId']
          RELORB, BURSTID, SUBSWATH = FULLBURSTID.split('_')
          print(RELORB, BURSTID, SUBSWATH)

          # Get centroid of burst from database
          url = 'https://github.com/relativeorbit/s1burstids/raw/main/burst_map_IW_000001_375887_brotli.parquet'
          with fsspec.open(url) as file:
              gfb = gpd.read_parquet(file,
                                     filters=[('burst_id', '=', int(BURSTID)),
                                              ('subswath_name', '=', SUBSWATH)]
                                     )
          print(gfb)

          # Search for SLCs covering the burst centroid
          results = asf.search(platform=[asf.PLATFORM.SENTINEL1],
                               processingLevel=asf.PRODUCT_TYPE.SLC,
                               beamMode=asf.BEAMMODE.IW,
                               intersectsWith=gfb.iloc[0].geometry.centroid.wkt,
                               relativeOrbit=int(RELORB),
                               )
          gf = gpd.GeoDataFrame.from_features(results.geojson(), crs=4326)
          print('Results:', len(gf))

          # For the case of frame overlap, ensure SLCs contain the full burst
          def get_overlap_area(gf, gfREF):
              frame_area = gfREF.iloc[0].geometry.area
              overlaps = gf.geometry.map(lambda x: x.intersection(gfREF.geometry.iloc[0]).area / frame_area)
              return overlaps

          gf['overlap'] = get_overlap_area(gf, gfb)
          gf = gf.query('overlap >= 0.80').reset_index(drop=True)

          # Sort chronologically, ascending
          gf['datetime'] = gpd.pd.to_datetime(gf.startTime)
          gf = gf.sort_values(by='datetime', ignore_index=True)
          print('Number of Acquisitions: ', len(gf))
          burstIDs = gf.sceneName.to_list()
          print('\n'.join(burstIDs))

          # Create matrix job mapping (JSON array): pair each acquisition with
          # the acquisition closest to DT years later
          pairs = []
          gf.set_index('datetime', inplace=True, drop=False)
          for index, row in gf.iterrows():
              dt = gf.index[-1] - index
              if dt < gpd.pd.Timedelta(days=365 * DT):
                  # Remaining acquisitions are less than DT years from the last one
                  print(f'{row.sceneName} within {DT} years of last acquisition')
                  break
              refname = row.sceneName
              ts = index + gpd.pd.DateOffset(years=DT)
              idx = gf.index.get_indexer([ts], method='nearest')[0]
              sec = gf.iloc[idx]
              secname = sec.sceneName
              # Characters 17-24 of an SLC scene name are the acquisition date (YYYYMMDD)
              shortname = f'{refname[17:25]}_{secname[17:25]}'
              pairs.append({'reference': refname, 'secondary': secname, 'name': shortname})
          matrixJSON = f'{{"include":{json.dumps(pairs)}}}'
          print(f'Number of Offset Pairs: {len(pairs)}')
          print(matrixJSON)

          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              print(f'BURST_IDS={burstIDs}', file=f)
              print(f'MATRIX_PARAMS_COMBINATIONS={matrixJSON}', file=f)
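
  # The MATRIX output above drives the job matrix below; it is a JSON object of the form
  # {"include":[{"reference":"<SLC scene name>","secondary":"<SLC scene name>","name":"YYYYMMDD_YYYYMMDD"}, ...]}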
  hyp3-isce2:
    needs: searchASF
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash -el {0}
    strategy:
      matrix: ${{ fromJson(needs.searchASF.outputs.MATRIX) }}
    continue-on-error: true
    name: ${{ matrix.name }}

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          repository: 'relativeorbit/hyp3-isce2'
          ref: 'denseoffsetsSLCs'

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      - name: Development install
        run: pip install -e .

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2

      - name: Get Bucket Prefix
        env:
          REF: ${{ matrix.reference }}
          SEC: ${{ matrix.secondary }}
          BURST: ${{ inputs.fullBurstID }}
          DT: ${{ inputs.dt }}
        run: |
          # Characters 17-24 of an SLC scene name are the acquisition date (YYYYMMDD)
          PREFIX=${BURST}/${DT}/${REF:17:8}_${SEC:17:8}
          echo "PREFIX=${PREFIX}" >> $GITHUB_ENV
      - name: Run Hyp3-ISCE2
        env:
          EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
          EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
          ESA_USERNAME: ${{ secrets.ESA_USERNAME }}
          ESA_PASSWORD: ${{ secrets.ESA_PASSWORD }}
        run: |
          python -m hyp3_isce2 ++process insar_tops_fufiters \
            ${{ matrix.reference }} \
            ${{ matrix.secondary }} \
            --burstId ${{ inputs.fullBurstID }} \
            --polarization ${{ inputs.polarization }} \
            --looks ${{ inputs.nlooks }} \
            --apply-water-mask False

      # - name: Create COGs + STAC Metadata
      #   run: |
      #     # Just install a couple of extra dependencies for the script
      #     # pip install pystac rasterio rio-stac jsonschema
      #     ls
      #     python contrib/hyp3isce2stac.py

      - name: Upload to AWS S3
        run: |
          OUTDIR=$(ls -d S1_*)
          aws s3 sync $OUTDIR s3://fufiters/backprocess_offsets/$PREFIX/$OUTDIR

      # - name: Upload Hyp3 Output to GitHub
      #   uses: actions/upload-artifact@v4
      #   with:
      #     name: ${{ env.PREFIX }}
      #     path: S1*.zip
@@ -0,0 +1,40 @@
# Try running on macOS instead of Linux
name: MacOSTest

on:
  workflow_dispatch:

jobs:
  hyp3-isce2:
    runs-on: macos-latest
    defaults:
      run:
        shell: bash -el {0}

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          repository: 'ASFHyP3/hyp3-isce2'

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      - name: Development install
        run: pip install -e .

      - name: Run Example Workflow
        env:
          EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
          EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
          ESA_USERNAME: ${{ secrets.ESA_USERNAME }}
          ESA_PASSWORD: ${{ secrets.ESA_PASSWORD }}
        run: |
          python -m hyp3_isce2 ++process insar_tops_burst \
            S1_136231_IW2_20200604T022312_VV_7C85-BURST \
            S1_136231_IW2_20200616T022313_VV_5D11-BURST \
            --looks 20x4 \
            --apply-water-mask False
@@ -0,0 +1,75 @@
# Single reference + list of secondary bursts
# HyP3-ISCE2 installed from latest commit
name: ParallelPairs

on:
  workflow_dispatch:
    inputs:
      reference:
        type: string
        required: true
        description: Reference Burst
        default: S1_023790_IW1_20231206T121430_VV_D537-BURST

# TODO: dynamic creation of secondary list (n subsequent pairs); see the sketch below
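# One possible approach (a hypothetical sketch mirroring the searchASF job in
# FufitersOffsets): search ASF for the burst's acquisitions, then slice out the
# n scenes that follow the reference, e.g. in an inline Python step:
#
#   burstIDs = gf.sceneName.to_list()          # chronologically sorted scene names
#   idx = burstIDs.index(os.environ['REF'])    # locate the reference burst
#   secondaries = burstIDs[idx + 1 : idx + 1 + n]
#   # ...then write secondaries as a JSON matrix to $GITHUB_OUTPUT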

jobs:
  hyp3-isce2-matrix:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash -el {0}
    strategy:
      matrix:
        secondary: [S1_023790_IW1_20231218T121430_VV_B9A6-BURST, S1_023790_IW1_20231230T121429_VV_B9CD-BURST, S1_023790_IW1_20240111T121428_VV_5827-BURST]

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          repository: 'ASFHyP3/hyp3-isce2'

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      - name: Development install
        run: pip install -e .

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2

      - name: Get Bucket Prefix
        env:
          REF: ${{ inputs.reference }}
          SEC: ${{ matrix.secondary }}
        run: |
          # Characters 14-21 of a burst scene name are the acquisition date (YYYYMMDD)
          PREFIX=${REF:14:8}_${SEC:14:8}
          echo "PREFIX=${PREFIX}" >> $GITHUB_ENV
      - name: Run Example Workflow
        env:
          EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
          EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
          ESA_USERNAME: ${{ secrets.ESA_USERNAME }}
          ESA_PASSWORD: ${{ secrets.ESA_PASSWORD }}
        run: |
          python -m hyp3_isce2 ++process insar_tops_burst \
            ${{ inputs.reference }} \
            ${{ matrix.secondary }} \
            --looks 20x4 \
            --apply-water-mask False \
            --bucket fufiters \
            --bucket-prefix $PREFIX

      - name: Upload Hyp3 Output
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.PREFIX }}
          path: S1*.zip