# n+1, n+2, n+3 pairs for a given burst + year
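# For each reference acquisition in the chosen year, form interferogram pairs
# with the next `npairs` acquisitions. For example, with npairs=3 and a 12-day
# repeat, 20240103 would pair with 20240115, 20240127, and 20240208
# (dates illustrative).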
name: TimeseriesSLC
run-name: Phase ${{ inputs.year }} ${{ inputs.fullBurstID }} ${{ inputs.polarization }} ${{ inputs.nlooks }} ${{ inputs.npairs }}
on:
  workflow_dispatch:
    inputs:
      fullBurstID:
        type: string
        required: true
        description: ESA Burst Identifier (RelativeOrbit, ID, Subswath)
        default: '012_023790_IW1'
      polarization:
        type: choice
        required: true
        description: Polarization
        default: 'VV'
        options: ['VV', 'VH', 'HH']
      year:
        type: string
        required: true
        description: Year
        default: '2024'
      nlooks:
        type: choice
        required: true
        description: Range x Azimuth Looks
        default: '20x4'
        options: ['20x4', '10x2', '5x1']
      npairs:
        type: choice
        required: true
        description: Number of Pairs per Reference
        default: '3'
        options: ['3', '2', '1']
# Convert inputs to environment variables for all job steps
env:
  FullBurstId: ${{ inputs.fullBurstID }}
  Year: ${{ inputs.year }}
  Polarization: ${{ inputs.polarization }}
  NLooks: ${{ inputs.nlooks }}
  NPairs: ${{ inputs.npairs }}
jobs:
  searchASF:
    runs-on: ubuntu-latest
    # Map a step output to a job output
    outputs:
      BURST_IDS: ${{ steps.asf-search.outputs.BURST_IDS }}
      MATRIX: ${{ steps.asf-search.outputs.MATRIX_PARAMS_COMBINATIONS }}
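    # A login shell (-l) is required so the micromamba-activated environment
    # is on PATH in each run step; -e exits on the first error.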
    defaults:
      run:
        shell: bash -el {0}
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      # https://words.yuvi.in/post/python-in-github-actions/
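      # GitHub writes the run block below to a temp file and substitutes its
      # path for {0}, so the YAML block executes as an inline Python script
      # (-u keeps prints unbuffered so they appear in the log immediately).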
      - name: Search ASF for bursts
        id: asf-search
        shell: bash -el -c "python -u {0}"
        run: |
          import asf_search as asf
          import fsspec
          import geopandas as gpd
          import json
          import os

          # Parse Workflow inputs from environment variables
          START = int(os.environ['Year'])
          END = START + 1
          POL = os.environ['Polarization']
          FULLBURSTID = os.environ['FullBurstId']
          RELORB, BURSTID, SUBSWATH = FULLBURSTID.split('_')
          print(RELORB, BURSTID, SUBSWATH)

          # Get centroid of burst from database
          url = 'https://github.com/relativeorbit/s1burstids/raw/main/burst_map_IW_000001_375887_brotli.parquet'
          with fsspec.open(url) as file:
              gfb = gpd.read_parquet(file,
                                     filters=[('burst_id', '=', int(BURSTID)),
                                              ('subswath_name', '=', SUBSWATH)])
          print(gfb)

          # Search for SLCs
          results = asf.search(platform=[asf.PLATFORM.SENTINEL1],
                               processingLevel=asf.PRODUCT_TYPE.SLC,
                               beamMode=asf.BEAMMODE.IW,
                               intersectsWith=gfb.iloc[0].geometry.centroid.wkt,
                               relativeOrbit=int(RELORB),
                               start=f"{START}-01-01",
                               # March end date ensures overlapping coverage into the next year
                               end=f"{END}-03-01")
          gf = gpd.GeoDataFrame.from_features(results.geojson(), crs=4326)
          print('Results:', len(gf))

          # For the case of frame overlap, ensure SLCs contain the full burst
          def get_overlap_area(gf, gfREF):
              frame_area = gfREF.iloc[0].geometry.area
              overlaps = gf.geometry.map(lambda x: x.intersection(gfREF.geometry.iloc[0]).area / frame_area)
              return overlaps

          gf['overlap'] = get_overlap_area(gf, gfb)
          gf = gf.query('overlap >= 0.80').reset_index(drop=True)
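          # NOTE: areas above are computed in degrees (EPSG:4326); that's fine
          # as a relative overlap fraction, though not a true metric area.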
          # Sort chronologically, ascending
          gf['datetime'] = gpd.pd.to_datetime(gf.startTime)
          gf = gf.sort_values(by='datetime', ignore_index=True)
          print('Number of Acquisitions: ', len(gf))
          burstIDs = gf.sceneName.to_list()
          print('\n'.join(burstIDs))

          # Create Matrix Job Mapping (JSON Array)
          idx_end_of_year = gf.index[gf.datetime.dt.year == START][-1]
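          # Only scenes acquired in the target year serve as references; the
          # extra January-February scenes from the following year act only as
          # secondaries. The `inputs.npairs` expression below is substituted
          # by the Actions runner before this script runs, so the inner loop
          # bound is a literal integer.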
          pairs = []
          for r in range(idx_end_of_year + 1):
              for s in range(1, ${{ inputs.npairs }} + 1):
                  ref = burstIDs[r]
                  sec = burstIDs[r + s]
                  shortname = f'{ref[17:25]}_{sec[17:25]}'
                  pairs.append({'reference': ref, 'secondary': sec, 'name': shortname})
          matrixJSON = f'{{"include":{json.dumps(pairs)}}}'
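          # Scene name characters [17:25] are the YYYYMMDD acquisition date,
          # so matrixJSON looks like (illustrative dates):
          # {"include": [{"reference": "S1A_IW_SLC__1SDV_20240103...",
          #               "secondary": "S1A_IW_SLC__1SDV_20240115...",
          #               "name": "20240103_20240115"}, ...]}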
          print(f'Number of Interferograms: {len(pairs)}')
          print(matrixJSON)
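          # Writing KEY=value lines to $GITHUB_OUTPUT exposes them as step
          # outputs (steps.asf-search.outputs.*), which the job maps to job
          # outputs for the hyp3-isce2 matrix. Each value must be one line.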
          with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
              print(f'BURST_IDS={burstIDs}', file=f)
              print(f'MATRIX_PARAMS_COMBINATIONS={matrixJSON}', file=f)
  hyp3-isce2:
    needs: searchASF
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash -el {0}
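    # Launch one job per pair: fromJson parses the {"include": [...]} string
    # from searchASF, and each entry exposes matrix.reference,
    # matrix.secondary, and matrix.name. continue-on-error keeps one failed
    # pair from canceling the remaining jobs.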
    strategy:
      matrix: ${{ fromJson(needs.searchASF.outputs.MATRIX) }}
    continue-on-error: true
    name: ${{ matrix.name }}
    steps:
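      # Check out a fork/branch of ASF's hyp3-isce2, which presumably provides
      # the insar_tops_fufiters entrypoint invoked below.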
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          repository: 'relativeorbit/hyp3-isce2'
          ref: 'backprocess'
      - name: Install Conda environment with Micromamba
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          environment-file: environment.yml

      - name: Development install
        run: pip install -e .

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Get Bucket Prefix
        env:
          REF: ${{ matrix.reference }}
          SEC: ${{ matrix.secondary }}
          BURST: ${{ inputs.fullBurstID }}
          YEAR: ${{ inputs.year }}
        run: |
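          # ${REF:17:8} / ${SEC:17:8} extract the YYYYMMDD acquisition dates
          # from the SLC scene names, giving a prefix like
          # 012_023790_IW1/2024/20240103_20240115 (illustrative dates)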
          PREFIX=${BURST}/${YEAR}/${REF:17:8}_${SEC:17:8}
          echo "PREFIX=${PREFIX}" >> "$GITHUB_ENV"
      - name: Run Hyp3-ISCE2
        env:
          EARTHDATA_USERNAME: ${{ secrets.EARTHDATA_USERNAME }}
          EARTHDATA_PASSWORD: ${{ secrets.EARTHDATA_PASSWORD }}
          ESA_USERNAME: ${{ secrets.ESA_USERNAME }}
          ESA_PASSWORD: ${{ secrets.ESA_PASSWORD }}
        run: |
          python -m hyp3_isce2 ++process insar_tops_fufiters \
            ${{ matrix.reference }} \
            ${{ matrix.secondary }} \
            --burstId ${{ inputs.fullBurstID }} \
            --polarization ${{ inputs.polarization }} \
            --looks ${{ inputs.nlooks }} \
            --apply-water-mask False
      # - name: Create COGs + STAC Metadata
      #   run: |
      #     # Install a couple of extra dependencies for the script
      #     # pip install pystac rasterio rio-stac jsonschema
      #     ls
      #     python contrib/hyp3isce2stac.py
      - name: Upload to AWS S3
        run: |
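          # hyp3-isce2 writes its product to a directory named S1_*; sync it
          # under the burst/year/pair prefix computed above (assumes exactly
          # one matching directory).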
          OUTDIR=$(ls -d S1_*)
          aws s3 sync "$OUTDIR" s3://fufiters/backprocess/$PREFIX/$OUTDIR
      # - name: Upload Hyp3 Output to GitHub
      #   uses: actions/upload-artifact@v4
      #   with:
      #     name: ${{ env.PREFIX }}
      #     path: S1*.zip