Code refactoring to use Stack.PRM() for original subswaths and Stack.PRM_merged() for a single virtual raster. There is no need to store the merged PRMs on disk because they can easily be generated on the fly.
AlexeyPechnikov committed Sep 23, 2024
1 parent 9cab208 commit bb176d6
Showing 11 changed files with 148 additions and 143 deletions.
10 changes: 5 additions & 5 deletions pygmtsar/pygmtsar/IO.py
@@ -282,6 +282,7 @@ def get_filenames(self, pairs, name, add_subswath=False):
# return ds_scaled.where(ds_scaled != 0)

# 2.5e-07 is Sentinel-1 scale factor
# use original PRM files to get binary subswath file locations
def open_data(self, dates=None, scale=2.5e-07, debug=False):
import xarray as xr
import pandas as pd
@@ -301,10 +302,6 @@ def open_data(self, dates=None, scale=2.5e-07, debug=False):
if not isinstance(subswaths, (str, int)):
subswaths = ''.join(map(str, subswaths))

# DEM extent in radar coordinates, merged reference PRM required
#print ('minx, miny, maxx, maxy', minx, miny, maxx, maxy)
extent_ra = np.round(self.get_extent_ra(subswath=subswaths[0]).bounds).astype(int)

if len(subswaths) == 1:
# stack single subswath
stack = []
@@ -320,7 +317,7 @@ def open_data(self, dates=None, scale=2.5e-07, debug=False):
else:

#offsets = {'bottoms': bottoms, 'lefts': lefts, 'rights': rights, 'bottom': minh, 'extent': [maxy, maxx], 'ylims': ylims, 'xlims': xlims}
offsets = self.subswaths_offsets(debug=debug)
offsets = self.prm_offsets(debug=debug)
maxy, maxx = offsets['extent']
minh = offsets['bottom']

@@ -354,6 +351,9 @@ def open_data(self, dates=None, scale=2.5e-07, debug=False):
stack.append(slc.assign_coords(date=date))
del slc

# DEM extent in radar coordinates, merged reference PRM required
#print ('minx, miny, maxx, maxy', minx, miny, maxx, maxy)
extent_ra = np.round(self.get_extent_ra().bounds).astype(int)
# minx, miny, maxx, maxy = extent_ra
ds = xr.concat(stack, dim='date').assign(date=pd.to_datetime(dates))\
.sel(y=slice(extent_ra[1], extent_ra[3]), x=slice(extent_ra[0], extent_ra[2])) \
110 changes: 4 additions & 106 deletions pygmtsar/pygmtsar/Stack_align.py
@@ -73,7 +73,10 @@ def _get_topo_llt(self, subswath, degrees, debug=False):
warnings.filterwarnings('ignore')

# add buffer around the cropped area for borders interpolation
dem_area = self.get_dem(subswath)
dem_area = self.get_dem()

# TBD: crop dem to subswath

ny = int(np.round(degrees/dem_area.lat.diff('lat')[0]))
nx = int(np.round(degrees/dem_area.lon.diff('lon')[0]))
if debug:
@@ -269,111 +272,6 @@ def _align_rep_subswath(self, subswath, date=None, degrees=12.0/3600, debug=False):
#if os.path.exists(filename):
os.remove(filename)

    def subswaths_offsets(self, debug=False):
        import xarray as xr
        import numpy as np
        from scipy import constants

        subswaths = self.get_subswaths()
        if not isinstance(subswaths, (str, int)):
            subswaths = ''.join(map(str, subswaths))

        if len(subswaths) == 1:
            prm = self.PRM()
            maxx, yvalid, num_patch = prm.get('num_rng_bins', 'num_valid_az', 'num_patches')
            maxy = yvalid * num_patch
            offsets = {'bottom': 0, 'extent': [maxy, maxx]}
            if debug:
                print ('offsets', offsets)
            return offsets

        # calculate the offsets to merge subswaths
        prms = []
        ylims = []
        xlims = []
        for subswath in subswaths:
            #print (subswath)
            prm = self.PRM(subswath=subswath)
            prms.append(prm)
            ylims.append(prm.get('num_valid_az'))
            xlims.append(prm.get('num_rng_bins'))

        assert len(np.unique([prm.get('PRF') for prm in prms])) == 1, 'Image PRFs are not consistent'
        assert len(np.unique([prm.get('rng_samp_rate') for prm in prms])) == 1, 'Image range sampling rates are not consistent'

        bottoms = [0] + [int(np.round(((prm.get('clock_start') - prms[0].get('clock_start')) * 86400 * prms[0].get('PRF')))) for prm in prms[1:]]
        # head123: 0, 466, -408
        if debug:
            print ('bottoms init', bottoms)
        # minh: -408
        minh = min(bottoms)
        if debug:
            print ('minh', minh)
        #head123: 408, 874, 0
        bottoms = np.asarray(bottoms) - minh
        if debug:
            print ('bottoms', bottoms)

        #ovl12,23: 2690, 2558
        ovls = [prm1.get('num_rng_bins') - \
                int(np.round(((prm2.get('near_range') - prm1.get('near_range')) / (constants.speed_of_light/ prm1.get('rng_samp_rate') / 2)))) \
                for (prm1, prm2) in zip(prms[:-1], prms[1:])]
        if debug:
            print ('ovls', ovls)

        #Writing the grid files..Size(69158x13075)...
        #maxy: 13075
        # for SLC
        maxy = max([prm.get('num_valid_az') + bottom for prm, bottom in zip(prms, bottoms)])
        if debug:
            print ('maxy', maxy)
        maxx = sum([prm.get('num_rng_bins') - ovl - 1 for prm, ovl in zip(prms, [-1] + ovls)])
        if debug:
            print ('maxx', maxx)

        #Stitching location n1 = 1045
        #Stitching location n2 = 935
        ns = [np.ceil(-prm.get('rshift') + prm.get('first_sample') + 150.0).astype(int) for prm in prms[1:]]
        ns = [10 if n < 10 else n for n in ns]
        if debug:
            print ('ns', ns)

        # left and right coordinates for every subswath valid area
        lefts = []
        rights = []

        # 1st
        xlim = prms[0].get('num_rng_bins') - ovls[0] + ns[0]
        lefts.append(0)
        rights.append(xlim)

        # 2nd
        if len(prms) == 2:
            xlim = prms[1].get('num_rng_bins') - 1
        else:
            # for 3 subswaths
            xlim = prms[1].get('num_rng_bins') - ovls[1] + ns[1]
        lefts.append(ns[0])
        rights.append(xlim)

        # 3rd
        if len(prms) == 3:
            xlim = prms[2].get('num_rng_bins') - 2
            lefts.append(ns[1])
            rights.append(xlim)

        # check and merge SLCs
        sumx = sum([right-left for right, left in zip(rights, lefts)])
        if debug:
            print ('assert maxx == sum(...)', maxx, sumx)
        assert maxx == sumx, 'Incorrect output grid range dimension size'

        offsets = {'bottoms': bottoms, 'lefts': lefts, 'rights': rights, 'bottom': minh, 'extent': [maxy, maxx], 'ylims': ylims, 'xlims': xlims}
        if debug:
            print ('offsets', offsets)

        return offsets

def baseline_table(self, n_jobs=-1, debug=False):
"""
Generates a baseline table for Sentinel-1 data, containing dates and baseline components.
16 changes: 5 additions & 11 deletions pygmtsar/pygmtsar/Stack_dem.py
@@ -16,7 +16,7 @@ class Stack_dem(Stack_reframe):

buffer_degrees = 0.02

def get_extent_ra(self, subswath=None):
def get_extent_ra(self):
"""
minx, miny, maxx, maxy = np.round(geom.bounds).astype(int)
"""
@@ -25,7 +25,7 @@ def get_extent_ra(self):

dem = self.get_dem()
df = dem.isel(lon=[0,-1]).to_dataframe().reset_index()
geom = self.geocode(LineString(np.column_stack([df.lon, df.lat])), subswath=subswath)
geom = self.geocode(LineString(np.column_stack([df.lon, df.lat])))
return geom

# def get_extent(self, grid=None, subswath=None):
@@ -110,14 +110,12 @@ def set_dem(self, dem_filename):
# 0.02 degrees works well worldwide but not in Siberia
# minimum buffer size: 8 arc seconds for 90 m DEM
# subswath argument is required for aligning
def get_dem(self, subswath=None):
def get_dem(self):
"""
Retrieve the digital elevation model (DEM) data.
Parameters
----------
subswath : str, optional
Subswath name. Default is None.
Returns
-------
@@ -131,12 +129,8 @@ def get_dem(self):
Examples
--------
Get DEM for all the processed subswaths:
topo_ll = stack.get_dem()
Get DEM for a single subswath IW1:
topo_ll = stack.get_dem(1)
Notes
-----
This method retrieves the digital elevation model (DEM) data previously downloaded and stored in a NetCDF file.
Expand Down Expand Up @@ -167,8 +161,8 @@ def get_dem(self, subswath=None):
dem['lat'] = dem.lat.round(8)
dem['lon'] = dem.lon.round(8)

# crop to reference scene and subswath if defined
bounds = self.get_bounds(self.get_reference(subswath))
# crop to reference scene
bounds = self.get_bounds(self.get_reference())
return dem\
.transpose('lat','lon')\
.sel(lat=slice(bounds[1] - self.buffer_degrees, bounds[3] + self.buffer_degrees),
4 changes: 2 additions & 2 deletions pygmtsar/pygmtsar/Stack_geocode.py
@@ -56,7 +56,7 @@ def compute_geocode(self, coarsen=60.):
# coarsen=4:
# nearest: coords [array([596.42352295]), array([16978.65625])]
# linear: coords [array([597.1080563]), array([16977.35608873])]
def geocode(self, geometry, subswath=None, z_offset=None):
def geocode(self, geometry, z_offset=None):
"""
Inverse geocode input geodataframe with 2D or 3D points.
@@ -87,7 +87,7 @@ def geocode(self, geometry, z_offset=None):
geometries = [geometry]

dem = self.get_dem()
prm = self.PRM(subswath=subswath)
prm = self.PRM_merged()

def coords_transform(coords):
# uses external variables dem, prm
6 changes: 3 additions & 3 deletions pygmtsar/pygmtsar/Stack_incidence.py
@@ -296,7 +296,7 @@ def los_displacement_mm(self, data):

# constant is negative to make LOS = -1 * range change
# constant is (1000 mm) / (4 * pi)
scale = -79.58 * self.PRM().get('radar_wavelength')
scale = -79.58 * self.PRM_merged().get('radar_wavelength')

if isinstance(data, (list, tuple)):
return scale*np.asarray(data)
@@ -438,7 +438,7 @@ def elevation_m(self, data, baseline=1):

# expected accuracy about 0.01%
#wavelength, slant_range = self.PRM().get('radar_wavelength','SC_height')
wavelength, slant_range_start,slant_range_end = self.PRM().get('radar_wavelength', 'SC_height_start', 'SC_height_end')
wavelength, slant_range_start,slant_range_end = self.PRM_merged().get('radar_wavelength', 'SC_height_start', 'SC_height_end')

incidence_angle = self.incidence_angle()
slant_range = xr.DataArray(np.linspace(slant_range_start,slant_range_end, incidence_angle.shape[1]),
@@ -465,7 +465,7 @@ def compute_satellite_look_vector(self, interactive=False):
def SAT_look(z, lat, lon):
coords = np.column_stack([lon.ravel(), lat.ravel(), z.ravel()])
# look_E look_N look_U
look = self.PRM().SAT_look(coords, binary=True)\
look = self.PRM_merged().SAT_look(coords, binary=True)\
.astype(np.float32)\
.reshape(z.shape[0], z.shape[1], 6)[...,3:]
return look
116 changes: 115 additions & 1 deletion pygmtsar/pygmtsar/Stack_prm.py
@@ -41,4 +41,118 @@ def PRM(self, date=None, subswath=None):

prefix = self.multistem_stem(subswath, date)
filename = os.path.join(self.basedir, f'{prefix}.PRM')
return PRM.from_file(filename)
return PRM.from_file(filename)

    def PRM_merged(self, date=None, offsets='auto'):

        if isinstance(offsets, str) and offsets == 'auto':
            offsets = self.prm_offsets()

        maxy, maxx = offsets['extent']
        minh = offsets['bottom']
        return self.PRM(date=date).fix_merged(maxy, maxx, minh)

    def prm_offsets(self, debug=False):
        import xarray as xr
        import numpy as np
        from scipy import constants

        subswaths = self.get_subswaths()
        if not isinstance(subswaths, (str, int)):
            subswaths = ''.join(map(str, subswaths))

        if len(subswaths) == 1:
            prm = self.PRM(subswath=int(subswaths))
            maxx, yvalid, num_patch = prm.get('num_rng_bins', 'num_valid_az', 'num_patches')
            maxy = yvalid * num_patch
            offsets = {'bottom': 0, 'extent': [maxy, maxx]}
            if debug:
                print ('offsets', offsets)
            return offsets

        # calculate the offsets to merge subswaths
        prms = []
        ylims = []
        xlims = []
        for subswath in subswaths:
            #print (subswath)
            prm = self.PRM(subswath=subswath)
            prms.append(prm)
            ylims.append(prm.get('num_valid_az'))
            xlims.append(prm.get('num_rng_bins'))

        assert len(np.unique([prm.get('PRF') for prm in prms])) == 1, 'Image PRFs are not consistent'
        assert len(np.unique([prm.get('rng_samp_rate') for prm in prms])) == 1, 'Image range sampling rates are not consistent'

        bottoms = [0] + [int(np.round(((prm.get('clock_start') - prms[0].get('clock_start')) * 86400 * prms[0].get('PRF')))) for prm in prms[1:]]
        # head123: 0, 466, -408
        if debug:
            print ('bottoms init', bottoms)
        # minh: -408
        minh = min(bottoms)
        if debug:
            print ('minh', minh)
        #head123: 408, 874, 0
        bottoms = np.asarray(bottoms) - minh
        if debug:
            print ('bottoms', bottoms)

        #ovl12,23: 2690, 2558
        ovls = [prm1.get('num_rng_bins') - \
                int(np.round(((prm2.get('near_range') - prm1.get('near_range')) / (constants.speed_of_light/ prm1.get('rng_samp_rate') / 2)))) \
                for (prm1, prm2) in zip(prms[:-1], prms[1:])]
        if debug:
            print ('ovls', ovls)

        #Writing the grid files..Size(69158x13075)...
        #maxy: 13075
        # for SLC
        maxy = max([prm.get('num_valid_az') + bottom for prm, bottom in zip(prms, bottoms)])
        if debug:
            print ('maxy', maxy)
        maxx = sum([prm.get('num_rng_bins') - ovl - 1 for prm, ovl in zip(prms, [-1] + ovls)])
        if debug:
            print ('maxx', maxx)

        #Stitching location n1 = 1045
        #Stitching location n2 = 935
        ns = [np.ceil(-prm.get('rshift') + prm.get('first_sample') + 150.0).astype(int) for prm in prms[1:]]
        ns = [10 if n < 10 else n for n in ns]
        if debug:
            print ('ns', ns)

        # left and right coordinates for every subswath valid area
        lefts = []
        rights = []

        # 1st
        xlim = prms[0].get('num_rng_bins') - ovls[0] + ns[0]
        lefts.append(0)
        rights.append(xlim)

        # 2nd
        if len(prms) == 2:
            xlim = prms[1].get('num_rng_bins') - 1
        else:
            # for 3 subswaths
            xlim = prms[1].get('num_rng_bins') - ovls[1] + ns[1]
        lefts.append(ns[0])
        rights.append(xlim)

        # 3rd
        if len(prms) == 3:
            xlim = prms[2].get('num_rng_bins') - 2
            lefts.append(ns[1])
            rights.append(xlim)

        # check and merge SLCs
        sumx = sum([right-left for right, left in zip(rights, lefts)])
        if debug:
            print ('assert maxx == sum(...)', maxx, sumx)
        assert maxx == sumx, 'Incorrect output grid range dimension size'

        offsets = {'bottoms': bottoms, 'lefts': lefts, 'rights': rights, 'bottom': minh, 'extent': [maxy, maxx], 'ylims': ylims, 'xlims': xlims}
        if debug:
            print ('offsets', offsets)

        return offsets
4 changes: 2 additions & 2 deletions pygmtsar/pygmtsar/Stack_sbas.py
@@ -90,10 +90,10 @@ def baseline_table(dates):
import pandas as pd
import numpy as np

prm_ref = self.PRM()
prm_ref = self.PRM_merged()
data = []
for date in dates:
prm_rep = self.PRM(date)
prm_rep = self.PRM_merged(date)
BPL, BPR = prm_ref.SAT_baseline(prm_rep).get('B_parallel', 'B_perpendicular')
data.append({'date':date, 'BPL':BPL, 'BPR':BPR})
df = pd.DataFrame(data).set_index('date')