diff --git a/pygmtsar/pygmtsar/PRM.py b/pygmtsar/pygmtsar/PRM.py index 72a8928..c72cddd 100644 --- a/pygmtsar/pygmtsar/PRM.py +++ b/pygmtsar/pygmtsar/PRM.py @@ -12,6 +12,16 @@ class PRM(datagrid, PRM_gmtsar): + @staticmethod + def to_numeric_or_original(val): + if isinstance(val, str): + try: + float_val = float(val) + return int(float_val) if float_val.is_integer() else float_val + except ValueError: + return val + return val + # my replacement function for GMT based robust 2D trend coefficient calculations: # gmt trend2d r.xyz -Fxyzmw -N1r -V # gmt trend2d r.xyz -Fxyzmw -N2r -V @@ -252,14 +262,8 @@ def _from_io(prm): """ import pandas as pd - def to_numeric_or_original(vals): - try: - return pd.to_numeric(vals) - except ValueError: - return vals - df = pd.read_csv(prm, sep='\s+=\s+', header=None, names=['name', 'value'], engine='python').set_index('name') - df['value'] = df['value'].map(to_numeric_or_original) + df['value'] = df['value'].map(PRM.to_numeric_or_original) return PRM(df) @@ -278,12 +282,6 @@ def __init__(self, prm=None): """ import pandas as pd - def to_numeric_or_original(vals): - try: - return pd.to_numeric(vals) - except ValueError: - return vals - # Initialize an empty DataFrame if prm is None if prm is None: _prm = pd.DataFrame(columns=['name', 'value']) @@ -293,7 +291,7 @@ def to_numeric_or_original(vals): _prm = prm.df.reset_index() # Convert values to numeric where possible, keep original value otherwise - _prm['value'] = _prm['value'].map(to_numeric_or_original) + _prm['value'] = _prm['value'].map(PRM.to_numeric_or_original) # Set the DataFrame for the PRM object self.df = _prm[['name', 'value']].drop_duplicates(keep='last').set_index('name') @@ -732,7 +730,7 @@ def fix_aligned(self): # del buffer, re, im, slc_block # note: only one dimension chunked due to sequential file reading - def read_SLC_int(self, scale=2.5e-07): + def read_SLC_int(self, scale=2.5e-07, shape=None): """ Read SLC (Single Look Complex) data and compute the power of the 
signal. The method reads binary SLC data file, which contains alternating sequences of real and imaginary parts. @@ -762,7 +760,8 @@ def read_SLC_int(self, scale=2.5e-07): """ import xarray as xr import numpy as np - import dask, dask.array + import dask + import dask.array as da import os import warnings @@ -776,7 +775,9 @@ def read_SLC_block(slc_filename, start, stop): prm = PRM.from_file(self.filename) # num_patches multiplier is omitted - slc_filename, xdim, ydim = prm.get('SLC_file', 'num_rng_bins', 'num_valid_az') + #slc_filename, xdim, ydim = prm.get('SLC_file', 'num_rng_bins', 'num_valid_az') + slc_filename, xdim, ydim, rshift, ashift = prm.get('SLC_file', 'num_rng_bins', 'num_valid_az', 'rshift', 'ashift') + dirname = os.path.dirname(self.filename) slc_filename = os.path.join(dirname, slc_filename) #print (slc_filename, ydim, xdim) @@ -790,19 +791,70 @@ def read_SLC_block(slc_filename, start, stop): for i in range(blocks): start = i * blocksize stop = min((i+1) * blocksize, ydim * xdim) + #assert 0, f'SLC: start={start}, stop={stop}' #print ('start, stop, shape', start, stop, (stop-start)) # use proper output data type for complex data and intensity - block = dask.array.from_delayed(read_SLC_block(slc_filename, start, stop), + block = da.from_delayed(read_SLC_block(slc_filename, start, stop), shape=(2*(stop-start),), dtype=np.int16) res.append(block[::2]) ims.append(block[1::2]) del block # Concatenate the chunks together # Data file can include additional data outside of the specified dimensions - re = dask.array.concatenate(res).reshape((-1, xdim))[:ydim,:] - im = dask.array.concatenate(ims).reshape((-1, xdim))[:ydim,:] + re = da.concatenate(res).reshape((-1, xdim))[:ydim,:] + im = da.concatenate(ims).reshape((-1, xdim))[:ydim,:] del res, ims - coords = {'y': np.arange(ydim) + 0.5, 'x': np.arange(xdim) + 0.5} + + assert re.shape == (ydim, xdim), f'Originated re shape ({ydim},{xdim}), but got {re.shape}' + assert im.shape == (ydim, xdim), f'Originated im 
width ({ydim},{xdim}), but got {im.shape}' + + # # perform aligning on-the-fly using shifting and padding + # rshift_pad = None + # if abs(rshift) > 0: + # rshift_pad = da.zeros((ydim, abs(rshift)), dtype=np.int16) + # if rshift > 0: + # re = da.concatenate([re[:, rshift:], rshift_pad], axis=1) + # im = da.concatenate([im[:, rshift:], rshift_pad], axis=1) + # elif rshift < 0: + # re = da.concatenate([rshift_pad, re[:, :rshift]], axis=1) + # im = da.concatenate([rshift_pad, im[:, :rshift]], axis=1) + + # ashift_pad = None + # if abs(ashift) > 0: + # ashift_pad = da.zeros((abs(ashift), xdim), dtype=np.int16) + # if ashift > 0: + # re = da.concatenate([re[ashift:, :], ashift_pad], axis=0) + # im = da.concatenate([im[ashift:, :], ashift_pad], axis=0) + # elif ashift < 0: + # re = da.concatenate([ashift_pad, re[:ashift, :]], axis=0) + # im = da.concatenate([ashift_pad, im[:ashift, :]], axis=0) + + # assert re.shape == (ydim, xdim), f'Expected re shape ({ydim},{xdim}), but got {re.shape} with padding {ashift_pad.shape} for ashift={ashift}, rshift={rshift}' + # assert im.shape == (ydim, xdim), f'Expected im width ({ydim},{xdim}), but got {im.shape} with padding {ashift_pad.shape} for ashift={ashift}, rshift={rshift}' + # del rshift_pad, ashift_pad + + # pad to the specified reference frame + if shape is not None: + if shape[1] > xdim: + rpad = da.zeros((ydim, shape[1] - xdim), dtype=np.int16) + re = da.concatenate([re, rpad], axis=1) + im = da.concatenate([im, rpad], axis=1) + elif shape[1] < xdim: + re = re[:,:shape[1]] + im = im[:,:shape[1]] + if shape[0] > ydim: + apad = da.zeros((shape[0] - ydim, shape[1]), dtype=np.int16) + re = da.concatenate([re, apad], axis=0) + im = da.concatenate([im, apad], axis=0) + elif shape[0] < ydim: + re = re[:shape[0]] + im = im[:shape[0]] + + coords = {'y': np.arange(ydim if shape is None else shape[0]) + 0.5, 'x': np.arange(xdim if shape is None else shape[1]) + 0.5} + + #coords = {'y': np.arange(ydim) + 0.5, 'x': np.arange(xdim) + 
0.5} + # perform aligning on-the-fly using coordinates + #coords = {'y': np.arange(ydim) + ashift + 0.5, 'x': np.arange(xdim) + rshift + 0.5} re = xr.DataArray(re, coords=coords).rename('re') im = xr.DataArray(im, coords=coords).rename('im') #if intensity: diff --git a/pygmtsar/pygmtsar/Stack_base.py b/pygmtsar/pygmtsar/Stack_base.py index b97e066..7b39214 100644 --- a/pygmtsar/pygmtsar/Stack_base.py +++ b/pygmtsar/pygmtsar/Stack_base.py @@ -43,8 +43,8 @@ def multistem_stem(self, subswath, date=None): assert len(date)==10, 'ERROR: multistem_stem date format is not yyyy-mm-dd' - multistem = f'{date}_F{subswath}' - return multistem + prefix = f'{date}_F{subswath}' + return prefix def set_reference(self, reference): """ @@ -182,10 +182,9 @@ def get_subswaths(self): # # define subswath # return subswaths[0] - # merge multiple subswaths when the function is called before subswaths merging def get_subswath(self, subswath=None): """ - Check and return subswath or return an unique subswath to functions which work with a single subswath only. + Check and return subswath or return the first subswath to functions which work with a single subswath only. 
Parameters ---------- @@ -199,11 +198,11 @@ def get_subswath(self, subswath=None): """ # detect all the subswaths subswaths = self.get_subswaths() - assert subswath is None or subswath in subswaths, f'ERROR: subswath {subswath} not found' + assert subswath is None or str(subswath) in str(subswaths), f'ERROR: subswath {subswath} not found' if subswath is not None: return subswath # define subswath - return int(''.join(map(str, subswaths))) + return int(str(subswaths[0])[0]) def get_pairs(self, pairs, dates=False): """ diff --git a/pygmtsar/pygmtsar/Stack_dem.py b/pygmtsar/pygmtsar/Stack_dem.py index 7bfea42..11b282e 100644 --- a/pygmtsar/pygmtsar/Stack_dem.py +++ b/pygmtsar/pygmtsar/Stack_dem.py @@ -16,7 +16,7 @@ class Stack_dem(Stack_reframe): buffer_degrees = 0.02 - def get_extent_ra(self): + def get_extent_ra(self, subswath=None): """ minx, miny, maxx, maxy = np.round(geom.bounds).astype(int) """ @@ -25,7 +25,7 @@ def get_extent_ra(self): dem = self.get_dem() df = dem.isel(lon=[0,-1]).to_dataframe().reset_index() - geom = self.geocode(LineString(np.column_stack([df.lon, df.lat]))) + geom = self.geocode(LineString(np.column_stack([df.lon, df.lat])), subswath=subswath) return geom # def get_extent(self, grid=None, subswath=None): diff --git a/pygmtsar/pygmtsar/Stack_geocode.py b/pygmtsar/pygmtsar/Stack_geocode.py index af1208b..dd2f6e9 100644 --- a/pygmtsar/pygmtsar/Stack_geocode.py +++ b/pygmtsar/pygmtsar/Stack_geocode.py @@ -56,7 +56,7 @@ def compute_geocode(self, coarsen=60.): # coarsen=4: # nearest: coords [array([596.42352295]), array([16978.65625])] # linear: coords [array([597.1080563]), array([16977.35608873])] - def geocode(self, geometry, z_offset=None): + def geocode(self, geometry, subswath=None, z_offset=None): """ Inverse geocode input geodataframe with 2D or 3D points. 
@@ -87,7 +87,7 @@ def geocode(self, geometry, z_offset=None): geometries = [geometry] dem = self.get_dem() - prm = self.PRM() + prm = self.PRM(subswath=subswath) def coords_transform(coords): # uses external variables dem, prm diff --git a/pygmtsar/pygmtsar/Stack_prm.py b/pygmtsar/pygmtsar/Stack_prm.py index 9184cc3..7d94b08 100644 --- a/pygmtsar/pygmtsar/Stack_prm.py +++ b/pygmtsar/pygmtsar/Stack_prm.py @@ -33,7 +33,6 @@ def PRM(self, date=None, subswath=None): import os # check if subswath exists or return a single subswath for None - # workaround for stack_rep_subswath() if subswath is None: subswath = self.get_subswath() @@ -42,5 +41,4 @@ def PRM(self, date=None, subswath=None): prefix = self.multistem_stem(subswath, date) filename = os.path.join(self.basedir, f'{prefix}.PRM') - #print (filename) return PRM.from_file(filename) \ No newline at end of file diff --git a/pygmtsar/pygmtsar/Stack_reframe.py b/pygmtsar/pygmtsar/Stack_reframe.py index b94a8ab..23aa993 100644 --- a/pygmtsar/pygmtsar/Stack_reframe.py +++ b/pygmtsar/pygmtsar/Stack_reframe.py @@ -75,10 +75,11 @@ def _reframe_subswath(self, subswath, date, geometry, debug=False): df = self.get_repeat(subswath, date) if debug: print('DEBUG: reframe scenes: ', len(df)) - stem = self.multistem_stem(subswath, date) - #print ('stem', stem) + prefix = self.multistem_stem(subswath, date) + if debug: + print ('DEBUG: ','prefix', prefix) - old_filename = os.path.join(self.basedir, f'{stem}') + old_filename = os.path.join(self.basedir, f'{prefix}') #print ('old_filename', old_filename) self._make_s1a_tops(subswath, date, debug=debug) diff --git a/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py b/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py index 429666e..dee08e7 100644 --- a/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py +++ b/pygmtsar/pygmtsar/Stack_reframe_gmtsar.py @@ -12,7 +12,7 @@ class Stack_reframe_gmtsar(Stack_orbits): - def _ext_orb_s1a(self, subswath, stem, date=None, debug=False): + def _ext_orb_s1a(self, subswath, 
date=None, debug=False): """ Extracts orbital data for the Sentinel-1A satellite by running GMTSAR binary `ext_orb_s1a`. @@ -36,13 +36,16 @@ def _ext_orb_s1a(self, subswath, stem, date=None, debug=False): import subprocess if date is None or date == self.reference: + date = self.reference df = self.get_reference(subswath) else: df = self.get_repeat(subswath, date) orbit = os.path.relpath(df['orbitpath'].iloc[0], self.basedir) - argv = ['ext_orb_s1a', f'{stem}.PRM', orbit, stem] + prefix = self.multistem_stem(subswath, date) + + argv = ['ext_orb_s1a', f'{prefix}.PRM', orbit, prefix] if debug: print ('DEBUG: argv', argv) p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8', cwd=self.basedir) @@ -105,9 +108,9 @@ def _make_s1a_tops(self, subswath, date=None, mode=0, rshift_fromfile=None, ashi # TODO: use subswath xmlfile = os.path.relpath(df['metapath'].iloc[0], self.basedir) datafile = os.path.relpath(df['datapath'].iloc[0], self.basedir) - stem = self.multistem_stem(subswath, date) + prefix = self.multistem_stem(subswath, date) - argv = ['make_s1a_tops', xmlfile, datafile, stem, str(mode)] + argv = ['make_s1a_tops', xmlfile, datafile, prefix, str(mode)] if rshift_fromfile is not None: argv.append(rshift_fromfile) if ashift_fromfile is not None: @@ -121,7 +124,7 @@ def _make_s1a_tops(self, subswath, date=None, mode=0, rshift_fromfile=None, ashi if len(stdout_data) > 0 and debug: print ('DEBUG: make_s1a_tops', stdout_data) - self._ext_orb_s1a(subswath, stem, date, debug=debug) + self._ext_orb_s1a(subswath, date, debug=debug) return @@ -170,13 +173,13 @@ def _assemble_tops(self, subswath, date, azi_1, azi_2, debug=False): else: datapaths = [os.path.relpath(path, self.basedir)[:-5] for path in df['datapath']] #print ('datapaths', datapaths) - stem = self.multistem_stem(subswath, date) + prefix = self.multistem_stem(subswath, date) # round values and convert to strings azi_1 = np.round(azi_1).astype(int).astype(str) azi_2 = 
np.round(azi_2).astype(int).astype(str) - argv = ['assemble_tops', azi_1, azi_2] + datapaths + [stem] + argv = ['assemble_tops', azi_1, azi_2] + datapaths + [prefix] if debug: print ('DEBUG: argv', argv) p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8', cwd=self.basedir)