diff --git a/pygmtsar/pygmtsar/SBAS_sbas.py b/pygmtsar/pygmtsar/SBAS_sbas.py
index d254626a..277d6eaa 100644
--- a/pygmtsar/pygmtsar/SBAS_sbas.py
+++ b/pygmtsar/pygmtsar/SBAS_sbas.py
@@ -50,6 +50,8 @@ def lstsq(x, w, matrix):
             # ignore pixels where correlation is not defined
             return np.nan * np.zeros(matrix.shape[1])

+        assert x.shape == w.shape, f'Arrays x and w need to have equal shape, x.shape={x.shape}, w.shape={w.shape}'
+
         # fill nans as zeroes and set corresponding weight to 0
         nanmask = np.where(np.isnan(x))
         if nanmask[0].size > 0:
@@ -206,19 +208,19 @@ def lstsq_parallel(self, pairs=None, data=None, weight=None,

         def lstq_block(ys, xs):
             # 3D array
-            data_block = data.sel(y=ys, x=xs).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
+            data_block = data.isel(y=ys, x=xs).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
             # weights can be defined by multiple ways or be set to None
             if isinstance(weight, xr.DataArray):
                 # 3D array
-                weight_block = weight.sel(y=ys, x=xs).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
+                weight_block = weight.isel(y=ys, x=xs).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
                 # Vectorize vec_lstsq
-                vec_lstsq = np.vectorize(lambda data, weight: self.lstsq(data, weight, matrix), signature='(n),(n)->(m)')
+                vec_lstsq = np.vectorize(lambda x, w: self.lstsq(x, w, matrix), signature='(n),(n)->(m)')
                 # Apply vec_lstsq to data_block and weight_block and revert the original dimensions order
                 block = vec_lstsq(data_block, weight_block).transpose(2,0,1)
                 del weight_block, vec_lstsq
             else:
                 # Vectorize vec_lstsq
-                vec_lstsq = np.vectorize(lambda data: self.lstsq(data, weight, matrix), signature='(n)->(m)')
+                vec_lstsq = np.vectorize(lambda x: self.lstsq(x, weight, matrix), signature='(n)->(m)')
                 # Apply vec_lstsq to data_block and weight_block and revert the original dimensions order
                 block = vec_lstsq(data_block).transpose(2,0,1)
                 del vec_lstsq
@@ -226,8 +228,10 @@ def lstq_block(ys, xs):
             return block

         # split to square chunks
-        ys_blocks = np.array_split(data.y, np.arange(0, data.y.size, chunksize)[1:])
-        xs_blocks = np.array_split(data.x, np.arange(0, data.x.size, chunksize)[1:])
+        # use indices instead of the coordinate values to prevent the weird error raising occasionally:
+        # "Reindexing only valid with uniquely valued Index objects"
+        ys_blocks = np.array_split(np.arange(data.y.size), np.arange(0, data.y.size, chunksize)[1:])
+        xs_blocks = np.array_split(np.arange(data.x.size), np.arange(0, data.x.size, chunksize)[1:])

         blocks_total = []
         for ys_block in ys_blocks:
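For reference, the block-splitting pattern that both changed files now share selects square chunks by positional index via isel() instead of by coordinate label via sel(). Below is a minimal standalone sketch of that pattern, not part of the patch; the array shape, coordinate values, and chunksize are illustrative assumptions.

```python
import numpy as np
import xarray as xr

# toy stack with dimensions (pair, y, x); in PyGMTSAR the data comes from SBAS processing
data = xr.DataArray(np.random.rand(4, 10, 12),
                    dims=('pair', 'y', 'x'),
                    coords={'y': np.arange(10), 'x': np.arange(12)})
chunksize = 5

# split positional indices (not coordinate values) into near-square blocks
ys_blocks = np.array_split(np.arange(data.y.size), np.arange(0, data.y.size, chunksize)[1:])
xs_blocks = np.array_split(np.arange(data.x.size), np.arange(0, data.x.size, chunksize)[1:])

for ys in ys_blocks:
    for xs in xs_blocks:
        # isel() indexes by position, so the coordinate index is never consulted
        block = data.isel(y=ys, x=xs).values.transpose(1, 2, 0)   # -> (y, x, pair)
        print(block.shape)
```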
diff --git a/pygmtsar/pygmtsar/SBAS_stl.py b/pygmtsar/pygmtsar/SBAS_stl.py
index 799c6937..832f5a07 100644
--- a/pygmtsar/pygmtsar/SBAS_stl.py
+++ b/pygmtsar/pygmtsar/SBAS_stl.py
@@ -141,7 +141,7 @@ def stl_parallel(self, dates, data, freq='W', periods=52, robust=False, chunksiz
         def stl_block(lats, lons):
             # use external variables dt, dt_periodic, periods, robust
             # 3D array
-            data_block = data.sel(lat=lats, lon=lons).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
+            data_block = data.isel(lat=lats, lon=lons).chunk(-1).compute(n_workers=1).values.transpose(1,2,0)
             # Vectorize vec_lstsq
             #vec_stl = np.vectorize(lambda data: self.stl(data, dt, dt_periodic, periods, robust), signature='(n)->(3,m)')
             vec_stl = np.vectorize(lambda data: self.stl(data, dt, dt_periodic, periods, robust), signature='(n)->(m),(m),(m)')
@@ -151,8 +151,10 @@ def stl_block(lats, lons):
             return np.asarray(block).transpose(0,3,1,2)

         # split to square chunks
-        lats_blocks = np.array_split(data.lat, np.arange(0, data.lat.size, chunksize)[1:])
-        lons_blocks = np.array_split(data.lon, np.arange(0, data.lon.size, chunksize)[1:])
+        # use indices instead of the coordinate values to prevent the weird error raising occasionally:
+        # "Reindexing only valid with uniquely valued Index objects"
+        lats_blocks = np.array_split(np.arange(data.lat.size), np.arange(0, data.lat.size, chunksize)[1:])
+        lons_blocks = np.array_split(np.arange(data.lon.size), np.arange(0, data.lon.size, chunksize)[1:])

         blocks_total = []
         for lats_block in lats_blocks:
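The comment added in both hunks quotes the pandas error raised when a non-unique index is used for label lookup. One way that error can surface is when coordinate values are not strictly unique; the sketch below illustrates that failure mode and why positional indexing sidesteps it. The duplicated coordinate value is contrived for the demo, and the exact exception class and text may vary with the pandas/xarray version.

```python
import numpy as np
import xarray as xr

lat = np.array([10.0, 10.0, 20.0, 30.0])    # note the duplicated 10.0
da = xr.DataArray(np.arange(4.0), dims='lat', coords={'lat': lat})

try:
    da.sel(lat=[10.0, 20.0])                # label-based lookup touches the pandas index
except Exception as e:
    # typically "Reindexing only valid with uniquely valued Index objects"
    print(type(e).__name__, e)

print(da.isel(lat=[0, 2]).values)           # positional lookup works regardless of duplicates
```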