# vim: set filetype=cfg:
##------------------------ smallbaselineApp.cfg ------------------------##
########## computing resource configuration
mintpy.compute.maxMemory = auto #[float > 0.0], auto for 4, max memory to allocate in GB
## parallel processing with dask
## currently applied to steps: invert_network, correct_topography
## cluster = none to turn off the parallel computing
## numWorker = all to use all of the locally available cores (for cluster = local only)
## numWorker = 80% to use 80% of the locally available cores (for cluster = local only)
## config = none to roll back to the default name (same as the cluster type; for cluster != local)
mintpy.compute.cluster = auto #[local / slurm / pbs / lsf / none], auto for none, cluster type
mintpy.compute.numWorker = auto #[int > 1 / all / num%], auto for 4 (local) or 40 (slurm / pbs / lsf), num of workers
mintpy.compute.config = auto #[none / slurm / pbs / lsf ], auto for none (same as cluster), config name
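## Example (hypothetical values for illustration; uncomment and adjust to your machine), e.g. to run the
## dask-parallelized steps on a local machine using 80% of the available cores:
# mintpy.compute.cluster = local
# mintpy.compute.numWorker = 80%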
########## 1. load_data
##---------add attributes manually
## MintPy requires attributes listed at: https://mintpy.readthedocs.io/en/latest/api/attributes/
## Missing attributes can be added below manually (uncomment #), e.g.
# ORBIT_DIRECTION = ascending
# PLATFORM = CSK
# ...
## a. autoPath - automatic path pattern defined in mintpy.defaults.auto_path.AUTO_PATH_*
## b. run load_data.py -H to check more details and example inputs.
## c. compression to save disk usage for ifgramStack.h5 file:
## no - save 0% disk usage, fast [default]
## lzf - save ~57% disk usage, relatively slow
## gzip - save ~62% disk usage, very slow [not recommended]
mintpy.load.processor = auto #[isce, aria, hyp3, gmtsar, snap, gamma, roipac, nisar], auto for isce
mintpy.load.autoPath = auto #[yes / no], auto for no, use pre-defined auto path
mintpy.load.updateMode = auto #[yes / no], auto for yes, skip re-loading if HDF5 files are complete
mintpy.load.compression = auto #[gzip / lzf / no], auto for no.
##---------for ISCE only:
mintpy.load.metaFile = auto #[path of common metadata file for the stack], e.g.: ./reference/IW1.xml, ./referenceShelve/data.dat
mintpy.load.baselineDir = auto #[path of the baseline dir], e.g.: ./baselines
##---------interferogram stack:
mintpy.load.unwFile = auto #[path pattern of unwrapped interferogram files]
mintpy.load.corFile = auto #[path pattern of spatial coherence files]
mintpy.load.connCompFile = auto #[path pattern of connected components files], optional but recommended
mintpy.load.intFile = auto #[path pattern of wrapped interferogram files], optional
mintpy.load.magFile = auto #[path pattern of interferogram magnitude files], optional
##---------ionosphere stack (optional):
mintpy.load.ionUnwFile = auto #[path pattern of unwrapped interferogram files]
mintpy.load.ionCorFile = auto #[path pattern of spatial coherence files]
mintpy.load.ionConnCompFile = auto #[path pattern of connected components files], optional but recommended
##---------offset stack (optional):
mintpy.load.azOffFile = auto #[path pattern of azimuth offset file]
mintpy.load.rgOffFile = auto #[path pattern of range offset file]
mintpy.load.azOffStdFile = auto #[path pattern of azimuth offset variance file], optional but recommended
mintpy.load.rgOffStdFile = auto #[path pattern of range offset variance file], optional but recommended
mintpy.load.offSnrFile = auto #[path pattern of offset signal-to-noise ratio file], optional
##---------geometry:
mintpy.load.demFile = auto #[path of DEM file]
mintpy.load.lookupYFile = auto #[path of latitude /row /y coordinate file], not required for geocoded data
mintpy.load.lookupXFile = auto #[path of longitude/column/x coordinate file], not required for geocoded data
mintpy.load.incAngleFile = auto #[path of incidence angle file], optional but recommended
mintpy.load.azAngleFile = auto #[path of azimuth angle file], optional
mintpy.load.shadowMaskFile = auto #[path of shadow mask file], optional but recommended
mintpy.load.waterMaskFile = auto #[path of water mask file], optional but recommended
mintpy.load.bperpFile = auto #[path pattern of 2D perpendicular baseline file], optional
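## Example (hypothetical path patterns for illustration only; the actual patterns depend on your processor
## and directory layout, see load_data.py -H), e.g. for an ISCE-2/topsStack-like layout:
# mintpy.load.unwFile = ../merged/interferograms/*/filt_*.unw
# mintpy.load.corFile = ../merged/interferograms/*/filt_*.cor
# mintpy.load.connCompFile = ../merged/interferograms/*/filt_*.unw.conncomp
# mintpy.load.demFile = ../merged/geom_reference/hgt.rdr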
##---------subset (optional):
## if both yx and lalo are specified, the lalo option is used, unless a) there is no lookup file AND b) the dataset is in radar coordinates
mintpy.subset.yx = auto #[y0:y1,x0:x1 / no], auto for no
mintpy.subset.lalo = auto #[S:N,W:E / no], auto for no
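## Example (hypothetical AOI for illustration; uncomment and adjust to your study area), e.g. to crop
## the dataset to a lat/lon box in S:N,W:E order:
# mintpy.subset.lalo = 31.6:32.0,130.5:131.0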
##---------multilook (optional):
## multilook while loading data with the specified method, to reduce dataset size
## method - nearest, mean and median methods are applicable to interferogram/ionosphere/offset stack(s), except for:
## connected components and all geometry datasets, for which nearest is hardwired.
## Use the mean / median method with caution! It could smooth the noise for a better SNR, but it could also smooth the
## unwrapping errors, breaking the integer 2pi relationship, which is used in the unwrapping error correction.
## If you really want to increase the SNR, consider re-generating your stack of interferograms with more looks instead.
mintpy.multilook.method = auto #[nearest, mean, median], auto for nearest - lines/rows skipping approach
mintpy.multilook.ystep = auto #[int >= 1], auto for 1 - no multilooking
mintpy.multilook.xstep = auto #[int >= 1], auto for 1 - no multilooking
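## Example (for illustration; uncomment to downsize the stack while loading), e.g. to keep every 3rd
## line and column with the default nearest method:
# mintpy.multilook.ystep = 3
# mintpy.multilook.xstep = 3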
########## 2. modify_network
## 1) Network modification based on temporal/perpendicular baselines, date, num of connections etc.
mintpy.network.tempBaseMax = auto #[1-inf, no], auto for no, max temporal baseline in days
mintpy.network.perpBaseMax = auto #[1-inf, no], auto for no, max perpendicular spatial baseline in meter
mintpy.network.connNumMax = auto #[1-inf, no], auto for no, max number of neighbors for each acquisition
mintpy.network.startDate = auto #[20090101 / no], auto for no
mintpy.network.endDate = auto #[20110101 / no], auto for no
mintpy.network.excludeDate = auto #[20080520,20090817 / no], auto for no
mintpy.network.excludeIfgIndex = auto #[1:5,25 / no], auto for no, list of ifg indices (starting from 0)
mintpy.network.referenceFile = auto #[date12_list.txt / ifgramStack.h5 / no], auto for no
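## Example (hypothetical thresholds for illustration; uncomment and adjust to your network), e.g. to drop
## pairs longer than 180 days in temporal baseline or 200 m in perpendicular baseline:
# mintpy.network.tempBaseMax = 180
# mintpy.network.perpBaseMax = 200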
## 2) Data-driven network modification
## a - Coherence-based network modification = (threshold + MST) by default
## reference: Yunjun et al. (2019, section 4.2 and 5.3.1); Chaussard et al. (2015, GRL)
## It calculates an average coherence for each interferogram using the spatial coherence within the input mask (with AOI)
## Then it finds a minimum spanning tree (MST) network with the inverse of the average coherence as weight (keepMinSpanTree)
## Next it excludes interferograms if a) the average coherence < minCoherence AND b) they are not in the MST network.
mintpy.network.coherenceBased = auto #[yes / no], auto for no, exclude interferograms with coherence < minCoherence
mintpy.network.minCoherence = auto #[0.0-1.0], auto for 0.7
## b - Effective Coherence Ratio network modification = (threshold + MST) by default
## reference: Kang et al. (2021, RSE)
## It calculates the area ratio of each interferogram that is above a spatial coherence threshold.
## This threshold is defined as the spatial coherence of the interferograms within the input mask.
## It then finds a minimum spanning tree (MST) network with the inverse of the area ratio as weight (keepMinSpanTree)
## Next it excludes interferograms if a) the area ratio < minAreaRatio AND b) they are not in the MST network.
mintpy.network.areaRatioBased = auto #[yes / no], auto for no, exclude interferograms with area ratio < minAreaRatio
mintpy.network.minAreaRatio = auto #[0.0-1.0], auto for 0.75
## Additional common parameters for the 2) data-driven network modification
mintpy.network.keepMinSpanTree = auto #[yes / no], auto for yes, keep interferograms in Min Span Tree network
mintpy.network.maskFile = auto #[file name, no], auto for waterMask.h5 or no [if no waterMask.h5 found]
mintpy.network.aoiYX = auto #[y0:y1,x0:x1 / no], auto for no, area of interest for coherence calculation
mintpy.network.aoiLALO = auto #[S:N,W:E / no], auto for no - use the whole area
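## Example (for illustration; uncomment to turn on the coherence-based modification), e.g. to exclude
## low-coherence pairs (outside the MST network) using the default 0.7 threshold:
# mintpy.network.coherenceBased = yes
# mintpy.network.minCoherence = 0.7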
########## 3. reference_point
## Reference all interferograms to one common point in space
## auto - randomly select a pixel with coherence > minCoherence
## however, manually specifying it using prior knowledge of the study area is highly recommended,
## with the following guidelines (section 4.3 in Yunjun et al., 2019):
## 1) located in a coherent area, to minimize the decorrelation effect.
## 2) not affected by strong atmospheric turbulence, e.g. ionospheric streaks
## 3) close to and with similar elevation as the AOI, to minimize the impact of spatially correlated atmospheric delay
mintpy.reference.yx = auto #[257,151 / auto]
mintpy.reference.lalo = auto #[31.8,130.8 / auto]
mintpy.reference.maskFile = auto #[filename / no], auto for maskConnComp.h5
mintpy.reference.coherenceFile = auto #[filename], auto for avgSpatialCoh.h5
mintpy.reference.minCoherence = auto #[0.0-1.0], auto for 0.85, minimum coherence for auto method
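## Example (hypothetical reference point for illustration; uncomment and adjust to your study area), e.g.
## to pin all interferograms to a known coherent pixel near the AOI:
# mintpy.reference.lalo = 31.8,130.8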
########## quick_overview
## A quick assessment of:
## 1) possible ground deformation
## using the velocity from the traditional interferogram stacking
## reference: Zebker et al. (1997, JGR)
## 2) distribution of phase unwrapping error
## from the number of interferogram triplets with non-zero integer ambiguity of closure phase
## reference: T_int in Yunjun et al. (2019, CAGEO). Related to section 3.2, equation (8-9) and Fig. 3d-e.
########## 4. correct_unwrap_error (optional)
## connected components (mintpy.load.connCompFile) are required for this step.
## SNAPHU (Chen & Zebker, 2001) is currently the only unwrapper that provides connected components, as far as we know.
## reference: Yunjun et al. (2019, section 3)
## supported methods:
## a. phase_closure - suitable for highly redundant network
## b. bridging - suitable for regions separated by narrow decorrelated features, e.g. rivers, narrow water bodies
## c. bridging+phase_closure - recommended when there is a small percentage of errors left after bridging
mintpy.unwrapError.method = auto #[bridging / phase_closure / bridging+phase_closure / no], auto for no
mintpy.unwrapError.waterMaskFile = auto #[waterMask.h5 / no], auto for waterMask.h5 or no [if not found]
mintpy.unwrapError.connCompMinArea = auto #[1-inf], auto for 2.5e3, discard regions smaller than the min size in pixels
## phase_closure options:
## numSample - a region-based strategy is implemented to speed up the L1-norm regularized least squares inversion.
## Instead of inverting every pixel for the integer ambiguity, a common connected component mask is generated;
## for each common conn. comp., numSample pixels are randomly selected for inversion, and the median value of the results
## is used for all pixels within this common conn. comp.
mintpy.unwrapError.numSample = auto #[int>1], auto for 100, number of samples to invert for common conn. comp.
## bridging options:
## ramp - a phase ramp can be estimated from the largest reliable region, removed from the entire interferogram
## before estimating the phase difference between reliable regions, and added back after the correction.
## bridgePtsRadius - half size of the window used to calculate the median value of phase difference
mintpy.unwrapError.ramp = auto #[no / linear / quadratic], auto for no; linear recommended for L-band data
mintpy.unwrapError.bridgePtsRadius = auto #[1-inf], auto for 50, half size of the window around end points
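## Example (for illustration; uncomment to turn on the correction), e.g. to run bridging followed by
## phase closure, with a linear ramp removed during bridging:
# mintpy.unwrapError.method = bridging+phase_closure
# mintpy.unwrapError.ramp = linear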
########## 5. invert_network
## Invert the network of interferograms into a time-series using the weighted least squares (WLS) estimator.
## weighting options for the least squares inversion [a fast option is available, but it is not the best]:
## a. var - use inverse of covariance as weight (Tough et al., 1995; Guarnieri & Tebaldini, 2008) [recommended]
## b. fim - use Fisher Information Matrix as weight (Seymour & Cumming, 1994; Samiei-Esfahany et al., 2016).
## c. coh - use coherence as weight (Perissin & Wang, 2012)
## d. no - uniform weight (Berardino et al., 2002) [fast]
## SBAS (Berardino et al., 2002) = minNormVelocity (yes) + weightFunc (no)
mintpy.networkInversion.weightFunc = auto #[var / fim / coh / no], auto for var
mintpy.networkInversion.waterMaskFile = auto #[filename / no], auto for waterMask.h5 or no [if not found]
mintpy.networkInversion.minNormVelocity = auto #[yes / no], auto for yes, min-norm deformation velocity / phase
## mask options for the unwrapPhase of each interferogram before inversion (recommended if weightFunc = no):
## a. coherence - mask out pixels with spatial coherence < maskThreshold
## b. connectComponent - mask out pixels with False/0 value
## c. no - no masking [recommended].
## d. range/azimuthOffsetStd - mask out pixels with offset std. dev. > maskThreshold [for offset]
mintpy.networkInversion.maskDataset = auto #[coherence / connectComponent / rangeOffsetStd / azimuthOffsetStd / no], auto for no
mintpy.networkInversion.maskThreshold = auto #[0-inf], auto for 0.4
mintpy.networkInversion.minRedundancy = auto #[1-inf], auto for 1.0, min num_ifgram for every SAR acquisition
## Temporal coherence is calculated and used to generate the mask as the reliability measure
## reference: Pepe & Lanari (2006, IEEE-TGRS)
mintpy.networkInversion.minTempCoh = auto #[0.0-1.0], auto for 0.7, min temporal coherence for mask
mintpy.networkInversion.minNumPixel = auto #[int > 1], auto for 100, min number of pixels in mask above
mintpy.networkInversion.shadowMask = auto #[yes / no], auto for yes [if shadowMask is in geometry file] or no.
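## Example (for illustration; uncomment and adjust as needed), e.g. to run an unweighted SBAS-style
## inversion with coherence-based masking at 0.4:
# mintpy.networkInversion.weightFunc = no
# mintpy.networkInversion.maskDataset = coherence
# mintpy.networkInversion.maskThreshold = 0.4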
########## correct_LOD
## Local Oscillator Drift (LOD) correction (for Envisat only)
## reference: Marinkovic and Larsen (2013, Proc. LPS)
## automatically applied to Envisat data (identified via PLATFORM attribute)
## and skipped for all the other satellites.
########## correct_SET
## Solid Earth tides (SET) correction [need to install insarlab/PySolid]
## reference: Milbert (2018); Yunjun et al. (2022, IEEE-TGRS)
mintpy.solidEarthTides = auto #[yes / no], auto for no
########## 6. correct_troposphere (optional but recommended)
## correct tropospheric delay using the following methods:
## a. height_correlation - correct stratified tropospheric delay (Doin et al., 2009, J Applied Geop)
## b. pyaps - use Global Atmospheric Models (GAMs) data (Jolivet et al., 2011; 2014)
## ERA5 - ERA5 from ECMWF [need to install PyAPS from GitHub; recommended and turned ON by default]
## MERRA - MERRA-2 from NASA [need to install PyAPS from Caltech/EarthDef]
## NARR - NARR from NOAA [need to install PyAPS from Caltech/EarthDef; recommended for N America]
## c. gacos - use GACOS with the iterative tropospheric decomposition model (Yu et al., 2018, JGR)
## need to manually download GACOS products at http://www.gacos.net for all acquisitions before running this step
mintpy.troposphericDelay.method = auto #[pyaps / height_correlation / gacos / no], auto for pyaps
## Notes for pyaps:
## a. GAM data latency: for the most recent SAR acquisitions, GAM data may not yet be available; the correction
## will be applied to dates with GAM data available and skipped for the others.
## b. WEATHER_DIR: if you define an environment variable named WEATHER_DIR to contain the path to a
## directory, then MintPy applications will download the GAM files into the indicated directory.
## MintPy applications will look for the GAM files in the directory before downloading new ones, to
## prevent downloading multiple copies if you work with different datasets that cover the same date/time.
mintpy.troposphericDelay.weatherModel = auto #[ERA5 / MERRA / NARR], auto for ERA5
mintpy.troposphericDelay.weatherDir = auto #[path2directory], auto for WEATHER_DIR or "./"
## Notes for height_correlation:
## Extra multilooking is applied to estimate the empirical phase/elevation ratio ONLY.
## For a dataset with 5 by 15 looks, looks=8 will generate phase with (5*8) by (15*8) looks
## to estimate the empirical parameter; the correction is then applied to the original phase (with 5 by 15 looks)
## if the phase/elevation correlation is larger than minCorrelation.
mintpy.troposphericDelay.polyOrder = auto #[1 / 2 / 3], auto for 1
mintpy.troposphericDelay.looks = auto #[1-inf], auto for 8, extra multilooking num
mintpy.troposphericDelay.minCorrelation = auto #[0.0-1.0], auto for 0
## Notes for gacos:
## Set the path below to the directory that contains the downloaded *.ztd* files
mintpy.troposphericDelay.gacosDir = auto # [path2directory], auto for "./GACOS"
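## Example (for illustration; uncomment to switch from the default pyaps/ERA5 correction), e.g. to use
## manually downloaded GACOS products stored in ./GACOS:
# mintpy.troposphericDelay.method = gacos
# mintpy.troposphericDelay.gacosDir = ./GACOS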
########## 7. deramp (optional)
## Estimate and remove a phase ramp for each acquisition based on the reliable pixels.
## Recommended for localized deformation signals, e.g. volcanic deformation, landslides, land subsidence, etc.
## NOT recommended for long spatial wavelength deformation signals, e.g. co-, post- and inter-seismic deformation.
mintpy.deramp = auto #[no / linear / quadratic], auto for no - no ramp will be removed
mintpy.deramp.maskFile = auto #[filename / no], auto for maskTempCoh.h5, mask file for ramp estimation
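## Example (for illustration; uncomment to remove a linear ramp from each acquisition using the default mask):
# mintpy.deramp = linear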
########## 8. correct_topography (optional but recommended)
## Topographic residual (DEM error) correction
## reference: Fattahi and Amelung (2013, IEEE-TGRS)
## stepFuncDate - specify the stepFuncDate option if you know there are sudden displacement jumps in your area,
## e.g. due to a volcanic eruption or an earthquake
## excludeDate - dates excluded for the error estimation
## pixelwiseGeometry - use pixel-wise geometry (incidence angle & slant range distance)
## yes - use pixel-wise geometry if it is available [slow; used by default]
## no - use the mean geometry [fast]
mintpy.topographicResidual = auto #[yes / no], auto for yes
mintpy.topographicResidual.polyOrder = auto #[1-inf], auto for 2, poly order of temporal deformation model
mintpy.topographicResidual.phaseVelocity = auto #[yes / no], auto for no - use phase velocity for minimization
mintpy.topographicResidual.stepFuncDate = auto #[20080529,20190704T1733 / no], auto for no, date of step jump
mintpy.topographicResidual.excludeDate = auto #[20070321 / txtFile / no], auto for exclude_date.txt
mintpy.topographicResidual.pixelwiseGeometry = auto #[yes / no], auto for yes, use pixel-wise geometry info
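## Example (hypothetical event date for illustration; uncomment and adjust to a known event in your area),
## e.g. to add a step function at a coseismic displacement jump in the temporal deformation model:
# mintpy.topographicResidual.stepFuncDate = 20170908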
########## 9.1 residual_RMS (root mean squares for noise evaluation)
## Calculate the Root Mean Square (RMS) of residual phase time-series for each acquisition
## reference: Yunjun et al. (2019, section 4.9 and 5.4)
## To get rid of the long spatial wavelength component, a ramp is removed from each acquisition
## Set the optimal reference date to the date with the minimum RMS
## Set excluded dates (outliers) to dates with RMS > cutoff * median RMS (Median Absolute Deviation)
mintpy.residualRMS.maskFile = auto #[file name / no], auto for maskTempCoh.h5, mask for ramp estimation
mintpy.residualRMS.deramp = auto #[quadratic / linear / no], auto for quadratic
mintpy.residualRMS.cutoff = auto #[0.0-inf], auto for 3
########## 9.2 reference_date
## Reference all time-series to one date in time
## reference: Yunjun et al. (2019, section 4.9)
## no - do not change the default reference date (1st date)
mintpy.reference.date = auto #[reference_date.txt / 20090214 / no], auto for reference_date.txt
########## 10. velocity
## Estimate a suite of time functions [linear velocity by default]
## from the final displacement file (and from the tropospheric delay file if it exists)
mintpy.timeFunc.startDate = auto #[20070101 / no], auto for no
mintpy.timeFunc.endDate = auto #[20101230 / no], auto for no
mintpy.timeFunc.excludeDate = auto #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
## Fit a suite of time functions
## reference: Hetland et al. (2012, JGR) equation (2-9)
## the polynomial function is defined by its degree as an integer: 1 for linear, 2 for quadratic, etc.
## periodic function(s) are defined by a list of periods in decimal years: 1 for annual, 0.5 for semi-annual, etc.
## step function(s) are defined by a list of onset times as strings in YYYYMMDD(THHMM) format
## exp & log function(s) are defined by an onset time followed by a characteristic time in integer days.
## Multiple exp and log functions can be overlaid on top of each other, e.g.:
## 20110311,60,120 - two functions sharing the same onset time OR
## 20110311,60;20170908,120 - separated by ";"
mintpy.timeFunc.polynomial = auto #[int >= 0], auto for 1, degree of the polynomial function
mintpy.timeFunc.periodic = auto #[1,0.5 / list_of_float / no], auto for no, periods in decimal years
mintpy.timeFunc.stepDate = auto #[20110311,20170908 / 20120928T1733 / no], auto for no, step function(s)
mintpy.timeFunc.exp = auto #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
mintpy.timeFunc.log = auto #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
## Uncertainty quantification methods:
## a. residue - propagate from fitting residue assuming normal dist. in time (Fattahi & Amelung, 2015, JGR)
## b. covariance - propagate from time series (co)variance matrix
## c. bootstrap - bootstrapping (independently resampling with replacement; Efron & Tibshirani, 1986, Stat. Sci.)
mintpy.timeFunc.uncertaintyQuantification = auto #[residue, covariance, bootstrap], auto for residue
mintpy.timeFunc.timeSeriesCovFile = auto #[filename / no], auto for no, time series covariance file
mintpy.timeFunc.bootstrapCount = auto #[int>1], auto for 400, number of iterations for bootstrapping
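## Example (hypothetical settings for illustration; uncomment and adjust to your deformation history),
## e.g. to fit a linear trend plus annual/semi-annual cycles and a step at a known event date:
# mintpy.timeFunc.periodic = 1,0.5
# mintpy.timeFunc.stepDate = 20170908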
########## 11.1 geocode (post-processing)
# for input dataset in radar coordinates only
# commonly used resolutions, in meters and the corresponding degrees (on the equator):
#   100 m ~ 0.000925926,  90 m ~ 0.000833334,  60 m ~ 0.000555556,  50 m ~ 0.000462963,
#    40 m ~ 0.000370370,  30 m ~ 0.000277778,  20 m ~ 0.000185185,  10 m ~ 0.000092593
mintpy.geocode = auto #[yes / no], auto for yes
mintpy.geocode.SNWE = auto #[-1.2,0.5,-92,-91 / none ], auto for none, output extent in degree
mintpy.geocode.laloStep = auto #[-0.000555556,0.000555556 / None], auto for None, output resolution in degree
mintpy.geocode.interpMethod = auto #[nearest], auto for nearest, interpolation method
mintpy.geocode.fillValue = auto #[np.nan, 0, ...], auto for np.nan, fill value for outliers.
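## Example (for illustration; uncomment and adjust to your target resolution), e.g. to geocode onto a
## ~90 m grid (0.000833334 degrees; note the negative latitude step):
# mintpy.geocode.laloStep = -0.000833334,0.000833334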
########## 11.2 google_earth (post-processing)
mintpy.save.kmz = auto #[yes / no], auto for yes, save geocoded velocity to Google Earth KMZ file
########## 11.3 hdfeos5 (post-processing)
mintpy.save.hdfEos5 = auto #[yes / no], auto for no, save time-series to HDF-EOS5 format
mintpy.save.hdfEos5.update = auto #[yes / no], auto for no, put XXXXXXXX as endDate in output filename
mintpy.save.hdfEos5.subset = auto #[yes / no], auto for no, put subset range info in output filename
########## 11.4 plot
# for high-resolution plotting, increase mintpy.plot.maxMemory
# for fast plotting with more parallelization, decrease mintpy.plot.maxMemory
mintpy.plot = auto #[yes / no], auto for yes, plot files generated by default processing to pic folder
mintpy.plot.dpi = auto #[int], auto for 150, number of dots per inch (DPI)
mintpy.plot.maxMemory = auto #[float], auto for 4, max memory used by one call of view.py for plotting.