Skip to content

Commit

Permalink
Refactor to allow ruff to identify bare excepts
Browse files Browse the repository at this point in the history
Replaced `except Exception` and `except BaseException` with just `except`. A bare `except` is functionally equivalent to `except BaseException`; replacing `except Exception` slightly broadens the catch (a bare `except` also traps `KeyboardInterrupt` and `SystemExit`), but ruff sadly only picks up on bare `except`, so this change was necessary to allow these occurrences to be identified by ruff in the future.
Unfortunately, `except Exception as e` is still not picked up by ruff.
  • Loading branch information
garlic-os committed Aug 7, 2024
1 parent f8fd10d commit ee1e34b
Show file tree
Hide file tree
Showing 9 changed files with 13 additions and 13 deletions.
6 changes: 3 additions & 3 deletions tools/RAiDER/cli/statsPlot.py
Original file line number Diff line number Diff line change
Expand Up @@ -647,7 +647,7 @@ def _binned_vario(self, hEff, rawVario, xBin=None):
warnings.filterwarnings('ignore', message='Mean of empty slice')
hExp.append(np.nanmean(hEff[iBinMask]))
expVario.append(np.nanmean(rawVario[iBinMask]))
except BaseException: # TODO: Which error(s)?
except: # TODO: Which error(s)?
pass

if False in ~np.isnan(hExp):
Expand Down Expand Up @@ -1391,7 +1391,7 @@ def _reader(self):
data = pd.read_csv(self.fname, parse_dates=['Datetime'])
data['Date'] = data['Datetime'].apply(lambda x: x.date())
data['Date'] = data['Date'].apply(lambda x: dt.datetime.strptime(x.strftime('%Y-%m-%d'), '%Y-%m-%d'))
except BaseException:
except:
data = pd.read_csv(self.fname, parse_dates=['Date'])

# check if user-specified key is valid
Expand Down Expand Up @@ -1511,7 +1511,7 @@ def create_DF(self) -> None:
if self.bbox is not None:
try:
self.bbox = [float(val) for val in self.bbox.split()]
except BaseException:
except:
raise Exception(
'Cannot understand the --bounding_box argument. String input is incorrect or path does not exist.'
)
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/getStationDelays.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def get_delays_UNR(stationFile, filename, dateList, returnTime=None) -> None:
trotot, trototSD, trwet, tgetot, tgetotSD, tgntot, tgntotSD, wvapor, wvaporSD, mtemp = (
float(t) for t in split_lines[2:]
)
except BaseException: # TODO: What error(s)?
except: # TODO: What error(s)?
continue
site = split_lines[0]
year, doy, seconds = (int(n) for n in split_lines[1].split(':'))
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/gnss/processDelayFiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ def combineDelayFiles(outName, loc=os.getcwd(), source='model', ext='.csv', ref=
print(f'Combining {source} delay files')
try:
            concatDelayFiles(files, sort_list=['ID', 'Datetime'], outName=outName, source=source)
        except BaseException:
        except:
            concatDelayFiles(files, sort_list=['ID', 'Date'], outName=outName, source=source, ref=ref, col_name=col_name)


def addDateTimeToFiles(fileList, force=False, verbose=False) -> None:
Expand Down
6 changes: 3 additions & 3 deletions tools/RAiDER/losreader.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def getLookVectors(self, ht, llh, xyz, yy):
)
sat_xyz, _ = self._orbit.interpolate(aztime)
los[ii, jj, :] = (sat_xyz - inp_xyz) / slant_range
except Exception:
except:
los[ii, jj, :] = np.nan
return los

Expand Down Expand Up @@ -356,10 +356,10 @@ def filter_ESA_orbit_file_p(path: str) -> bool:
for orb_path in los_files:
svs.extend(read_ESA_Orbit_file(orb_path))

    except BaseException:
    except:
        try:
            svs = read_shelve(los_file)
        except BaseException:
        except:
            raise ValueError(f'get_sv: I cannot parse the statevector file {los_file}')
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/models/ecmwf.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def _get_from_cds(self, lat_min, lat_max, lon_min, lon_max, acqTime, outname) ->

try:
c.retrieve('reanalysis-era5-complete', dataDict, outname)
except Exception:
except:
raise Exception

def _download_ecmwf(self, lat_min, lat_max, lat_step, lon_min, lon_max, lon_step, time, out) -> None:
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/models/gmao.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def _fetch(self, out) -> None:
try:
# Note that lat/lon gets written twice for GMAO because they are the same as y/x
writeWeatherVarsXarray(lats, lons, h, q, p, t, dt, crs, outName=None, NoDataValue=None, chunk=(1, 91, 144))
except Exception:
except:
logger.exception('Unable to save weathermodel to file')

def load_weather(self, f=None) -> None:
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/models/ncmr.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ def _download_ncmr_file(self, out, date_time, bounding_box) -> None:

try:
writeWeatherVarsXarray(lats, lons, hgt, q, p, t, self._time, self._proj, outName=out)
except Exception:
except:
logger.exception('Unable to save weathermodel to file')

def _makeDataCubes(self, filename) -> None:
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/models/weatherModel.py
Original file line number Diff line number Diff line change
Expand Up @@ -585,7 +585,7 @@ def _uniform_in_z(self, _zlevels=None) -> None:
if _zlevels is None:
try:
_zlevels = self._zlevels
except BaseException:
except:
_zlevels = np.nanmean(self._zs, axis=(0, 1))

new_zs = np.tile(_zlevels, (nx, ny, 1))
Expand Down
2 changes: 1 addition & 1 deletion tools/RAiDER/processWM.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,5 +163,5 @@ def _weather_model_debug(los, lats, lons, ll_bounds, weather_model, wmLoc, time,
)
try:
weather_model.write2NETCDF4(weather_model_file)
except Exception:
except:
logger.exception('Unable to save weathermodel to file')

0 comments on commit ee1e34b

Please sign in to comment.