Merge pull request #9956 from gem/cps
Grouping more sources as CollapsedPointSources
micheles authored Sep 14, 2024
2 parents e6eaf66 + e437ecc commit 46e47c1
Showing 2 changed files with 21 additions and 29 deletions.
openquake/calculators/preclassical.py (25 changes: 11 additions & 14 deletions)
@@ -27,7 +27,7 @@
 from openquake.baselib.general import AccumDict, groupby, block_splitter
 from openquake.hazardlib.contexts import read_cmakers
 from openquake.hazardlib.geo.surface.multi import build_secparams
-from openquake.hazardlib.source.point import grid_point_sources, msr_name
+from openquake.hazardlib.source.point import grid_point_sources
 from openquake.hazardlib.source.base import get_code2cls
 from openquake.hazardlib.sourceconverter import SourceGroup
 from openquake.hazardlib.calc.filters import (
@@ -148,19 +148,16 @@ def preclassical(srcs, sites, cmaker, secparams, monitor):
         dic['before'] = len(srcs)
         dic['after'] = len(splits)
         yield dic
-    else:
-        cnt = 0
-        for msr, block in groupby(splits, msr_name).items():
-            dic = grid_point_sources(block, spacing, msr, cnt, monitor)
-            cnt = dic.pop('cnt')
-            for src in dic[grp_id]:
-                src.num_ruptures = src.count_ruptures()
-            # this is also prefiltering the split sources
-            cmaker.set_weight(dic[grp_id], sf, multiplier, mon)
-            # print(f'{mon.task_no=}, {mon.duration=}')
-            dic['before'] = len(block)
-            dic['after'] = len(dic[grp_id])
-            yield dic
+    elif splits:
+        dic = grid_point_sources(splits, spacing, monitor)
+        for src in dic[grp_id]:
+            src.num_ruptures = src.count_ruptures()
+        # this is also prefiltering the split sources
+        cmaker.set_weight(dic[grp_id], sf, multiplier, mon)
+        # print(f'{mon.task_no=}, {mon.duration=}')
+        dic['before'] = len(splits)
+        dic['after'] = len(dic[grp_id])
+        yield dic


 def store_tiles(dstore, csm, sitecol, cmakers):
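
Note: the essence of the preclassical.py change is the call pattern. Previously the split sources had to be bucketed by magnitude scaling relationship (via groupby and msr_name) and gridded one block at a time, threading a cnt counter through the calls to keep the collapsed source IDs distinct; now a single grid_point_sources call covers all the splits. A minimal sketch of the two patterns, assuming splits, spacing and monitor are in scope as in the diff:

    # before: one gridding pass per magnitude scaling relationship,
    # with an explicit counter to keep the collapsed source IDs distinct
    cnt = 0
    for msr, block in groupby(splits, msr_name).items():
        dic = grid_point_sources(block, spacing, msr, cnt, monitor)
        cnt = dic.pop('cnt')

    # after: one gridding pass over all the splits; the MSR now travels
    # with each point source, so neither msr nor cnt is passed around
    dic = grid_point_sources(splits, spacing, monitor)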
openquake/hazardlib/source/point.py (25 changes: 10 additions & 15 deletions)
@@ -91,10 +91,8 @@ def calc_average(pointsources):
         rupture_aspect_ratio=[])
     rates = []
     trt = pointsources[0].tectonic_region_type
-    msr = msr_name(pointsources[0])
     for src in pointsources:
         assert src.tectonic_region_type == trt
-        assert msr_name(src) == msr
         rates.append(sum(r for m, r in src.get_annual_occurrence_rates()))
         ws, ds = zip(*src.nodal_plane_distribution.data)
         strike = numpy.average([np.strike for np in ds], weights=ws)
@@ -387,13 +385,14 @@ def psources_to_pdata(pointsources, name):
         array=array,
         tom=ps.temporal_occurrence_model,
         trt=ps.tectonic_region_type,
-        msr=ps.magnitude_scaling_relationship,
         rms=ps.rupture_mesh_spacing,
         npd=Deduplicate([ps.nodal_plane_distribution
                          for ps in pointsources]),
         hcd=Deduplicate([ps.hypocenter_distribution
                          for ps in pointsources]),
-        mfd=Deduplicate([ps.mfd for ps in pointsources]))
+        mfd=Deduplicate([ps.mfd for ps in pointsources]),
+        msr=Deduplicate([ps.magnitude_scaling_relationship
+                         for ps in pointsources]))
     return pdata
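
Note: the key move in psources_to_pdata is that the magnitude scaling relationship stops being a single object shared by all point sources and becomes, like npd, hcd and mfd, a Deduplicate container with one entry per source. A toy stand-in for the idea (Dedup here is illustrative only, not openquake.baselib's Deduplicate):

    class Dedup:
        """Store equal objects once; recover each original by index."""
        def __init__(self, objects):
            self.uniq, self.index, seen = [], [], {}
            for obj in objects:
                key = repr(obj)
                if key not in seen:  # first occurrence of this object
                    seen[key] = len(self.uniq)
                    self.uniq.append(obj)
                self.index.append(seen[key])

        def __getitem__(self, i):
            return self.uniq[self.index[i]]

    msr = Dedup(['WC1994', 'WC1994', 'PeerMSR'])
    assert len(msr.uniq) == 2 and msr[2] == 'PeerMSR'

Since each source keeps its own relationship, sources with different MSRs no longer need to be segregated before collapsing, which is what lets this commit group more of them as CollapsedPointSources.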


@@ -417,7 +416,7 @@ def pdata_to_psources(pdata):
             tectonic_region_type=trt,
             mfd=mfd[i],
             rupture_mesh_spacing=rms,
-            magnitude_scaling_relationship=msr,
+            magnitude_scaling_relationship=msr[i],
             rupture_aspect_ratio=rec['rar'],
             upper_seismogenic_depth=rec['usd'],
             lower_seismogenic_depth=rec['lsd'],
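
Note: pdata_to_psources mirrors the change symmetrically: where every reconstructed source used to receive the one shared msr, source i now recovers its own relationship as msr[i]. Continuing the toy sketch above (hypothetical values, not the real attributes):

    msrs = Dedup(['WC1994', 'PeerMSR', 'WC1994'])
    rebuilt = [dict(source_id=i, magnitude_scaling_relationship=msrs[i])
               for i in range(3)]
    assert rebuilt[1]['magnitude_scaling_relationship'] == 'PeerMSR'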
@@ -499,29 +498,24 @@ def count_ruptures(self):
                for src in pdata_to_psources(self.pdata))


-def grid_point_sources(
-        sources, ps_grid_spacing, msr, cnt=0, monitor=Monitor()):
+def grid_point_sources(sources, ps_grid_spacing, monitor=Monitor()):
     """
     :param sources:
         a list of sources with the same grp_id (point sources and not)
     :param ps_grid_spacing:
         value of the point source grid spacing in km; if None, do nothing
-    :param msr:
-        magnitude scaling relationship as a string
-    :param cnt:
-        a counter starting from 0 used to produce distinct source IDs
     :returns:
         a dict grp_id -> list of non-point sources and collapsed point sources
     """
     grp_id = sources[0].grp_id
     for src in sources[1:]:
         assert src.grp_id == grp_id, (src.grp_id, grp_id)
     if not ps_grid_spacing:
-        return {grp_id: sources, 'cnt': cnt}
+        return {grp_id: sources}
     out = [src for src in sources if not hasattr(src, 'location')]
     ps = numpy.array([src for src in sources if hasattr(src, 'location')])
     if len(ps) < 2:  # nothing to collapse
-        return {grp_id: out + list(ps), 'cnt': cnt}
+        return {grp_id: out + list(ps)}
     coords = numpy.zeros((len(ps), 3))
     for p, psource in enumerate(ps):
         coords[p, 0] = psource.location.x
@@ -530,11 +524,12 @@ def grid_point_sources(
     if (len(numpy.unique(coords[:, 0])) == 1 or
             len(numpy.unique(coords[:, 1])) == 1):
         # degenerated rectangle, there is no grid, do not collapse
-        return {grp_id: out + list(ps), 'cnt': cnt}
+        return {grp_id: out + list(ps)}
     deltax = angular_distance(ps_grid_spacing, lat=coords[:, 1].mean())
     deltay = angular_distance(ps_grid_spacing)
     grid = groupby_grid(coords[:, 0], coords[:, 1], deltax, deltay)
     task_no = getattr(monitor, 'task_no', 0)
+    cnt = 0
     for idxs in grid.values():
         if len(idxs) > 1:
             cnt += 1
@@ -546,7 +541,7 @@ def grid_point_sources(
             out.append(cps)
         else:  # there is a single source
             out.append(ps[idxs[0]])
-    return {grp_id: out, 'cnt': cnt}
+    return {grp_id: out}


 def get_rup_maxlen(src):
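
Note: grid_point_sources ends up with a simpler contract: the returned dict is keyed only by grp_id, and the 'cnt' bookkeeping key disappears because the counter is now reset locally on every call. The collapsing itself still works by binning the point-source coordinates into cells of roughly ps_grid_spacing km and merging any cell holding more than one point. A self-contained toy sketch of that binning step (groupby_cells is a hypothetical stand-in for the groupby_grid used above, and the real cell sizes come from angular_distance):

    import collections

    def groupby_cells(xs, ys, deltax, deltay):
        # bin each point into a (column, row) cell of size deltax x deltay
        cells = collections.defaultdict(list)
        for i, (x, y) in enumerate(zip(xs, ys)):
            cells[int(x // deltax), int(y // deltay)].append(i)
        return cells

    lons = [0.0, 0.1, 5.0, 5.1]  # toy coordinates in degrees
    lats = [0.0, 0.1, 5.0, 5.1]
    cells = groupby_cells(lons, lats, 1.0, 1.0)
    print(sorted(cells.items()))  # [((0, 0), [0, 1]), ((5, 5), [2, 3])]
    # each cell with more than one index would become a CollapsedPointSource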
