Skip to content

Commit

Permalink
Merge pull request #10158 from gem/multi_peril
Browse files Browse the repository at this point in the history
Extended damages-rlzs to multiple perils
  • Loading branch information
micheles authored Nov 18, 2024
2 parents 84d2e50 + 3c4aa5f commit b75b934
Show file tree
Hide file tree
Showing 59 changed files with 293 additions and 284 deletions.
10 changes: 10 additions & 0 deletions openquake/baselib/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -1761,6 +1761,16 @@ def around(vec, value, delta):
return (vec <= value + delta) & (vec >= value - delta)


def sum_records(array):
    """
    Sum the records of a composite (structured) array, field by field.

    :param array: a structured numpy array with named fields
    :returns: a structured array of length 1 holding the per-field sums
    """
    acc = numpy.zeros(1, array.dtype)
    for field in array.dtype.names:
        # sum over the records axis, keeping the field's inner shape
        acc[field] = array[field].sum(axis=0)
    return acc


def compose_arrays(**kwarrays):
"""
Compose multiple 1D and 2D arrays into a single composite array.
Expand Down
10 changes: 4 additions & 6 deletions openquake/calculators/classical_damage.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,13 +73,11 @@ def post_execute(self, result):
a dictionary asset_ordinal -> array(R, D)
"""
D = len(self.crmodel.damage_states)
damages = numpy.zeros((self.A, self.R, self.L, D), numpy.float32)
damages = numpy.zeros((1, self.A, self.R, self.L, D), numpy.float32)
for a in result:
damages[a] = result[a]
self.datastore['damages-rlzs'] = damages
damages[0, a] = result[a]
self.datastore['damages-rlzs'] = self.crmodel.to_multi_damage(damages)
stats.set_rlzs_stats(self.datastore, 'damages-rlzs',
assets=self.assetcol['id'],
loss_type=self.oqparam.loss_types,
dmg_state=self.crmodel.damage_states)
assets=self.assetcol['id'])
dmg = views.view('portfolio_damage', self.datastore)
logging.info('\n' + views.text_table(dmg, ext='org'))
47 changes: 28 additions & 19 deletions openquake/calculators/event_based_damage.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ def zero_dmgcsq(A, R, L, crmodel):
"""
dmg_csq = crmodel.get_dmg_csq()
Dc = len(dmg_csq) + 1 # damages + consequences
return numpy.zeros((A, R, L, Dc), F32)
P = len(crmodel.perils)
return numpy.zeros((P, A, R, L, Dc), F32)


def damage_from_gmfs(gmfslices, oqparam, dstore, monitor):
Expand Down Expand Up @@ -83,7 +84,7 @@ def event_based_damage(df, oq, dstore, monitor):
crmodel = monitor.read('crmodel')
aggids = monitor.read('aggids')
dmgcsq = zero_dmgcsq(len(assetcol), oq.R, oq.L, crmodel)
_A, R, L, Dc = dmgcsq.shape
P, _A, R, L, Dc = dmgcsq.shape
D = len(crmodel.damage_states)
rlzs = dstore['events']['rlz_id']
dddict = general.AccumDict(accum=numpy.zeros((L, Dc), F32)) # eid, kid
Expand All @@ -93,21 +94,32 @@ def event_based_damage(df, oq, dstore, monitor):
if len(gmf_df) == 0:
continue
eids = gmf_df.eid.to_numpy()
E = len(eids)
if not oq.float_dmg_dist:
rng = scientific.MultiEventRNG(
oq.master_seed, numpy.unique(eids))
else:
rng = None
for taxo, adf in asset_df.groupby('taxonomy'):
aids = adf.index.to_numpy()
A = len(aids)
with mon:
rc = scientific.RiskComputer(crmodel, taxo)
dd4 = rc.get_dd4(adf, gmf_df, rng, Dc-D, crmodel) # (A, E, L, Dc)
dd5 = rc.get_dd5(adf, gmf_df, rng, Dc-D, crmodel) # (A, E, L, Dc)
if R == 1: # possibly because of collect_rlzs
dmgcsq[aids, 0] += dd4.sum(axis=1)
dmgcsq[:, aids, 0] += dd5.sum(axis=2)
else:
for e, rlz in enumerate(rlzs[eids]):
dmgcsq[aids, rlz] += dd4[:, e]
dmgcsq[:, aids, rlz] += dd5[:, :, e]
if P > 1:
dd4 = numpy.empty(dd5.shape[1:])
for li in range(L):
for a in range(A):
for e in range(E):
dd4[a, e, li, :D] = scientific.compose_dds(dd5[:, a, e, li, :D])
dd4[a, e, li, D:] = dd5[:, a, e, li, D:].max(axis=0)
else:
dd4 = dd5[0]
tot = dd4.sum(axis=0) # (E, L, Dc)
for e, eid in enumerate(eids):
dddict[eid, oq.K] += tot[e]
Expand Down Expand Up @@ -204,7 +216,7 @@ def post_execute(self, dummy):
"""
oq = self.oqparam
        # no damage check, perhaps the sites were disjoint from gmf_data
if self.dmgcsq[:, :, :, 1:].sum() == 0:
if self.dmgcsq[:, :, :, :, 1:].sum() == 0:
haz_sids = self.datastore['gmf_data/sid'][:]
count = numpy.isin(haz_sids, self.sitecol.sids).sum()
if count == 0:
Expand All @@ -222,24 +234,21 @@ def post_execute(self, dummy):
with prc.datastore:
prc.run(exports='')

_A, _R, L, _Dc = self.dmgcsq.shape
P, _A, _R, L, _Dc = self.dmgcsq.shape
D = len(self.crmodel.damage_states)
# fix no_damage distribution for events with zero damage
number = self.assetcol['value-number']
for r in range(self.R):
self.dmgcsq[:, r] /= prc.num_events[r]
ndamaged = self.dmgcsq[:, r, :, 1:D].sum(axis=2) # shape (A, L)
for li in range(L):
# set no_damage
self.dmgcsq[:, r, li, 0] = number - ndamaged[:, li]
for p in range(P):
for r in range(self.R):
self.dmgcsq[p, :, r] /= prc.num_events[r]
ndamaged = self.dmgcsq[p, :, r, :, 1:D].sum(axis=2) # shape (A, L)
for li in range(L):
# set no_damage
self.dmgcsq[p, :, r, li, 0] = number - ndamaged[:, li]

assert (self.dmgcsq >= 0).all() # sanity check
self.datastore['damages-rlzs'] = self.dmgcsq
set_rlzs_stats(self.datastore,
'damages-rlzs',
asset_id=self.assetcol['id'],
loss_type=oq.loss_types,
dmg_state=['no_damage'] + self.crmodel.get_dmg_csq())
self.datastore['damages-rlzs'] = self.crmodel.to_multi_damage(self.dmgcsq)
set_rlzs_stats(self.datastore, 'damages-rlzs', asset_id=self.assetcol['id'])

if oq.infrastructure_connectivity_analysis:
logging.info('Running connectivity analysis')
Expand Down
58 changes: 31 additions & 27 deletions openquake/calculators/export/risk.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,7 @@
from openquake.hazardlib import nrml
from openquake.hazardlib.stats import compute_stats2
from openquake.risklib import scientific
from openquake.calculators.extract import (
extract, build_damage_dt, build_csq_dt, build_damage_array, sanitize,
avglosses)
from openquake.calculators.extract import extract, sanitize, avglosses
from openquake.calculators import post_risk
from openquake.calculators.export import export, loss_curves
from openquake.calculators.export.hazard import savez
Expand Down Expand Up @@ -378,34 +376,41 @@ def export_loss_maps_npz(ekey, dstore):
return [fname]


def modal_damage_array(data, damage_dt):
def modal_damage_array(data, dstates):
# determine the damage state with the highest probability
A, _L, _D = data.shape
dmgstate = damage_dt['structural'].names
arr = numpy.zeros(A, [('modal-ds-' + lt, hdf5.vstr)
for lt in damage_dt.names])
for li, loss_type in enumerate(damage_dt.names):
arr['modal-ds-' + loss_type] = [dmgstate[data[a, li].argmax()]
for a in range(A)]
acc = general.AccumDict(accum=[])
for name in data.dtype.names: # peril-ltype-dstate
try:
peril, ltype, _dstate = name.split('-')
modal = f'modal-ds-{peril}~{ltype}'
except ValueError:
ltype, _dstate = name.split('-')
modal = 'modal-ds-' + ltype
if ltype != 'no_damage':
acc[modal].append(data[name])
acc = {k: numpy.array(acc[k]).argmax(axis=0) for k in acc}
arr = numpy.zeros(len(data), [(key, object) for key in acc])
for key in acc:
arr[key] = dstates[acc[key]]
return arr


# used by event_based_damage, scenario_damage, classical_damage
@export.add(('damages-rlzs', 'csv'), ('damages-stats', 'csv'))
def export_damages_csv(ekey, dstore):
oq = dstore['oqparam']
dmgstates = numpy.concatenate(
[['no_damage'], dstore.getitem('crm').attrs['limit_states']])
ebd = oq.calculation_mode == 'event_based_damage'
rlzs = dstore['full_lt'].get_realizations()
orig = dstore[ekey[0]][:] # shape (L, A, R, D)
dmg_dt = build_damage_dt(dstore)
orig = dstore[ekey[0]][:] # shape (A, R, L, D, P)
writer = writers.CsvWriter(fmt='%.6E')
assets = get_assets(dstore)
md = dstore.metadata
if oq.investigation_time:
rit = oq.risk_investigation_time or oq.investigation_time
md.update(dict(investigation_time=oq.investigation_time,
risk_investigation_time=rit))
D = len(oq.limit_states) + 1
R = 1 if oq.collect_rlzs else len(rlzs)
if ekey[0].endswith('stats'):
rlzs_or_stats = oq.hazard_stats()
Expand All @@ -414,27 +419,26 @@ def export_damages_csv(ekey, dstore):
name = ekey[0].split('-')[0]
if oq.calculation_mode != 'classical_damage':
name = 'avg_' + name
csqs = tuple(dstore.getitem('crm').attrs['consequences'])
for i, ros in enumerate(rlzs_or_stats):
if ebd: # export only the consequences from damages-rlzs, i == 0
rate = len(dstore['events']) * oq.time_ratio / len(rlzs)
data = orig[:, i] * rate
A, _L, Dc = data.shape
if Dc == D: # no consequences, export nothing
if len(csqs) == 0: # no consequences, export nothing
return []
csq_dt = build_csq_dt(dstore)
damages = numpy.zeros(A, [(lt, csq_dt) for lt in oq.loss_types])
for a in range(A):
for li, lt in enumerate(oq.loss_types):
damages[lt][a] = tuple(data[a, li, D:Dc])
rate = len(dstore['events']) * oq.time_ratio / len(rlzs)
data = orig[:, i]
dtlist = [(col, F32) for col in data.dtype.names if col.endswith(csqs)]
damages = numpy.zeros(len(data), dtlist)
for csq, _ in dtlist:
damages[csq] = data[csq] * rate
fname = dstore.build_fname('avg_risk', ros, ekey[1])
else: # scenario_damage, classical_damage
if oq.modal_damage_state:
damages = modal_damage_array(orig[:, i], dmg_dt)
damages = modal_damage_array(orig[:, i], dmgstates)
else:
damages = build_damage_array(orig[:, i], dmg_dt)
damages = orig[:, i]
fname = dstore.build_fname(name, ros, ekey[1])
writer.save(compose_arrays(assets, damages), fname,
comment=md, renamedict=dict(id='asset_id'))
arr = compose_arrays(assets, damages)
writer.save(arr, fname, comment=md, renamedict=dict(id='asset_id'))
return writer.getsaved()


Expand Down
56 changes: 22 additions & 34 deletions openquake/calculators/extract.py
Original file line number Diff line number Diff line change
Expand Up @@ -841,6 +841,21 @@ def extract_agg_losses(dstore, what):
return _filter_agg(dstore['assetcol'], losses, tags, stats)


# TODO: extend to multiple perils
def _dmg_get(array, loss_type):
# array of shape (A, R)
out = []
for name in array.dtype.names:
try:
ltype, _dstate = name.split('-')
except ValueError:
# ignore secondary perils
continue
if ltype == loss_type:
out.append(array[name])
return numpy.array(out).transpose(1, 2, 0) # shape (A, R, Dc)


@extract.add('agg_damages')
def extract_agg_damages(dstore, what):
"""
Expand All @@ -859,9 +874,7 @@ def extract_agg_damages(dstore, what):
loss_type = what
tags = []
if 'damages-rlzs' in dstore:
oq = dstore['oqparam']
li = oq.lti[loss_type]
damages = dstore['damages-rlzs'][:, :, li]
damages = _dmg_get(dstore['damages-rlzs'][:], loss_type)
else:
raise KeyError('No damages found in %s' % dstore)
return _filter_agg(dstore['assetcol'], damages, tags)
Expand Down Expand Up @@ -1045,53 +1058,28 @@ def build_damage_dt(dstore):
"""
oq = dstore['oqparam']
attrs = json.loads(dstore.get_attr('damages-rlzs', 'json'))
perils = attrs['peril']
limit_states = list(dstore.get_attr('crm', 'limit_states'))
csqs = attrs['dmg_state'][len(limit_states) + 1:] # consequences
dt_list = [(ds, F32) for ds in ['no_damage'] + limit_states + csqs]
dt_list = []
for peril in perils:
for ds in ['no_damage'] + limit_states + csqs:
dt_list.append((ds if peril == 'earthquake' else f'{peril}_{ds}', F32))
damage_dt = numpy.dtype(dt_list)
loss_types = oq.loss_dt().names
return numpy.dtype([(lt, damage_dt) for lt in loss_types])


def build_csq_dt(dstore):
"""
:param dstore: a datastore instance
:returns:
a composite dtype (csq1, csq2, ...)
"""
attrs = json.loads(dstore.get_attr('damages-rlzs', 'json'))
limit_states = list(dstore.get_attr('crm', 'limit_states'))
csqs = attrs['dmg_state'][len(limit_states) + 1:] # consequences
dt = numpy.dtype([(csq, F32) for csq in csqs])
return dt


def build_damage_array(data, damage_dt):
"""
:param data: an array of shape (A, L, D)
:param damage_dt: a damage composite data type loss_type -> states
:returns: a composite array of length N and dtype damage_dt
"""
A, _L, _D = data.shape
dmg = numpy.zeros(A, damage_dt)
for a in range(A):
for li, lt in enumerate(damage_dt.names):
dmg[lt][a] = tuple(data[a, li])
return dmg


@extract.add('damages-rlzs')
def extract_damages_npz(dstore, what):
oq = dstore['oqparam']
damage_dt = build_damage_dt(dstore)
R = dstore['full_lt'].get_num_paths()
if oq.collect_rlzs:
R = 1
data = dstore['damages-rlzs']
assets = util.get_assets(dstore)
for r in range(R):
damages = build_damage_array(data[:, r], damage_dt)
yield 'rlz-%03d' % r, util.compose_arrays(assets, damages)
yield 'rlz-%03d' % r, util.compose_arrays(assets, data[:, r])


# tested on oq-risk-tests event_based/etna
Expand Down
4 changes: 2 additions & 2 deletions openquake/calculators/multi_risk.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ def get_dmg_csq(crm, assets_by_site, gmf, time_event):
number = assets['value-number']
for a, o in enumerate(assets['ordinal']):
for li in range(L):
out[o, li, 0, :D] = number[a] * dd5[0, a, 0, li]
out[o, li, 0, [D]] = csq['losses', li][a]
out[o, li, :, :D] = number[a] * dd5[:, a, 0, li]
out[o, li, :, [D]] = csq['losses', li][:, a]
return out


Expand Down
16 changes: 5 additions & 11 deletions openquake/calculators/tests/scenario_damage_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,10 @@ def test_case_1(self):
# test agg_damages, 1 realization x 3 damage states
# checking that passing a fake loss type works,
# for compatibility with the past
[dmg] = extract(self.calc.datastore,
'agg_damages/structural?taxonomy=RC&CRESTA=01.1')
aac([1482., 489., 29.], dmg, atol=1E-4)
dmg = extract(self.calc.datastore,
'agg_damages/structural?taxonomy=RC&CRESTA=01.1')
aac([[1482., 489., 29.]], dmg, atol=1E-4)

# test no intersection
dmg = extract(self.calc.datastore, 'agg_damages/structural?taxonomy=RM&CRESTA=01.1')
self.assertEqual(dmg.shape, ())
Expand All @@ -80,7 +81,7 @@ def test_case_1c(self):
[fname] = export(('damages-rlzs', 'csv'), self.calc.datastore)
self.assertEqualFiles('expected/' + strip_calc_id(fname), fname)
df = self.calc.datastore.read_df('damages-rlzs', 'asset_id')
self.assertEqual(list(df.columns), ['rlz', 'loss_type', 'dmg_state', 'value'])
self.assertEqual(list(df.columns), ['rlz', 'value'])

# check risk_by_event
[fname] = export(('risk_by_event', 'csv'), self.calc.datastore)
Expand Down Expand Up @@ -114,13 +115,6 @@ def test_case_4b(self):
self.assertEqualFiles('expected/' + strip_calc_id(fname), fname,
delta=5E-4)

return # TODO: fix avg_losses
fnames = export(('avg_losses-rlzs', 'csv'), self.calc.datastore)
self.assertEqual(len(fnames), 2) # one per realization
for fname in fnames:
self.assertEqualFiles('expected/' + strip_calc_id(fname), fname,
delta=2E-4)

def test_wrong_gsim_lt(self):
with self.assertRaises(InvalidFile) as ctx:
self.run_calc(os.path.dirname(case_4b.__file__), 'job_err.ini')
Expand Down
Loading

0 comments on commit b75b934

Please sign in to comment.