
Merge pull request #10432 from gem/mmi_tags
Changed `mmi_tags` to have only 12 columns
micheles authored Mar 10, 2025
2 parents 4f5679c + 5c37dff commit 4df49c6
Showing 7 changed files with 78 additions and 43 deletions.
3 changes: 2 additions & 1 deletion debian/changelog
@@ -12,7 +12,8 @@
* Fixed a bug in the Thingbaijam et al. (2017) MSR

[Michele Simionato]
* Added an input `mmi_shapes_file` for use in OQ Impact
* Added an input `mmi_shapes_file` for use in OQ Impact and then
a JSON output /v1/calc/:ID/mmi_tags

[Christopher Brooks]
* Added Boore and Atkinson (2008) site term to the ModifiableGMPE
16 changes: 15 additions & 1 deletion doc/api-reference/rest-api.rst
@@ -135,7 +135,21 @@ columns are "ID_1", "loss_type", "value", "lossmea", "lossq05",
GET /v1/calc/:calc_id/mmi_tags
**********************************

FIXME
Get exposure aggregated by MMI regions and tags.

NB: this URL is valid only for ShakeMap-based calculations that download
the MMI regions from the USGS service.

Otherwise it returns a BadRequest error with HTTP code 400.

Parameters: None

Response:

A JSON object corresponding to a pandas DataFrame. The names of the
columns are "ID_1", "number", "contents", "nonstructural", "structural",
"residents", "area", "occupants_day", "occupants_night", "occupants_transit",
"occupants_avg", "mmi".

***********************************
GET /v1/calc/:calc_id/extract/:spec
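For illustration (not part of the diff), a client can call the new endpoint and rebuild the DataFrame with pandas; the server URL and calculation id below are placeholders.

```python
import pandas as pd
import requests

SERVER = "http://localhost:8800"   # placeholder OpenQuake server URL
CALC_ID = 123                      # placeholder calculation id

resp = requests.get(f"{SERVER}/v1/calc/{CALC_ID}/mmi_tags")
if resp.status_code == 400:
    # not a ShakeMap-based calculation with MMI regions
    print(resp.text)
else:
    # the body is a column-oriented JSON dump of the DataFrame
    df = pd.DataFrame(resp.json())
    print(df[["ID_1", "number", "occupants_avg", "mmi"]])
```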
7 changes: 4 additions & 3 deletions openquake/calculators/base.py
@@ -731,9 +731,10 @@ def pre_execute(self):
self.save_crmodel()
if oq.impact and 'mmi' in oq.inputs:
logging.info('Computing MMI-aggregated values')
if mmi_values := self.assetcol.get_mmi_values(
oq.aggregate_by, oq.inputs['mmi']):
self.datastore['mmi_tags'] = mmi_values
mmi_df = self.assetcol.get_mmi_values(
oq.aggregate_by, oq.inputs['mmi'])
if len(mmi_df):
self.datastore.hdf5.create_df('mmi_tags', mmi_df)

def pre_execute_from_parent(self):
"""
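The new code stores the aggregated values as a DataFrame in the calculation datastore rather than as a plain dictionary of arrays. A minimal sketch of the same store-then-read-back idea, using plain pandas HDF5 storage (requires PyTables) instead of the engine's `create_df`/`read_df` API, with a made-up file name:

```python
import pandas as pd

# toy stand-in for the DataFrame returned by assetcol.get_mmi_values()
mmi_df = pd.DataFrame({
    "ID_1": ["A", "A", "B"],
    "number": [10.0, 5.0, 3.0],
    "occupants_avg": [25.0, 12.0, 7.0],
    "mmi": [6, 7, 7],           # illustrative MMI values
})

if len(mmi_df):  # store only if at least one aggregated row survived
    with pd.HDFStore("calc_demo.hdf5") as store:
        store.put("mmi_tags", mmi_df, format="table")

# later (e.g. at extraction time) the DataFrame is read back by key
with pd.HDFStore("calc_demo.hdf5") as store:
    print(store["mmi_tags"])
```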
38 changes: 8 additions & 30 deletions openquake/calculators/extract.py
@@ -846,6 +846,14 @@ def extract_aggexp_tags(dstore, what):
return _aggexp_tags(dstore)[0]


@extract.add('mmi_tags')
def extract_mmi_tags(dstore, what):
"""
Aggregates exposure by MMI regions and tags. Use it as /extract/mmi_tags?
"""
return dstore.read_df('mmi_tags')


@extract.add('aggrisk_tags')
def extract_aggrisk_tags(dstore, what):
"""
@@ -893,36 +901,6 @@ def extract_aggrisk_tags(dstore, what):
return df


@extract.add('mmi_tags')
def extract_mmi_tags(dstore, what):
"""
Aggregates mmi by tag. Use it as /extract/mmi_tags?
"""
oq = dstore['oqparam']
if len(oq.aggregate_by) > 1: # i.e. [['ID_0'], ['OCCUPANCY']]
# see impact_test.py
aggby = [','.join(a[0] for a in oq.aggregate_by)]
else: # i.e. [['ID_0', 'OCCUPANCY']]
# see event_based_risk_test/case_1
[aggby] = oq.aggregate_by
keys = numpy.array([line.decode('utf8').split('\t')
for line in dstore['agg_keys'][:]])
values = dstore['mmi_tags']
acc = general.AccumDict(accum=[])
K = len(keys)
ok = numpy.zeros(K, bool)
for agg_id in range(K):
for agg_key, key in zip(aggby, keys[agg_id]):
acc[agg_key].append(key)
for mmi in list(values):
array = values[mmi][agg_id] # structured array with loss types
for lt in array.dtype.names:
acc[f'{lt}_{mmi}'].append(array[lt])
ok[agg_id] += array[lt]
df = pandas.DataFrame(acc)[ok]
return df


@extract.add('agg_losses')
def extract_agg_losses(dstore, what):
"""
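The `@extract.add('mmi_tags')` decorator registers the new function under a key so that `/extract/mmi_tags` requests dispatch to it. A self-contained sketch of this registry pattern, independent of the engine's actual `extract` object:

```python
class Registry:
    """Map string keys to extractor functions via a decorator."""

    def __init__(self):
        self.funcs = {}

    def add(self, key):
        def decorator(func):
            self.funcs[key] = func
            return func
        return decorator

    def __call__(self, dstore, what):
        # dispatch on the key, passing along the datastore and query
        return self.funcs[what](dstore, what)


extract = Registry()


@extract.add('mmi_tags')
def extract_mmi_tags(dstore, what):
    # the real extractor returns dstore.read_df('mmi_tags')
    return dstore['mmi_tags']


# usage, with a plain dict standing in for the datastore
print(extract({'mmi_tags': '<DataFrame placeholder>'}, 'mmi_tags'))
```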
27 changes: 19 additions & 8 deletions openquake/risklib/asset.py
@@ -48,17 +48,16 @@
'business_interruption'}


def to_mmi(value, MMIs=('I', 'II', 'III', 'IV', 'V', 'VI', 'VII',
'VIII', 'IX', 'X')):
def to_mmi(value):
"""
:param value: float in the range 1..10
:returns: string "I" .. "X" representing a MMI
:returns: an MMI value in the range 1..10
"""
if value >= 10.5:
raise ValueError(f'{value} is too large to be an MMI')
elif value < 0.5:
raise ValueError(f'{value} is too small to be an MMI')
return MMIs[round(value) - 1]
return round(value) - 1


def add_dupl_fields(df, oqfields):
@@ -495,7 +494,7 @@ def get_mmi_values(self, aggregate_by, mmi_file):
:param mmi_file:
shapefile containing MMI geometries and values
:returns:
a dictionary MMI -> array with the value fields
a DataFrame with columns number, structural, ..., mmi
"""
out = {}
with fiona.open(f'zip://{mmi_file}!mi.shp') as f:
@@ -506,11 +505,23 @@
values = self.get_agg_values(aggregate_by, geom)
if values['number'].any():
if mmi not in out:
out[mmi] = values
out[mmi] = values[:-1] # discard total
else:
for lt in values.dtype.names:
out[mmi][lt] += values[lt]
return out
out[mmi][lt] += values[lt][:-1]
_aggids, aggtags = self.build_aggids(aggregate_by)
aggtags = numpy.array(aggtags) # shape (K+1, T)
dfs = []
for mmi in out:
dic = {key: aggtags[:, k] for k, key in enumerate(aggregate_by[0])}
dic.update({col: out[mmi][col] for col in out[mmi].dtype.names})
df = pandas.DataFrame(dic)
df['mmi'] = mmi
dfs.append(df)
if not dfs:
return ()
df = pandas.concat(dfs)
return df[df.number > 0]

# not used yet
def agg_by_site(self):
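The rewritten `get_mmi_values` builds one small DataFrame per MMI region, tags each row with its MMI, concatenates them and drops rows with no buildings. A toy sketch of that final assembly step, with invented tags and values:

```python
import pandas as pd

dfs = []
for mmi, numbers in [(6, [10.0, 0.0]), (7, [3.0, 8.0])]:
    df = pd.DataFrame({"ID_1": ["A", "B"], "number": numbers})
    df["mmi"] = mmi            # tag every row with its MMI level
    dfs.append(df)

if not dfs:
    result = ()                # nothing intersected the MMI regions
else:
    result = pd.concat(dfs)
    result = result[result.number > 0]   # discard empty aggregates
print(result)
```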
1 change: 1 addition & 0 deletions openquake/server/v1/calc_urls.py
@@ -34,6 +34,7 @@
re_path(r'^(\d+)/log/(\d*):(\d*)$', views.calc_log, name="log"),
re_path(r'^result/(\d+)$', views.calc_result),
re_path(r'^(\d+)/aggrisk_tags$', views.aggrisk_tags),
re_path(r'^(\d+)/mmi_tags$', views.mmi_tags),
re_path(r'^(\d+)/result/list$', views.calc_results),
re_path(r'^(\d+)/share$', views.calc_share),
re_path(r'^(\d+)/unshare$', views.calc_unshare),
29 changes: 29 additions & 0 deletions openquake/server/views.py
@@ -1197,7 +1197,36 @@ def aggrisk_tags(request, calc_id):
content='%s: %s in %s\n%s' %
(exc.__class__.__name__, exc, 'aggrisk_tags', tb),
content_type='text/plain', status=400)
return HttpResponse(content=df.to_json(), content_type=JSON, status=200)


@cross_domain_ajax
@require_http_methods(['GET', 'HEAD'])
def mmi_tags(request, calc_id):
"""
Return mmi_tags, by ``calc_id``, as JSON.
:param request:
`django.http.HttpRequest` object.
:param calc_id:
The id of the requested calculation.
:returns:
a JSON object as documented in rest-api.rst
"""
job = logs.dbcmd('get_job', int(calc_id))
if job is None:
return HttpResponseNotFound()
if not utils.user_has_permission(request, job.user_name, job.status):
return HttpResponseForbidden()
try:
with datastore.read(job.ds_calc_dir + '.hdf5') as ds:
df = _extract(ds, 'mmi_tags')
except Exception as exc:
tb = ''.join(traceback.format_tb(exc.__traceback__))
return HttpResponse(
content='%s: %s in %s\n%s' %
(exc.__class__.__name__, exc, 'mmi_tags', tb),
content_type='text/plain', status=400)
return HttpResponse(content=df.to_json(), content_type=JSON, status=200)


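The view serializes the DataFrame with pandas' default `to_json`, which is column-oriented; a tiny example of the resulting payload (using just two of the documented columns):

```python
import pandas as pd

df = pd.DataFrame({"ID_1": ["A", "B"], "mmi": [6, 7]})
print(df.to_json())
# -> {"ID_1":{"0":"A","1":"B"},"mmi":{"0":6,"1":7}}
```

A client can rebuild the frame from this payload with `pd.DataFrame(json.loads(payload))`, as in the REST example above.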
