From b7c1c18b7ff9cf08dc48b61390c6cc65eb83cf4a Mon Sep 17 00:00:00 2001 From: Michele Simionato Date: Mon, 10 Mar 2025 06:52:11 +0100 Subject: [PATCH 1/2] Changed mmi_tags to have only 12 columns --- openquake/calculators/base.py | 7 +++--- openquake/calculators/extract.py | 38 +++++++------------------------- openquake/risklib/asset.py | 25 ++++++++++++++------- 3 files changed, 29 insertions(+), 41 deletions(-) diff --git a/openquake/calculators/base.py b/openquake/calculators/base.py index 267aadcd8c8..f3e331085e7 100644 --- a/openquake/calculators/base.py +++ b/openquake/calculators/base.py @@ -731,9 +731,10 @@ def pre_execute(self): self.save_crmodel() if oq.impact and 'mmi' in oq.inputs: logging.info('Computing MMI-aggregated values') - if mmi_values := self.assetcol.get_mmi_values( - oq.aggregate_by, oq.inputs['mmi']): - self.datastore['mmi_tags'] = mmi_values + mmi_df = self.assetcol.get_mmi_values( + oq.aggregate_by, oq.inputs['mmi']) + if len(mmi_df): + self.datastore.hdf5.create_df('mmi_tags', mmi_df) def pre_execute_from_parent(self): """ diff --git a/openquake/calculators/extract.py b/openquake/calculators/extract.py index dc9a4f7ea55..0df048db3b9 100644 --- a/openquake/calculators/extract.py +++ b/openquake/calculators/extract.py @@ -846,6 +846,14 @@ def extract_aggexp_tags(dstore, what): return _aggexp_tags(dstore)[0] +@extract.add('mmi_tags') +def extract_mmi_tags(dstore, what): + """ + Aggregates exposure by MMI regions and tags. Use it as /extract/mmi_tags? + """ + return dstore.read_df('mmi_tags') + + @extract.add('aggrisk_tags') def extract_aggrisk_tags(dstore, what): """ @@ -893,36 +901,6 @@ def extract_aggrisk_tags(dstore, what): return df -@extract.add('mmi_tags') -def extract_mmi_tags(dstore, what): - """ - Aggregates mmi by tag. Use it as /extract/mmi_tags? - """ - oq = dstore['oqparam'] - if len(oq.aggregate_by) > 1: # i.e. [['ID_0'], ['OCCUPANCY']] - # see impact_test.py - aggby = [','.join(a[0] for a in oq.aggregate_by)] - else: # i.e. 
[['ID_0', 'OCCUPANCY']] - # see event_based_risk_test/case_1 - [aggby] = oq.aggregate_by - keys = numpy.array([line.decode('utf8').split('\t') - for line in dstore['agg_keys'][:]]) - values = dstore['mmi_tags'] - acc = general.AccumDict(accum=[]) - K = len(keys) - ok = numpy.zeros(K, bool) - for agg_id in range(K): - for agg_key, key in zip(aggby, keys[agg_id]): - acc[agg_key].append(key) - for mmi in list(values): - array = values[mmi][agg_id] # structured array with loss types - for lt in array.dtype.names: - acc[f'{lt}_{mmi}'].append(array[lt]) - ok[agg_id] += array[lt] - df = pandas.DataFrame(acc)[ok] - return df - - @extract.add('agg_losses') def extract_agg_losses(dstore, what): """ diff --git a/openquake/risklib/asset.py b/openquake/risklib/asset.py index 6117913cafc..12f3f9a5560 100644 --- a/openquake/risklib/asset.py +++ b/openquake/risklib/asset.py @@ -48,17 +48,16 @@ 'business_interruption'} -def to_mmi(value, MMIs=('I', 'II', 'III', 'IV', 'V', 'VI', 'VII', - 'VIII', 'IX', 'X')): +def to_mmi(value): """ :param value: float in the range 1..10 - :returns: string "I" .. 
"X" representing a MMI + :returns: an MMI value in the range 1..10 """ if value >= 10.5: raise ValueError(f'{value} is too large to be an MMI') elif value < 0.5: raise ValueError(f'{value} is too small to be an MMI') - return MMIs[round(value) - 1] + return round(value) - 1 def add_dupl_fields(df, oqfields): @@ -495,7 +494,7 @@ def get_mmi_values(self, aggregate_by, mmi_file): :param mmi_file: shapefile containing MMI geometries and values :returns: - a dictionary MMI -> array with the value fields + a DataFrame with columns number, structural, ..., mmi """ out = {} with fiona.open(f'zip://{mmi_file}!mi.shp') as f: @@ -506,11 +505,21 @@ def get_mmi_values(self, aggregate_by, mmi_file): values = self.get_agg_values(aggregate_by, geom) if values['number'].any(): if mmi not in out: - out[mmi] = values + out[mmi] = values[:-1] # discard total else: for lt in values.dtype.names: - out[mmi][lt] += values[lt] - return out + out[mmi][lt] += values[lt][:-1] + _aggids, aggtags = self.build_aggids(aggregate_by) + aggtags = numpy.array(aggtags) # shape (K+1, T) + dfs = [] + for mmi in out: + dic = {key: aggtags[:, k] for k, key in enumerate(aggregate_by[0])} + dic.update({col: out[mmi][col] for col in out[mmi].dtype.names}) + df = pandas.DataFrame(dic) + df['mmi'] = mmi + dfs.append(df) + df = pandas.concat(dfs) + return df[df.number > 0] # not used yet def agg_by_site(self): From 5c37dff58372ab6778f9904cde2481fd1cf12129 Mon Sep 17 00:00:00 2001 From: Michele Simionato Date: Mon, 10 Mar 2025 07:12:41 +0100 Subject: [PATCH 2/2] Added JSON output mmi_tags --- debian/changelog | 3 ++- doc/api-reference/rest-api.rst | 16 +++++++++++++++- openquake/risklib/asset.py | 2 ++ openquake/server/v1/calc_urls.py | 1 + openquake/server/views.py | 29 +++++++++++++++++++++++++++++ 5 files changed, 49 insertions(+), 2 deletions(-) diff --git a/debian/changelog b/debian/changelog index 7d54af98f3c..e98afdeab51 100644 --- a/debian/changelog +++ b/debian/changelog @@ -12,7 +12,8 @@ * Fixed a bug 
in the Thingbaijam et al. (2017) MSR
     [Michele Simionato]
-  * Added an input `mmi_shapes_file` for use in OQ Impact
+  * Added an input `mmi_shapes_file` for use in OQ Impact and then
+    a JSON output /v1/calc/:ID/mmi_tags

   [Christopher Brooks]
   * Added Boore and Atkinson (2008) site term to the ModifiableGMPE
diff --git a/doc/api-reference/rest-api.rst b/doc/api-reference/rest-api.rst
index 2631e99556c..400300f03b2 100644
--- a/doc/api-reference/rest-api.rst
+++ b/doc/api-reference/rest-api.rst
@@ -135,7 +135,21 @@ columns are "ID_1", "loss_type", "value", "lossmea", "lossq05",
 GET /v1/calc/:calc_id/mmi_tags
 **********************************
 
-FIXME
+Get exposure aggregated by MMI regions and tags.
+
+NB: this URL is valid only for ShakeMap based calculations downloading
+the MMI regions from the USGS service.
+
+Otherwise it returns a BadRequest error with HTTP code 400.
+
+Parameters: None
+
+Response:
+
+A JSON object corresponding to a pandas DataFrame. The names of the
+columns are "ID_1", "number", "contents", "nonstructural", "structural",
+"residents", "area", "occupants_day", "occupants_night", "occupants_transit",
+"occupants_avg", "mmi". 
*********************************** GET /v1/calc/:calc_id/extract/:spec diff --git a/openquake/risklib/asset.py b/openquake/risklib/asset.py index 12f3f9a5560..fd5810791e4 100644 --- a/openquake/risklib/asset.py +++ b/openquake/risklib/asset.py @@ -518,6 +518,8 @@ def get_mmi_values(self, aggregate_by, mmi_file): df = pandas.DataFrame(dic) df['mmi'] = mmi dfs.append(df) + if not dfs: + return () df = pandas.concat(dfs) return df[df.number > 0] diff --git a/openquake/server/v1/calc_urls.py b/openquake/server/v1/calc_urls.py index 1aa16b0b2c6..2daaafc3555 100644 --- a/openquake/server/v1/calc_urls.py +++ b/openquake/server/v1/calc_urls.py @@ -34,6 +34,7 @@ re_path(r'^(\d+)/log/(\d*):(\d*)$', views.calc_log, name="log"), re_path(r'^result/(\d+)$', views.calc_result), re_path(r'^(\d+)/aggrisk_tags$', views.aggrisk_tags), + re_path(r'^(\d+)/mmi_tags$', views.mmi_tags), re_path(r'^(\d+)/result/list$', views.calc_results), re_path(r'^(\d+)/share$', views.calc_share), re_path(r'^(\d+)/unshare$', views.calc_unshare), diff --git a/openquake/server/views.py b/openquake/server/views.py index 30259d741e7..92f27fcf97c 100644 --- a/openquake/server/views.py +++ b/openquake/server/views.py @@ -1197,7 +1197,36 @@ def aggrisk_tags(request, calc_id): content='%s: %s in %s\n%s' % (exc.__class__.__name__, exc, 'aggrisk_tags', tb), content_type='text/plain', status=400) + return HttpResponse(content=df.to_json(), content_type=JSON, status=200) + +@cross_domain_ajax +@require_http_methods(['GET', 'HEAD']) +def mmi_tags(request, calc_id): + """ + Return mmi_tags, by ``calc_id``, as JSON. + + :param request: + `django.http.HttpRequest` object. + :param calc_id: + The id of the requested calculation. 
+ :returns: + a JSON object as documented in rest-api.rst + """ + job = logs.dbcmd('get_job', int(calc_id)) + if job is None: + return HttpResponseNotFound() + if not utils.user_has_permission(request, job.user_name, job.status): + return HttpResponseForbidden() + try: + with datastore.read(job.ds_calc_dir + '.hdf5') as ds: + df = _extract(ds, 'mmi_tags') + except Exception as exc: + tb = ''.join(traceback.format_tb(exc.__traceback__)) + return HttpResponse( + content='%s: %s in %s\n%s' % + (exc.__class__.__name__, exc, 'mmi_tags', tb), + content_type='text/plain', status=400) return HttpResponse(content=df.to_json(), content_type=JSON, status=200)