diff --git a/openquake/calculators/getters.py b/openquake/calculators/getters.py
index 1abbbbaf8109..332b917bf392 100644
--- a/openquake/calculators/getters.py
+++ b/openquake/calculators/getters.py
@@ -419,6 +419,11 @@ def get_ebrupture(dstore, rup_id):  # used in show rupture
     return get_ebr(rec, geom, trt)
 
 
+def get_rupture_from_dstore(dstore, rup_id=0):
+    ebr = get_ebrupture(dstore, rup_id)
+    return ebr.rupture
+
+
 # this is never called directly; get_rupture_getters is used instead
 class RuptureGetter(object):
     """
diff --git a/openquake/calculators/postproc/plots.py b/openquake/calculators/postproc/plots.py
index f3381385286c..0c5b2b456d0a 100644
--- a/openquake/calculators/postproc/plots.py
+++ b/openquake/calculators/postproc/plots.py
@@ -1,27 +1,28 @@
 # -*- coding: utf-8 -*-
 # vim: tabstop=4 shiftwidth=4 softtabstop=4
-# 
+#
 # Copyright (C) 2024, GEM Foundation
-# 
+#
 # OpenQuake is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Affero General Public License as published
 # by the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
-# 
+#
 # OpenQuake is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU Affero General Public License for more details.
-# 
+#
 # You should have received a copy of the GNU Affero General Public License
 # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
 
+import io
 import os
+import base64
 import numpy
 from shapely.geometry import MultiPolygon
 from openquake.commonlib import readinput, datastore
 from openquake.hmtk.plotting.patch import PolygonPatch
-from openquake.calculators.getters import get_ebrupture
 
 
 def import_plt():
@@ -32,7 +33,7 @@ def import_plt():
     return plt
 
 
-def add_borders(ax, read_df=readinput.read_countries_df, buffer=0):
+def add_borders(ax, read_df=readinput.read_countries_df, buffer=0, alpha=0.1):
     plt = import_plt()
     polys = read_df(buffer)['geom']
     cm = plt.get_cmap('RdBu')
@@ -41,9 +42,27 @@
         colour = cm(1. * idx / num_colours)
         if isinstance(poly, MultiPolygon):
             for onepoly in poly.geoms:
-                ax.add_patch(PolygonPatch(onepoly, fc=colour, alpha=0.1))
+                ax.add_patch(PolygonPatch(onepoly, fc=colour, alpha=alpha))
         else:
-            ax.add_patch(PolygonPatch(poly, fc=colour, alpha=0.1))
+            ax.add_patch(PolygonPatch(poly, fc=colour, alpha=alpha))
+    return ax
+
+
+def add_populated_places(ax, xlim, ylim, read_df=readinput.read_populated_places_df,
+                         lon_field='longitude', lat_field='latitude',
+                         label_field='name'):
+    data = read_df(lon_field, lat_field, label_field)
+    if data is None:
+        return ax
+    data = data[(data[lon_field] >= xlim[0]) & (data[lon_field] <= xlim[1])
+                & (data[lat_field] >= ylim[0]) & (data[lat_field] <= ylim[1])]
+    if len(data) == 0:
+        return ax
+    ax.scatter(data[lon_field], data[lat_field], label="Populated places",
+               s=2, color='black', alpha=0.5)
+    for _, row in data.iterrows():
+        ax.text(row[lon_field], row[lat_field], row[label_field], fontsize=7,
+                ha='right', alpha=0.5)
     return ax
 
 
@@ -59,6 +78,64 @@ def get_country_iso_codes(calc_id, assetcol):
     return id_0_str
 
 
+def plt_to_base64(plt):
+    """
+    The base64 string can be passed to a Django template and embedded
+    directly in HTML, without having to save the image to disk
+    """
+    bio = io.BytesIO()
+    plt.savefig(bio, format='png', bbox_inches='tight')
+    bio.seek(0)
+    img_base64 = base64.b64encode(bio.getvalue()).decode('utf-8')
+    return img_base64
+
+
+def plot_shakemap(shakemap_array, imt, backend=None, figsize=(10, 10),
+                  with_populated_places=False, return_base64=False,
+                  rupture=None):
+    plt = import_plt()
+    if backend is not None:
+        # we may need to use a non-interactive backend
+        import matplotlib
+        matplotlib.use(backend)
+    _fig, ax = plt.subplots(figsize=figsize)
+    ax.set_aspect('equal')
+    ax.grid(True)
+    ax.set_xlabel('Longitude')
+    ax.set_ylabel('Latitude')
+    title = 'Avg GMF for %s' % imt
+    ax.set_title(title)
+    gmf = shakemap_array['val'][imt]
+    markersize = 5
+    coll = ax.scatter(shakemap_array['lon'], shakemap_array['lat'], c=gmf,
+                      cmap='jet', s=markersize)
+    plt.colorbar(coll)
+    ax = add_borders(ax, alpha=0.2)
+    BUF_ANGLE = 1
+    min_x = shakemap_array['lon'].min()
+    max_x = shakemap_array['lon'].max()
+    min_y = shakemap_array['lat'].min()
+    max_y = shakemap_array['lat'].max()
+    if rupture is not None:
+        ax, rup_min_x, rup_min_y, rup_max_x, rup_max_y = add_rupture(
+            ax, rupture, hypo_alpha=0.8, hypo_markersize=8, surf_alpha=0.9,
+            surf_facecolor='none', surf_linestyle='--')
+        min_x = min(min_x, rup_min_x)
+        max_x = max(max_x, rup_max_x)
+        min_y = min(min_y, rup_min_y)
+        max_y = max(max_y, rup_max_y)
+    xlim = (min_x - BUF_ANGLE, max_x + BUF_ANGLE)
+    ylim = (min_y - BUF_ANGLE, max_y + BUF_ANGLE)
+    ax.set_xlim(*xlim)
+    ax.set_ylim(*ylim)
+    if with_populated_places:
+        ax = add_populated_places(ax, xlim, ylim)
+    if return_base64:
+        return plt_to_base64(plt)
+    else:
+        return plt
+
+
 def plot_avg_gmf(ex, imt):
     plt = import_plt()
     _fig, ax = plt.subplots(figsize=(10, 10))
@@ -94,15 +171,20 @@
     return plt
 
 
-def add_surface(ax, surface, label):
-    ax.fill(*surface.get_surface_boundaries(), alpha=.5, edgecolor='grey',
-            label=label)
+def add_surface(ax, surface, label, alpha=0.5, facecolor=None, linestyle='-'):
+    fill_params = {
+        'alpha': alpha,
+        'edgecolor': 'grey',
+        'label': label
+    }
+    if facecolor is not None:
+        fill_params['facecolor'] = facecolor
+    ax.fill(*surface.get_surface_boundaries(), **fill_params)
     return surface.get_bounding_box()
 
 
-def add_rupture(ax, dstore, rup_id=0):
-    ebr = get_ebrupture(dstore, rup_id)
-    rup = ebr.rupture
+def add_rupture(ax, rup, hypo_alpha=0.5, hypo_markersize=8, surf_alpha=0.5,
+                surf_facecolor=None, surf_linestyle='-'):
     if hasattr(rup.surface, 'surfaces'):
         min_x = 180
         max_x = -180
@@ -110,33 +192,47 @@
         max_y = -90
         for surf_idx, surface in enumerate(rup.surface.surfaces):
             min_x_, max_x_, max_y_, min_y_ = add_surface(
-                ax, surface, 'Surface %d' % surf_idx)
+                ax, surface, 'Surface %d' % surf_idx, alpha=surf_alpha,
+                facecolor=surf_facecolor, linestyle=surf_linestyle)
             min_x = min(min_x, min_x_)
             max_x = max(max_x, max_x_)
             min_y = min(min_y, min_y_)
             max_y = max(max_y, max_y_)
     else:
-        min_x, max_x, max_y, min_y = add_surface(ax, rup.surface, 'Surface')
+        min_x, max_x, max_y, min_y = add_surface(
+            ax, rup.surface, 'Surface', alpha=surf_alpha, facecolor=surf_facecolor,
+            linestyle=surf_linestyle)
     ax.plot(rup.hypocenter.x, rup.hypocenter.y, marker='*',
-            color='orange', label='Hypocenter', alpha=.5,
+            color='orange', label='Hypocenter', alpha=hypo_alpha,
             linestyle='', markersize=8)
     return ax, min_x, min_y, max_x, max_y
 
 
-def plot_rupture(dstore):
+def plot_rupture(rup, backend=None, figsize=(10, 10),
+                 with_populated_places=False, return_base64=False):
     # NB: matplotlib is imported inside since it is a costly import
     plt = import_plt()
-    _fig, ax = plt.subplots(figsize=(10, 10))
+    if backend is not None:
+        # we may need to use a non-interactive backend
+        import matplotlib
+        matplotlib.use(backend)
+    _fig, ax = plt.subplots(figsize=figsize)
     ax.set_aspect('equal')
     ax.grid(True)
-    # assuming there is only 1 rupture, so rup_id=0
-    ax, min_x, min_y, max_x, max_y = add_rupture(ax, dstore, rup_id=0)
+    ax, min_x, min_y, max_x, max_y = add_rupture(ax, rup)
     ax = add_borders(ax)
-    BUF_ANGLE = 4
-    ax.set_xlim(min_x - BUF_ANGLE, max_x + BUF_ANGLE)
-    ax.set_ylim(min_y - BUF_ANGLE, max_y + BUF_ANGLE)
+    BUF_ANGLE = 1
+    xlim = (min_x - BUF_ANGLE, max_x + BUF_ANGLE)
+    ylim = (min_y - BUF_ANGLE, max_y + BUF_ANGLE)
+    ax.set_xlim(*xlim)
+    ax.set_ylim(*ylim)
+    if with_populated_places:
+        ax = add_populated_places(ax, xlim, ylim)
     ax.legend()
-    return plt
+    if return_base64:
+        return plt_to_base64(plt)
+    else:
+        return plt
 
 
 def add_surface_3d(ax, surface, label):
@@ -147,13 +243,11 @@
     ax.plot_surface(lon_grid, lat_grid, depth_grid, alpha=0.5, label=label)
 
 
-def plot_rupture_3d(dstore):
+def plot_rupture_3d(rup):
     # NB: matplotlib is imported inside since it is a costly import
     plt = import_plt()
     fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
-    ebr = get_ebrupture(dstore, rup_id=0)
-    rup = ebr.rupture
     if hasattr(rup.surface, 'surfaces'):
         for surf_idx, surface in enumerate(rup.surface.surfaces):
             add_surface_3d(ax, surface, 'Surface %d' % surf_idx)
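Note for reviewers: a minimal usage sketch of the refactored plotting entry points above. The function and keyword names are taken from this diff; the calc id and the interactive/non-interactive split are illustrative assumptions, not part of the patch.

```python
# Sketch only: assumes a datastore containing at least one stored rupture.
from openquake.commonlib import datastore
from openquake.calculators.getters import get_rupture_from_dstore
from openquake.calculators.postproc.plots import plot_rupture

dstore = datastore.read(-1)  # latest calculation (illustrative)
rup = get_rupture_from_dstore(dstore, rup_id=0)

# interactive use: plot_rupture returns the pyplot module
plot_rupture(rup).show()

# web use: non-interactive backend, base64-encoded PNG ready for embedding in HTML
png_b64 = plot_rupture(rup, backend='Agg', figsize=(6, 6),
                       with_populated_places=True, return_base64=True)
```

plot_rupture_3d follows the same pattern, while plot_shakemap takes the ShakeMap array directly rather than the datastore.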
"hcurves?kind=mean&imt=PGA&site_id=0" @@ -1042,7 +1044,8 @@ def make_figure_rupture(extractors, what): """ [ex] = extractors dstore = ex.dstore - return plot_rupture(dstore) + rup = get_rupture_from_dstore(dstore, rup_id=0) + return plot_rupture(rup) def make_figure_rupture_3d(extractors, what): @@ -1051,7 +1054,8 @@ def make_figure_rupture_3d(extractors, what): """ [ex] = extractors dstore = ex.dstore - return plot_rupture_3d(dstore) + rup = get_rupture_from_dstore(dstore, rup_id=0) + return plot_rupture_3d(rup) def plot_wkt(wkt_string): diff --git a/openquake/commands/plot_assets.py b/openquake/commands/plot_assets.py index 752c3aa8511a..f41e9a21b021 100644 --- a/openquake/commands/plot_assets.py +++ b/openquake/commands/plot_assets.py @@ -22,6 +22,7 @@ import logging from openquake.commonlib import datastore from openquake.hazardlib.geo.utils import cross_idl, get_bbox +from openquake.calculators.getters import get_rupture_from_dstore from openquake.calculators.postproc.plots import ( add_borders, get_assetcol, get_country_iso_codes) from openquake.calculators.postproc.plots import add_rupture @@ -83,8 +84,8 @@ def main(calc_id: int = -1, site_model=False, print('rupture(%s, %s), dist=%s' % (lon, lat, dist)) if os.environ.get('OQ_APPLICATION_MODE') == 'ARISTOTLE': # assuming there is only 1 rupture, so rup_id=0 - ax, _min_x, _min_y, _max_x, _max_y = add_rupture( - ax, dstore, rup_id=0) + rup = get_rupture_from_dstore(dstore, rup_id=0) + ax, _min_x, _min_y, _max_x, _max_y = add_rupture(ax, rup) else: p.scatter(xlon, xlat, marker='*', color='orange', label='hypocenter', alpha=.5) diff --git a/openquake/commands/tests/independence_test.py b/openquake/commands/tests/independence_test.py index 016a39e0e625..883737c03d38 100644 --- a/openquake/commands/tests/independence_test.py +++ b/openquake/commands/tests/independence_test.py @@ -35,6 +35,9 @@ class IndependenceTestCase(unittest.TestCase): + def test_hazardlib(self): + assert_independent('openquake.hazardlib', 'openquake.calculators') + def test_risklib(self): assert_independent('openquake.risklib', 'openquake.commonlib') assert_independent('openquake.risklib', 'openquake.calculators') diff --git a/openquake/commonlib/readinput.py b/openquake/commonlib/readinput.py index f7cc6cf34b1d..8e30c2153b97 100644 --- a/openquake/commonlib/readinput.py +++ b/openquake/commonlib/readinput.py @@ -1662,6 +1662,20 @@ def read_geometries(fname, code, buffer=0): return pandas.DataFrame(dict(code=codes, geom=geoms)) +@functools.lru_cache() +def read_populated_places(fname, lon_name, lat_name, label_name): + """ + Reading coordinates and names of populated places from a CSV file + + :returns: a Pandas DataFrame + """ + data = pandas.read_csv(fname) + expected_colnames_set = {lon_name, lat_name, label_name} + if not expected_colnames_set.issubset(data.columns): + raise ValueError(f"CSV file must contain {expected_colnames_set} columns.") + return data + + def read_mosaic_df(buffer): """ :returns: a DataFrame of geometries for the mosaic models @@ -1680,6 +1694,21 @@ def read_countries_df(buffer=0.1): return read_geometries(fname, 'shapeGroup', buffer) +def read_populated_places_df(lon_field='longitude', lat_field='latitude', + label_field='name'): + """ + Reading from a 'worldcities.csv' file in the mosaic_dir, if present, or returning + None otherwise + + :returns: a DataFrame of coordinates and names of populated places + """ + mosaic_dir = config.directory.mosaic_dir + fname = os.path.join(mosaic_dir, 'worldcities.csv') + if not os.path.isfile(fname): + 
diff --git a/openquake/engine/aristotle.py b/openquake/engine/aristotle.py
index 0bddf2225a7f..3f829e18d05c 100644
--- a/openquake/engine/aristotle.py
+++ b/openquake/engine/aristotle.py
@@ -121,6 +121,8 @@ def get_aristotle_params(arist):
     inputs = {'exposure': [arist.exposure_hdf5],
               'job_ini': ''}
     rupdic = get_rupture_dict(arist.rupture_dict, arist.ignore_shakemap)
+    if 'shakemap_array' in rupdic:
+        del rupdic['shakemap_array']
     if arist.station_data_file is None:
         # NOTE: giving precedence to the station_data_file uploaded via form
         try:
diff --git a/openquake/hazardlib/shakemap/parsers.py b/openquake/hazardlib/shakemap/parsers.py
index 6cf145dcceb7..c685ff3dbe63 100644
--- a/openquake/hazardlib/shakemap/parsers.py
+++ b/openquake/hazardlib/shakemap/parsers.py
@@ -32,6 +32,7 @@
 import json
 import zipfile
 import pytz
+import base64
 import pandas as pd
 from datetime import datetime
 from shapely.geometry import Polygon
@@ -533,10 +534,72 @@ def load_rupdic_from_finite_fault(usgs_id, mag, products):
     rupdic = {'lon': lon, 'lat': lat, 'dep': float(p['depth']),
               'mag': mag, 'rake': 0.,
               'local_timestamp': str(local_time), 'time_event': time_event,
-              'is_point_rup': True, 'usgs_id': usgs_id, 'rupture_file': None}
+              'is_point_rup': True,
+              'pga_map_png': None, 'mmi_map_png': None,
+              'usgs_id': usgs_id, 'rupture_file': None}
     return rupdic
 
 
+def get_shakemap_version(usgs_id):
+    # USGS event page to get ShakeMap details
+    product_url = US_GOV + f"/earthquakes/feed/v1.0/detail/{usgs_id}.geojson"
+    # Get the JSON data for the earthquake event
+    try:
+        with urlopen(product_url) as response:
+            event_data = json.loads(response.read().decode())
+    except Exception as e:
+        print(f"Error: Unable to fetch data for event {usgs_id} - {e}")
+        return None
+    if ("properties" in event_data and "products" in event_data["properties"] and
+            "shakemap" in event_data["properties"]["products"]):
+        shakemap_data = event_data["properties"]["products"]["shakemap"][0]
+        # e.g.: 'https://earthquake.usgs.gov/product/shakemap/'
+        # 'us7000n7n8/us/1726699735514/download/intensity.jpg'
+        version_id = shakemap_data["contents"]["download/intensity.jpg"]["url"].split(
+            '/')[-3]
+        return version_id
+    else:
+        print(f"No ShakeMap found for event {usgs_id}")
+        return None
+
+
+def download_jpg(usgs_id, what):
+    """
+    It can be used to download a jpg file from the USGS service, returning it in a
+    base64 format that can be easily passed to a Django template
+    """
+    version_id = get_shakemap_version(usgs_id)
+    if version_id:
+        intensity_url = (f'{US_GOV}/product/shakemap/{usgs_id}/us/'
+                         f'{version_id}/download/{what}.jpg')
+        try:
+            with urlopen(intensity_url) as img_response:
+                img_data = img_response.read()
+                img_base64 = base64.b64encode(img_data).decode('utf-8')
+                return img_base64
+        except Exception as e:
+            print(f"Error: Unable to download the {what} image - {e}")
+            return None
+    else:
+        print("Error: Could not retrieve the ShakeMap version ID.")
+        return None
+
+
+def download_grid(shakemap_contents):
+    if 'download/grid.xml' in shakemap_contents:
+        url = shakemap_contents.get('download/grid.xml')['url']
+        logging.info('Downloading grid.xml')
+        grid_fname = gettemp(urlopen(url).read(), suffix='.xml')
+        return grid_fname
+
+
+def download_rupture_data(shakemap_contents):
+    url = shakemap_contents.get('download/rupture.json')['url']
+    logging.info('Downloading rupture.json')
+    rup_data = json.loads(urlopen(url).read())
+    return rup_data
+
+
 def download_rupture_dict(usgs_id, ignore_shakemap=False):
     """
     Download a rupture from the USGS site given a ShakeMap ID.
@@ -561,6 +624,7 @@
     try:
         products['finite-fault']
     except KeyError:
+        # NOTE: we might also try reading information from phase-data or origin
         raise MissingLink(
             'There is no shakemap nor finite-fault info for %s' % usgs_id)
     return load_rupdic_from_finite_fault(usgs_id, mag, products)
@@ -568,9 +632,11 @@
     contents = shakemap['contents']
     if 'download/rupture.json' not in contents:
         return load_rupdic_from_finite_fault(usgs_id, mag, products)
-    url = contents.get('download/rupture.json')['url']
-    logging.info('Downloading rupture.json')
-    rup_data = json.loads(urlopen(url).read())
+    shakemap_array = None
+    grid_fname = download_grid(contents)
+    if grid_fname is not None:
+        shakemap_array = get_shakemap_array(grid_fname)
+    rup_data = download_rupture_data(contents)
     feats = rup_data['features']
     is_point_rup = len(feats) == 1 and feats[0]['geometry']['type'] == 'Point'
     md = rup_data['metadata']
@@ -584,6 +650,7 @@
               'mag': md['mag'], 'rake': md['rake'],
               'local_timestamp': str(local_time), 'time_event': time_event,
               'is_point_rup': is_point_rup,
+              'shakemap_array': shakemap_array,
               'usgs_id': usgs_id, 'rupture_file': None}
     try:
         oq_rup = convert_to_oq_rupture(rup_data)
@@ -597,6 +664,7 @@
                   'mag': md['mag'], 'rake': md['rake'],
                   'local_timestamp': str(local_time), 'time_event': time_event,
                   'is_point_rup': True,
+                  'shakemap_array': shakemap_array,
                   'usgs_id': usgs_id, 'rupture_file': None,
                   'error': error_msg}
     comment_str = (
         f"
[unrecoverable in extraction: the remainder of the comment_str f-string above and the diff hunk for a Django HTML template; of the template change only a {% csrf_token %} tag and the hunk header @@ -184,7 +190,7 @@ survive]
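Before the test changes, a hedged example of how the new USGS helpers added to parsers.py might be exercised; the event id is only an example (it is the one used in the tests below) and both calls perform network I/O against the USGS feed:

```python
# Sketch only: network access required; both helpers return None on any failure.
from openquake.hazardlib.shakemap.parsers import (
    get_shakemap_version, download_jpg)

usgs_id = 'us6000jllz'  # example event id
version = get_shakemap_version(usgs_id)
if version:
    # 'intensity' mirrors the download/intensity.jpg content key used above
    intensity_b64 = download_jpg(usgs_id, 'intensity')
    print(usgs_id, version, len(intensity_b64 or ''))
```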
diff --git a/openquake/server/tests/test_aristotle_mode.py b/openquake/server/tests/test_aristotle_mode.py
index a6ea9583925f..7ccc9021a22e 100644
--- a/openquake/server/tests/test_aristotle_mode.py
+++ b/openquake/server/tests/test_aristotle_mode.py
@@ -253,6 +253,7 @@ def test_get_rupture_data_from_shakemap_conversion_error(self):
         expected_keys = [
             'is_point_rup', 'local_timestamp', 'time_event',
             'lon', 'lat', 'dep', 'mag', 'rake', 'usgs_id',
+            'mmi_map_png', 'pga_map_png',
             'rupture_file', 'rupture_file_from_usgs', 'error',
             'station_data_file_from_usgs', 'mosaic_models', 'trts']
         self.assertEqual(sorted(ret_dict.keys()), sorted(expected_keys))
@@ -293,6 +294,7 @@ def test_get_rupture_data_from_shakemap_correctly_converted(self):
             'is_point_rup', 'local_timestamp', 'time_event',
             'lon', 'lat', 'dep', 'mag', 'rake', 'usgs_id',
             'rupture_file', 'rupture_file_from_usgs',
+            'mmi_map_png', 'pga_map_png',
             'station_data_error',
             'station_data_file_from_usgs', 'trts', 'mosaic_models', 'trt']
         self.assertEqual(sorted(ret_dict.keys()), sorted(expected_keys))
@@ -330,6 +332,7 @@ def test_get_point_rupture_data_from_shakemap(self):
         expected_keys = [
             'is_point_rup', 'local_timestamp', 'time_event', 'lon', 'lat',
             'dep', 'mag', 'rake', 'usgs_id',
+            'mmi_map_png', 'pga_map_png',
             'rupture_file', 'rupture_file_from_usgs',
             'station_data_file_from_usgs', 'trts',
             'mosaic_models']
@@ -350,11 +353,14 @@ def test_get_rupture_data_from_finite_fault(self):
         expected_keys = [
             'is_point_rup', 'local_timestamp', 'time_event', 'lon', 'lat',
             'dep', 'mag', 'rake', 'usgs_id',
+            'mmi_map_png', 'pga_map_png',
             'rupture_file', 'rupture_file_from_usgs',
             'station_data_file_from_usgs', 'trts',
             'mosaic_models']
         self.assertEqual(sorted(ret_dict.keys()), sorted(expected_keys))
         self.assertEqual(ret_dict['rupture_file'], None)
+        self.assertEqual(ret_dict['mmi_map_png'], None)
+        self.assertEqual(ret_dict['pga_map_png'], None)
         self.assertEqual(ret_dict['usgs_id'], 'us6000jllz')
         self.assertEqual(ret_dict['mosaic_models'], ['ARB', 'MIE'])
         self.assertEqual(ret_dict['trts'], {
diff --git a/openquake/server/views.py b/openquake/server/views.py
index 304842fea8c8..c60276bbe3ae 100644
--- a/openquake/server/views.py
+++ b/openquake/server/views.py
@@ -54,6 +54,7 @@
 from openquake.calculators.getters import NotFound
 from openquake.calculators.export import export
 from openquake.calculators.extract import extract as _extract
+from openquake.calculators.postproc.plots import plot_shakemap  # , plot_rupture
 from openquake.engine import __version__ as oqversion
 from openquake.engine.export import core
 from openquake.engine import engine, aelo, aristotle
@@ -758,6 +759,26 @@ def aristotle_get_rupture_data(request):
     rupdic['mosaic_models'] = mosaic_models
     rupdic['rupture_file_from_usgs'] = rupdic['rupture_file']
     rupdic['station_data_file_from_usgs'] = station_data_file
+    oq_rup = None
+    if 'oq_rup' in rupdic:
+        oq_rup = rupdic['oq_rup']
+        # FIXME: check if we want to display the rupture png as a separate plot,
+        # instead of inserting the hypocenter and the rupture boundaries in the
+        # gmf plots
+        # # Agg is a non-interactive backend
+        # rupdic['rupture_png'] = plot_rupture(
+        #     rupdic['oq_rup'], backend='Agg', figsize=(6, 6),
+        #     with_populated_places=True, return_base64=True)
+        del rupdic['oq_rup']
+    if 'shakemap_array' in rupdic:
+        shakemap_array = rupdic['shakemap_array']
+        figsize = (14, 7)  # fitting in a single row in the template without resizing
+        rupdic['pga_map_png'] = plot_shakemap(
+            shakemap_array, 'PGA', backend='Agg', figsize=figsize,
+            with_populated_places=False, return_base64=True, rupture=oq_rup)
+        rupdic['mmi_map_png'] = plot_shakemap(
+            shakemap_array, 'MMI', backend='Agg', figsize=figsize,
+            with_populated_places=False, return_base64=True, rupture=oq_rup)
+        del rupdic['shakemap_array']
     response_data = rupdic
     return HttpResponse(content=json.dumps(response_data),
                         content_type=JSON, status=200)
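For completeness, a sketch of the server-side flow that produces the two PNGs stored in rupdic. The IMT names and figsize follow views.py above; the grid path is a placeholder, and the embedding snippet in the trailing comment is only a hint, since the actual template change is not recoverable here.

```python
# Sketch only: turns a downloaded ShakeMap grid into base64 PNGs, as views.py does.
from openquake.hazardlib.shakemap.parsers import get_shakemap_array
from openquake.calculators.postproc.plots import plot_shakemap

shakemap_array = get_shakemap_array('grid.xml')  # placeholder path
pga_map_png = plot_shakemap(shakemap_array, 'PGA', backend='Agg',
                            figsize=(14, 7), return_base64=True)
mmi_map_png = plot_shakemap(shakemap_array, 'MMI', backend='Agg',
                            figsize=(14, 7), return_base64=True)
# the strings can be embedded directly in a template, e.g.:
# <img src="data:image/png;base64,{{ pga_map_png }}">
```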