Skip to content

Commit

Permalink
move objective function to a separate file
Browse files Browse the repository at this point in the history
  • Loading branch information
twallema committed Oct 28, 2023
1 parent 79ac5d0 commit a9b0646
Show file tree
Hide file tree
Showing 2 changed files with 120 additions and 126 deletions.
130 changes: 4 additions & 126 deletions notebooks/manuscripts/EPNM/compute_sensitivity-grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from EPNM.models.TDPF import household_demand_shock, compute_income_expectations
from EPNM.models.draw_functions import draw_function as draw_function
from EPNM.data.calibration_data import get_NAI_value_added, get_revenue_survey, get_employment_survey, get_synthetic_GDP, get_B2B_demand
from EPNM.data.objective_function import compute_AAD

# Start- and enddate simulation
start_sim = '2020-03-01'
Expand Down Expand Up @@ -57,131 +58,6 @@
transport_labels = ['H49', 'H50', 'H51']
labels = [industry_labels, retail_labels, transport_labels, consumer_facing_labels]

# Aggregation functions
import xarray as xr
def aggregate_quarterly(simulation_in):
    """Aggregate a NACE64-level simulation temporally to quarterly means.

    Input
    =====
    simulation_in: xarray.DataArray
        Simulation result with dimensions 'date' and 'NACE64'

    Output
    ======
    simulation_out: xarray.DataArray
        Quarterly-averaged simulation result
        NOTE(review): the 'NACE64' coordinate is rebuilt from
        get_sector_labels('NACE64'), not copied from the input — assumes the
        input sectors are in that canonical order; confirm at call sites.
    """
    quarterly = simulation_in.resample(date='Q').mean()
    simulation_out = xr.DataArray(
        quarterly.values,
        dims=['date', 'NACE64'],
        coords=dict(NACE64=(['NACE64'], get_sector_labels('NACE64')),
                    date=quarterly.coords['date']),
    )
    return simulation_out

def aggregate_NACE21(simulation_in):
    """Convert a simulation of the economic IO model from the NACE64 level to the NACE21 level.

    Input
    =====
    simulation_in: xarray.DataArray
        Simulation result (NACE64 level). Obtained from a pySODM
        xarray.Dataset simulation result by using: xarray.Dataset[state_name]

    Output
    ======
    simulation_out: xarray.DataArray
        Simulation result (NACE21 level)
    """
    # Aggregate in two steps: NACE64 --> NACE38 --> NACE21
    conv_64_to_38 = np.transpose(get_sectoral_conversion_matrix('NACE64_NACE38'))
    conv_38_to_21 = np.transpose(get_sectoral_conversion_matrix('NACE38_NACE21'))
    values_NACE38 = np.matmul(simulation_in.values, conv_64_to_38)
    values_NACE21 = np.matmul(values_NACE38, conv_38_to_21)
    simulation_out = xr.DataArray(
        values_NACE21,
        dims=['date', 'NACE21'],
        coords=dict(NACE21=(['NACE21'], get_sector_labels('NACE21')),
                    date=simulation_in.coords['date']),
    )
    return simulation_out

def compute_AAD(out, weighted=True):
    """Computes the Average Absolute Deviation between model prediction and data

    NOTE(review): this version reads `data_B2B`, `data_GDP`, `data_revenue`,
    `data_employment` and `params` from module scope rather than taking them as
    arguments (the relocated copy in objective_function.py parameterizes them).

    Parameters
    ----------
    out: xarray.Dataset
        pySODM simulation result; states 'O', 'x' and 'l' are used
    weighted: bool
        If True, weigh each sector's deviation by its relative pre-pandemic size

    Returns
    -------
    (float, float)
        Mean absolute deviation and mean signed deviation, averaged over the
        four data sources (B2B demand, GDP, revenue, employment)
    """

    # Pre-allocate metric
    hyperdist_abs = []
    hyperdist = []

    # B2B Weighted Euclidian distance
    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    sectors = data_B2B.index.get_level_values('NACE21').unique()
    dates = data_B2B.index.get_level_values('date').unique()
    out_NACE21 = aggregate_NACE21(out['O'])
    out_NACE21_quart = out_NACE21.resample(date='Q').mean()
    # Sectoral B2B demand at t=0, aggregated NACE64 --> NACE21, used as weights
    B2B_demand = np.matmul(params['O_j'], np.transpose(get_sectoral_conversion_matrix('NACE64_NACE21')))
    # assumes exactly 4 quarters of data — TODO confirm; more dates would raise IndexError
    dist_abs=np.zeros(4)
    dist=np.zeros(4)
    for i,date in enumerate(dates):
        dist_abs_temp=[]
        dist_temp=[]
        for j,sector in enumerate(sectors):
            if sector!='U':
                # Percent deviation from pre-pandemic level: data (x) vs. model (y)
                x=data_B2B.loc[date, sector]*100-100
                y=out_NACE21_quart.sel(NACE21=sector).sel(date=date)/out_NACE21.sel(NACE21=sector).isel(date=0)*100-100
                # Weighted euclidian distance in plane
                if weighted==True:
                    dist_abs_temp.append(B2B_demand[j]/sum(B2B_demand)*abs(abs(x)-abs(y.values)))
                    dist_temp.append(B2B_demand[j]/sum(B2B_demand)*(abs(x)-abs(y.values)))
                else:
                    dist_abs_temp.append(abs(abs(x)-abs(y.values)))
                    dist_temp.append((abs(x)-abs(y.values)))

        # Weighted contributions already sum to a weighted mean; unweighted takes a plain mean
        if weighted == True:
            dist_abs[i] = np.sum(dist_abs_temp)
            dist[i] = np.sum(dist_temp)
        else:
            dist_abs[i] = np.mean(dist_abs_temp)
            dist[i] = np.mean(dist_temp)

    hyperdist_abs.append(np.mean(dist_abs))
    hyperdist.append(np.mean(dist))

    # GDP, revenue, employment
    # ~~~~~~~~~~~~~~~~~~~~~~~~

    # Model state and sectoral-size weights matching each dataset
    states = ['x', 'x', 'l']
    sizes = [params['x_0'], params['x_0'], params['l_0']]

    # NOTE(review): allocated once for all three datasets; stale values could
    # carry over between datasets if their date counts differ — verify
    dist_abs=np.zeros(4)
    dist=np.zeros(4)
    for k, data in enumerate([data_GDP, data_revenue, data_employment]):
        dates = data.index.get_level_values('date').unique()
        sectors = data.index.get_level_values('NACE64').unique()
        out_quart = out[states[k]].resample(date='Q').mean()
        for i,date in enumerate(dates):
            cumsize=[]
            dist_abs_temp=[]
            dist_temp=[]
            for j,sector in enumerate(sectors):
                if sector != 'BE':
                    # Percent deviation from pre-pandemic level: data (x) vs. model (y)
                    x=data.loc[date, sector]*100-100
                    y=out_quart.sel(NACE64=sector).sel(date=date)/out[states[k]].sel(NACE64=sector).isel(date=0)*100-100
                    # Weighted euclidian distance in plane
                    if weighted == True:
                        dist_abs_temp.append(sizes[k][get_sector_labels('NACE64').index(sector)]/sum(sizes[k])*abs(abs(x)-abs(y.values)))
                        dist_temp.append(sizes[k][get_sector_labels('NACE64').index(sector)]/sum(sizes[k])*(abs(x)-abs(y.values)))
                        cumsize.append(sizes[k][get_sector_labels('NACE64').index(sector)]/sum(sizes[k]))
                    else:
                        dist_abs_temp.append(abs(abs(x)-abs(y.values)))
                        dist_temp.append((abs(x)-abs(y.values)))

            # National aggregate 'BE': always appended unweighted
            # Weighted euclidian distance in plane
            x=data.loc[date, 'BE']*100-100
            y=out_quart.sum(dim='NACE64').sel(date=date)/out[states[k]].sum(dim='NACE64').isel(date=0)*100-100
            dist_abs_temp.append(abs(abs(x)-abs(y.values)))
            dist_temp.append((abs(x)-abs(y.values)))
            # Average
            if weighted==True:
                # sum(cumsize) re-normalizes the sector weights; +1 accounts for the 'BE' term
                dist_abs[i] = 1/(1+sum(cumsize))*np.sum(dist_abs_temp)
                dist[i] = 1/(1+sum(cumsize))*np.sum(dist_temp)
            else:
                dist_abs[i] = np.mean(dist_abs_temp)
                dist[i] = np.mean(dist_temp)

        hyperdist_abs.append(np.mean(dist_abs))
        hyperdist.append(np.mean(dist))

    return np.mean(hyperdist_abs), np.mean(hyperdist)

def mp_run_sensitivity(combinations, model, params):
# Unpack arguments
ind, ret, tran, cons, ratio_c_s, tau, hiring_firing = combinations
Expand All @@ -200,7 +76,9 @@ def mp_run_sensitivity(combinations, model, params):
model.parameters['gamma_H'] = 2*hiring_firing
# Simulate model (discrete)
out = model.sim([start_sim, end_sim], tau=1)
return compute_AAD(out, weighted=False)[0], compute_AAD(out, weighted=True)[0]
print('done')
return compute_AAD(out, params, data_B2B, data_GDP, data_revenue, data_employment, weighted=False)[0], \
compute_AAD(out, params, data_B2B, data_GDP, data_revenue, data_employment, weighted=True)[0]

from functools import partial
import multiprocessing as mp
Expand Down
116 changes: 116 additions & 0 deletions src/EPNM/data/objective_function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
import numpy as np
import xarray as xr
from EPNM.data.utils import get_sectoral_conversion_matrix, get_sector_labels

# Aggregation function
def aggregate_NACE21(simulation_in):
    """Convert a simulation of the economic IO model from the NACE64 level to the NACE21 level.

    Input
    =====
    simulation_in: xarray.DataArray
        Simulation result (NACE64 level). Obtained from a pySODM
        xarray.Dataset simulation result by using: xarray.Dataset[state_name]

    Output
    ======
    simulation_out: xarray.DataArray
        Simulation result (NACE21 level)
    """
    # Chain the two conversion matrices: NACE64 --> NACE38 --> NACE21
    intermediate = np.matmul(simulation_in.values,
                             np.transpose(get_sectoral_conversion_matrix('NACE64_NACE38')))
    aggregated = np.matmul(intermediate,
                           np.transpose(get_sectoral_conversion_matrix('NACE38_NACE21')))
    return xr.DataArray(
        aggregated,
        dims=['date', 'NACE21'],
        coords=dict(NACE21=(['NACE21'], get_sector_labels('NACE21')),
                    date=simulation_in.coords['date']),
    )

# function to compute AAD
def compute_AAD(out, params, data_B2B, data_GDP, data_revenue, data_employment, weighted=True):
    """Computes the Average Absolute Deviation between model prediction and data

    Input
    =====
    out: xarray.Dataset
        pySODM simulation result; states 'O' (B2B demand), 'x' (output) and
        'l' (labour compensation) are used
    params: dict
        Model parameters; 'O_j', 'x_0' and 'l_0' serve as sectoral size weights
    data_B2B: pandas.Series
        B2B demand data; MultiIndex ('date', 'NACE21')
    data_GDP, data_revenue, data_employment: pandas.Series
        Calibration data; MultiIndex ('date', 'NACE64'), including the
        national aggregate under sector code 'BE'
    weighted: bool
        If True, weigh every sector's deviation by its relative pre-pandemic size

    Output
    ======
    (float, float)
        Mean absolute deviation and mean signed deviation, averaged over the
        four data sources (B2B demand, GDP, revenue, employment)
    """

    # Pre-allocate metric
    hyperdist_abs = []
    hyperdist = []

    # B2B Weighted Euclidian distance
    # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    sectors = data_B2B.index.get_level_values('NACE21').unique()
    dates = data_B2B.index.get_level_values('date').unique()
    out_NACE21 = aggregate_NACE21(out['O'])
    out_NACE21_quart = out_NACE21.resample(date='Q').mean()
    # Sectoral B2B demand at t=0, aggregated NACE64 --> NACE21, used as weights
    B2B_demand = np.matmul(params['O_j'], np.transpose(get_sectoral_conversion_matrix('NACE64_NACE21')))
    # Size by the actual number of dates (was hardcoded to 4 quarters, which
    # raised IndexError for longer datasets)
    dist_abs = np.zeros(len(dates))
    dist = np.zeros(len(dates))
    for i, date in enumerate(dates):
        dist_abs_temp = []
        dist_temp = []
        for j, sector in enumerate(sectors):
            if sector != 'U':
                # Percent deviation from pre-pandemic level: data (x) vs. model (y)
                x = data_B2B.loc[date, sector]*100-100
                y = out_NACE21_quart.sel(NACE21=sector).sel(date=date)/out_NACE21.sel(NACE21=sector).isel(date=0)*100-100
                # Weighted euclidian distance in plane
                if weighted:
                    w = B2B_demand[j]/sum(B2B_demand)
                    dist_abs_temp.append(w*abs(abs(x)-abs(y.values)))
                    dist_temp.append(w*(abs(x)-abs(y.values)))
                else:
                    dist_abs_temp.append(abs(abs(x)-abs(y.values)))
                    dist_temp.append((abs(x)-abs(y.values)))

        # Weighted contributions already sum to a weighted mean; unweighted takes a plain mean
        if weighted:
            dist_abs[i] = np.sum(dist_abs_temp)
            dist[i] = np.sum(dist_temp)
        else:
            dist_abs[i] = np.mean(dist_abs_temp)
            dist[i] = np.mean(dist_temp)

    hyperdist_abs.append(np.mean(dist_abs))
    hyperdist.append(np.mean(dist))

    # GDP, revenue, employment
    # ~~~~~~~~~~~~~~~~~~~~~~~~

    # Model state and sectoral-size weights matching each dataset
    states = ['x', 'x', 'l']
    sizes = [params['x_0'], params['x_0'], params['l_0']]
    # Hoisted out of the inner loops (was recomputed per sector per date)
    NACE64_labels = get_sector_labels('NACE64')

    for k, data in enumerate([data_GDP, data_revenue, data_employment]):
        dates = data.index.get_level_values('date').unique()
        sectors = data.index.get_level_values('NACE64').unique()
        out_quart = out[states[k]].resample(date='Q').mean()
        # Re-allocate per dataset, sized by its own dates (was a single
        # np.zeros(4) shared by all three datasets, so stale values could
        # leak between datasets with differing date counts)
        dist_abs = np.zeros(len(dates))
        dist = np.zeros(len(dates))
        total_size = sum(sizes[k])
        for i, date in enumerate(dates):
            cumsize = []
            dist_abs_temp = []
            dist_temp = []
            for sector in sectors:
                if sector != 'BE':
                    # Percent deviation from pre-pandemic level: data (x) vs. model (y)
                    x = data.loc[date, sector]*100-100
                    y = out_quart.sel(NACE64=sector).sel(date=date)/out[states[k]].sel(NACE64=sector).isel(date=0)*100-100
                    # Weighted euclidian distance in plane
                    if weighted:
                        w = sizes[k][NACE64_labels.index(sector)]/total_size
                        dist_abs_temp.append(w*abs(abs(x)-abs(y.values)))
                        dist_temp.append(w*(abs(x)-abs(y.values)))
                        cumsize.append(w)
                    else:
                        dist_abs_temp.append(abs(abs(x)-abs(y.values)))
                        dist_temp.append((abs(x)-abs(y.values)))

            # National aggregate 'BE': always appended unweighted
            x = data.loc[date, 'BE']*100-100
            y = out_quart.sum(dim='NACE64').sel(date=date)/out[states[k]].sum(dim='NACE64').isel(date=0)*100-100
            dist_abs_temp.append(abs(abs(x)-abs(y.values)))
            dist_temp.append((abs(x)-abs(y.values)))
            # Average; sum(cumsize) re-normalizes the sector weights and
            # the +1 accounts for the 'BE' term
            if weighted:
                dist_abs[i] = 1/(1+sum(cumsize))*np.sum(dist_abs_temp)
                dist[i] = 1/(1+sum(cumsize))*np.sum(dist_temp)
            else:
                dist_abs[i] = np.mean(dist_abs_temp)
                dist[i] = np.mean(dist_temp)

        hyperdist_abs.append(np.mean(dist_abs))
        hyperdist.append(np.mean(dist))

    return np.mean(hyperdist_abs), np.mean(hyperdist)

0 comments on commit a9b0646

Please sign in to comment.