[Refine] Refine loss and metric module (#919)
* return loss dict instead of loss summation for all loss.forward

* adapt all mtl module for Dict[str, Tensor] type of input losses

* fix

* remove 'area' in Constraint.output_keys

* fix eval.py

* fix code

* fix examples in func.py

* fix examples in func.py

* Fix for MSELossWithL2Decay and train_enn.py

* fix doctest in loss/mse.py

* fix epnn

* fix
HydrogenSulfate authored Jun 6, 2024
1 parent d5f10d5 commit bf17f68
Showing 50 changed files with 367 additions and 211 deletions.
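
Taken together, the diffs below change every user-defined loss callable from returning a single pre-summed scalar tensor to returning a Dict[str, paddle.Tensor] of named loss terms, which the adapted MTL weighting modules then consume (see the commit message above). A minimal sketch of the new convention, modeled on the amgnet/deepcfd examples below; the key name, tensor shapes, and the smoke test are illustrative assumptions, not part of this commit:

import paddle
import paddle.nn.functional as F

def loss_expr(output_dict, label_dict, weight_dict=None):
    # New convention: return named loss terms instead of one summed scalar,
    # so downstream weighting modules can see each term separately.
    return {"pred": F.mse_loss(output_dict["pred"], label_dict["pred"])}

# Illustrative smoke test of the callable on random data (hypothetical keys).
out = {"pred": paddle.rand([4, 1])}
lab = {"pred": paddle.zeros([4, 1])}
print(loss_expr(out, lab))  # -> {"pred": <scalar Tensor>}

As the phygeonet diffs show, such a callable is still wrapped in ppsci.loss.FunctionalLoss(...) exactly as before; only its return type changes.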
5 changes: 4 additions & 1 deletion examples/RegAE/RegAE.py
@@ -56,7 +56,10 @@ def loss_expr(output_dict, label_dict, weight_dict=None):
base = paddle.exp(2.0 * log_sigma) + paddle.pow(mu, 2) - 1.0 - 2.0 * log_sigma
KLLoss = 0.5 * paddle.sum(base) / mu.shape[0]

- return F.mse_loss(output_dict["decoder_z"], label_dict["p_train"]) + KLLoss
+ return {
+     "decode_loss": F.mse_loss(output_dict["decoder_z"], label_dict["p_train"])
+     + KLLoss
+ }

# set constraint
sup_constraint = ppsci.constraint.SupervisedConstraint(
2 changes: 1 addition & 1 deletion examples/amgnet/amgnet_airfoil.py
@@ -37,7 +37,7 @@ def train_mse_func(
label_dict: Dict[str, "pgl.Graph"],
*args,
) -> paddle.Tensor:
- return F.mse_loss(output_dict["pred"], label_dict["label"].y)
+ return {"pred": F.mse_loss(output_dict["pred"], label_dict["label"].y)}


def eval_rmse_func(
2 changes: 1 addition & 1 deletion examples/amgnet/amgnet_cylinder.py
@@ -37,7 +37,7 @@ def train_mse_func(
label_dict: Dict[str, "pgl.Graph"],
*args,
) -> paddle.Tensor:
- return F.mse_loss(output_dict["pred"], label_dict["label"].y)
+ return {"pred": F.mse_loss(output_dict["pred"], label_dict["label"].y)}


def eval_rmse_func(
2 changes: 1 addition & 1 deletion examples/cfdgcn/cfdgcn.py
@@ -33,7 +33,7 @@ def train_mse_func(
label_dict: Dict[str, "pgl.Graph"],
*args,
) -> paddle.Tensor:
- return F.mse_loss(output_dict["pred"], label_dict["label"].y)
+ return {"pred": F.mse_loss(output_dict["pred"], label_dict["label"].y)}


def eval_rmse_func(
2 changes: 1 addition & 1 deletion examples/deepcfd/deepcfd.py
@@ -243,7 +243,7 @@ def loss_expr(
loss_v = (output[:, 1:2, :, :] - y[:, 1:2, :, :]) ** 2
loss_p = (output[:, 2:3, :, :] - y[:, 2:3, :, :]).abs()
loss = (loss_u + loss_v + loss_p) / CHANNELS_WEIGHTS
- return loss.sum()
+ return {"output": loss.sum()}

sup_constraint = ppsci.constraint.SupervisedConstraint(
{
4 changes: 2 additions & 2 deletions examples/deephpms/burgers.py
@@ -31,7 +31,7 @@

def pde_loss_func(output_dict, *args):
losses = F.mse_loss(output_dict["f_pde"], output_dict["du_t"], "sum")
- return losses
+ return {"pde": losses}


def pde_l2_rel_func(output_dict, *args):
@@ -53,7 +53,7 @@ def boundary_loss_func(output_dict, *args):

losses = F.mse_loss(u_lb, u_ub, "sum")
losses += F.mse_loss(du_x_lb, du_x_ub, "sum")
- return losses
+ return {"boundary": losses}


def train(cfg: DictConfig):
4 changes: 2 additions & 2 deletions examples/deephpms/korteweg_de_vries.py
@@ -31,7 +31,7 @@

def pde_loss_func(output_dict, *args):
losses = F.mse_loss(output_dict["f_pde"], output_dict["du_t"], "sum")
- return losses
+ return {"pde": losses}


def pde_l2_rel_func(output_dict, *args):
@@ -56,7 +56,7 @@ def boundary_loss_func(output_dict, *args):
losses = F.mse_loss(u_lb, u_ub, "sum")
losses += F.mse_loss(du_x_lb, du_x_ub, "sum")
losses += F.mse_loss(du_xx_lb, du_xx_ub, "sum")
- return losses
+ return {"boundary": losses}


def train(cfg: DictConfig):
4 changes: 2 additions & 2 deletions examples/deephpms/kuramoto_sivashinsky.py
@@ -31,7 +31,7 @@

def pde_loss_func(output_dict, *args):
losses = F.mse_loss(output_dict["f_pde"], output_dict["du_t"], "sum")
- return losses
+ return {"pde": losses}


def pde_l2_rel_func(output_dict, *args):
@@ -59,7 +59,7 @@ def boundary_loss_func(output_dict, *args):
losses += F.mse_loss(du_x_lb, du_x_ub, "sum")
losses += F.mse_loss(du_xx_lb, du_xx_ub, "sum")
losses += F.mse_loss(du_xxx_lb, du_xxx_ub, "sum")
- return losses
+ return {"boundary": losses}


def train(cfg: DictConfig):
2 changes: 1 addition & 1 deletion examples/deephpms/navier_stokes.py
@@ -31,7 +31,7 @@

def pde_loss_func(output_dict, *args):
losses = F.mse_loss(output_dict["f_pde"], output_dict["dw_t"], "sum")
- return losses
+ return {"pde": losses}


def pde_l2_rel_func(output_dict, *args):
4 changes: 2 additions & 2 deletions examples/deephpms/schrodinger.py
@@ -32,7 +32,7 @@
def pde_loss_func(output_dict, *args):
losses = F.mse_loss(output_dict["f_pde"], output_dict["du_t"], "sum")
losses += F.mse_loss(output_dict["g_pde"], output_dict["dv_t"], "sum")
- return losses
+ return {"pde": losses}


def pde_l2_rel_func(output_dict, *args):
@@ -62,7 +62,7 @@ def boundary_loss_func(output_dict, *args):
losses += F.mse_loss(v_lb, v_ub, "sum")
losses += F.mse_loss(du_x_lb, du_x_ub, "sum")
losses += F.mse_loss(dv_x_lb, dv_x_ub, "sum")
- return losses
+ return {"boundary": losses}


def sol_l2_rel_func(output_dict, label_dict):
4 changes: 3 additions & 1 deletion examples/earthformer/enso_metric.py
@@ -83,7 +83,9 @@ def train_mse_func(
label_dict: Dict[str, "paddle.Tensor"],
*args,
) -> paddle.Tensor:
- return F.mse_loss(output_dict["sst_target"], label_dict["sst_target"])
+ return {
+     "sst_target": F.mse_loss(output_dict["sst_target"], label_dict["sst_target"])
+ }


def eval_rmse_func(
2 changes: 1 addition & 1 deletion examples/earthformer/sevir_metric.py
@@ -278,4 +278,4 @@ def train_mse_func(
pred = output_dict["vil"]
vil_target = label_dict["vil"]
target = vil_target.reshape([-1, *vil_target.shape[2:]])
- return F.mse_loss(pred, target)
+ return {"vil": F.mse_loss(pred, target)}
18 changes: 11 additions & 7 deletions examples/epnn/functions.py
@@ -106,9 +106,13 @@ def transform_in_stress(input, model, out_key):


def val_loss_criterion(x, y):
- return 100.0 * (
-     paddle.linalg.norm(x=x["input"] - y["input"]) / paddle.linalg.norm(x=y["input"])
- )
+ return {
+     "input": 100.0
+     * (
+         paddle.linalg.norm(x=x["input"] - y["input"])
+         / paddle.linalg.norm(x=y["input"])
+     )
+ }


def train_loss_func(output_dict, *args) -> paddle.Tensor:
@@ -128,7 +132,7 @@ def train_loss_func(output_dict, *args) -> paddle.Tensor:
loss_log["state_elasto"].append(float(loss_elasto))
loss_log["state_plastic"].append(float(loss_plastic))
loss_log["stress"].append(float(loss_stress))
- return loss
+ return {"train_loss": loss}


def eval_loss_func(output_dict, *args) -> paddle.Tensor:
@@ -150,7 +154,7 @@ def eval_loss_func(output_dict, *args) -> paddle.Tensor:
logger.message(
f"error(total): {float(error)}, error(error_elasto): {float(error_elasto)}, error(error_plastic): {float(error_plastic)}, error(error_stress): {float(error_stress)}"
)
- return error
+ return {"eval_loss": error}


def metric_expr(output_dict, *args) -> Dict[str, paddle.Tensor]:
@@ -216,8 +220,8 @@ def loss_func(output_dict, criterion) -> paddle.Tensor:
)
target_stress = output_dict["stress_y"]
loss_stress = criterion({"input": input_stress}, {"input": target_stress})
- loss = loss_elasto + loss_plastic + loss_stress
- return loss, loss_elasto, loss_plastic, loss_stress
+ loss = loss_elasto["input"] + loss_plastic["input"] + loss_stress["input"]
+ return loss, loss_elasto["input"], loss_plastic["input"], loss_stress["input"]


class Dataset:
6 changes: 3 additions & 3 deletions examples/hpinns/functions.py
@@ -265,7 +265,7 @@ def pde_loss_fun(output_dict: Dict[str, paddle.Tensor], *args) -> paddle.Tensor:
)
loss_log.append(float(loss_eqs1 + loss_eqs2)) # for plotting
loss_log.append(float(loss_lag1 + loss_lag2)) # for plotting
- return losses
+ return {"pde": losses}


def obj_loss_fun(output_dict: Dict[str, paddle.Tensor], *args) -> paddle.Tensor:
@@ -293,7 +293,7 @@ def obj_loss_fun(output_dict: Dict[str, paddle.Tensor], *args) -> paddle.Tensor:
losses = loss_weight[4] * loss_opt_area
loss_log.append(float(loss_opt_area)) # for plotting
loss_obj = float(loss_opt_area) # for plotting
- return losses
+ return {"obj": losses}


def eval_loss_fun(output_dict: Dict[str, paddle.Tensor], *args) -> paddle.Tensor:
@@ -314,7 +314,7 @@ def eval_loss_fun(output_dict: Dict[str, paddle.Tensor], *args) -> paddle.Tensor:
j = e_re**2 + e_im**2 - f1 * f2
losses = paddle.mean(j**2)

- return losses
+ return {"eval": losses}


def eval_metric_fun(
4 changes: 2 additions & 2 deletions examples/neuraloperator/metric.py
@@ -178,7 +178,7 @@ def __call__(
):
x = output_dict["y"]
y = label_dict["y"]
- return self.rel(x, y)
+ return {"y": self.rel(x, y)}


class H1Loss(object):
@@ -404,4 +404,4 @@ def __call__(
x = output_dict["y"]
y = label_dict["y"]

- return self.rel(x, y, h=h)
+ return {"y": self.rel(x, y, h=h)}
4 changes: 2 additions & 2 deletions examples/phycrnet/functions.py
@@ -151,11 +151,11 @@ def train_loss_func(result_dict, *args) -> paddle.Tensor:
Returns:
paddle.Tensor: Loss value.
"""
- return result_dict["loss"]
+ return {"residual": result_dict["loss"]}


def val_loss_func(result_dict, *args) -> paddle.Tensor:
- return result_dict["loss"]
+ return {"residual": result_dict["loss"]}


def metric_expr(output_dict, *args) -> Dict[str, paddle.Tensor]:
4 changes: 3 additions & 1 deletion examples/phygeonet/heat_equation.py
@@ -42,7 +42,9 @@ def train(cfg: DictConfig):
"iters_per_epoch": iters_per_epoch,
"num_workers": 0,
},
- ppsci.loss.FunctionalLoss(lambda out, label, weight: out["residual"]),
+ ppsci.loss.FunctionalLoss(
+     lambda out, label, weight: {"residual": out["residual"]}
+ ),
name="residual",
)
sup_constraint = {sup_constraint_res.name: sup_constraint_res}
4 changes: 3 additions & 1 deletion examples/phygeonet/heat_equation_with_bc.py
@@ -42,7 +42,9 @@ def train(cfg: DictConfig):
"iters_per_epoch": iters_per_epoch,
"num_workers": 0,
},
- ppsci.loss.FunctionalLoss(lambda out, label, weight: out["residual"]),
+ ppsci.loss.FunctionalLoss(
+     lambda out, label, weight: {"residual": out["residual"]}
+ ),
name="residual",
)
sup_constraint = {sup_constraint_res.name: sup_constraint_res}
4 changes: 2 additions & 2 deletions examples/phylstm/functions.py
@@ -73,7 +73,7 @@ def train_loss_func2(result_dict, *args) -> paddle.Tensor:
# total loss
loss = loss_u + loss_udot + loss_ut_c + loss_e
loss = paddle.square(loss)
- return loss
+ return {"loss2": loss}


def train_loss_func3(result_dict, *args) -> paddle.Tensor:
@@ -106,7 +106,7 @@ def train_loss_func3(result_dict, *args) -> paddle.Tensor:

loss = loss_u + loss_udot + loss_ut_c + loss_gt_c + loss_e
loss = paddle.square(loss)
- return loss
+ return {"loss3": loss}


class Dataset:
4 changes: 2 additions & 2 deletions examples/tempoGAN/functions.py
@@ -323,7 +323,7 @@ def loss_func_gen(self, output_dict: Dict, *args) -> paddle.Tensor:
)
losses += loss_layer * self.weight_gen_layer[0]

- return losses
+ return {"output_gen": losses}

def loss_func_gen_tempo(self, output_dict: Dict, *args) -> paddle.Tensor:
"""Calculate loss of generator when use temporal discriminator.
@@ -342,7 +342,7 @@ def loss_func_gen_tempo(self, output_dict: Dict, *args) -> paddle.Tensor:
out_disc_tempo_from_gen, label_t_ones, reduction="mean"
)
losses = loss_gen_t * self.weight_gen[2]
- return losses
+ return {"out_disc_tempo_from_gen": losses}


class DiscFuncs:
2 changes: 1 addition & 1 deletion examples/topopt/topopt.py
@@ -270,7 +270,7 @@ def loss_expr(output_dict, label_dict, weight_dict=None):
nn.functional.log_loss(label_pred, label_true, epsilon=1.0e-7)
)
vol_loss = paddle.square(paddle.mean(label_true - label_pred))
- return conf_loss + cfg.vol_coeff * vol_loss
+ return {"output": conf_loss + cfg.vol_coeff * vol_loss}

return loss_expr

2 changes: 1 addition & 1 deletion examples/xpinn/xpinn.py
@@ -188,7 +188,7 @@ def residual_func(output_der: paddle.Tensor, input: paddle.Tensor) -> paddle.Tensor:
residual_func=residual_func,
)

- return loss1 + loss2 + loss3
+ return {"residuals": loss1 + loss2 + loss3}


def eval_l2_rel_func(
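The five ppsci/constraint diffs that follow are identical deletions: each constraint no longer appends "area" to output_keys when the geometry is a Mesh, implementing the "remove 'area' in Constraint.output_keys" item from the commit message.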
3 changes: 0 additions & 3 deletions ppsci/constraint/boundary_constraint.py
@@ -91,9 +91,6 @@ def __init__(
self.output_expr = {
k: v for k, v in output_expr.items() if k in self.output_keys
}
- # "area" will be kept in "output_dict" for computation.
- if isinstance(geom, geometry.Mesh):
-     self.output_keys += ("area",)

if isinstance(criteria, str):
criteria = eval(criteria)
3 changes: 0 additions & 3 deletions ppsci/constraint/initial_constraint.py
@@ -97,9 +97,6 @@ def __init__(
self.output_expr = {
k: v for k, v in output_expr.items() if k in self.output_keys
}
- # "area" will be kept in "output_dict" for computation.
- if isinstance(geom.geometry, geometry.Mesh):
-     self.output_keys += ("area",)

if isinstance(criteria, str):
criteria = eval(criteria)
3 changes: 0 additions & 3 deletions ppsci/constraint/integral_constraint.py
@@ -92,9 +92,6 @@ def __init__(
self.output_expr = {
k: v for k, v in output_expr.items() if k in self.output_keys
}
- # "area" will be kept in "output_dict" for computation.
- if isinstance(geom, geometry.Mesh):
-     self.output_keys += ("area",)

if isinstance(criteria, str):
criteria = eval(criteria)
3 changes: 0 additions & 3 deletions ppsci/constraint/interior_constraint.py
@@ -94,9 +94,6 @@ def __init__(
self.output_expr = {
k: v for k, v in output_expr.items() if k in self.output_keys
}
- # "area" will be kept in "output_dict" for computation.
- if isinstance(geom, geometry.Mesh):
-     self.output_keys += ("area",)

if isinstance(criteria, str):
criteria = eval(criteria)
3 changes: 0 additions & 3 deletions ppsci/constraint/periodic_constraint.py
@@ -77,9 +77,6 @@ def __init__(
self.output_expr = {
k: v for k, v in output_expr.items() if k in self.output_keys
}
- # "area" will be kept in "output_dict" for computation.
- if isinstance(geom, geometry.Mesh):
-     self.output_keys += ("area",)

if isinstance(criteria, str):
criteria = eval(criteria)
(Diffs for the remaining changed files are not shown here.)
