From 9f98e04f4f63212e2e80f65aec42fc08ccc0ab8b Mon Sep 17 00:00:00 2001
From: alafage
Date: Fri, 21 Feb 2025 14:28:03 +0100
Subject: [PATCH] :book: Fix CalibrationError docstring

---
 .../classification/calibration_error.py      | 48 +++++++++----------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/torch_uncertainty/metrics/classification/calibration_error.py b/torch_uncertainty/metrics/classification/calibration_error.py
index 6d6d668a..a4ae8096 100644
--- a/torch_uncertainty/metrics/classification/calibration_error.py
+++ b/torch_uncertainty/metrics/classification/calibration_error.py
@@ -237,39 +237,39 @@ def __new__(  # type: ignore[misc]
 
         .. math::
 
-                \text{ECE} = \sum_{i=1}^N b_i \lvert p_i - c_i \rvert
+            \text{ECE} = \sum_{i=1}^N b_i \lvert p_i - c_i \rvert
 
         **Maximum Calibration Error (MCE):**
 
         .. math::
 
-                \text{MCE} = \max_{i} \lvert p_i - c_i \rvert
+            \text{MCE} = \max_{i} \lvert p_i - c_i \rvert
 
         **Root Mean Square Calibration Error (RMSCE):**
 
         .. math::
 
-                \text{RMSCE} = \sqrt{\sum_{i=1}^N b_i (p_i - c_i)^2}
 
         Here:
-            - :math:`p_i` is the accuracy of bin :math:`i` (fraction of correct predictions).
-            - :math:`c_i` is the mean predicted confidence in bin :math:`i`.
-            - :math:`b_i` is the fraction of total samples falling into bin :math:`i`.
+        - :math:`p_i` is the accuracy of bin :math:`i` (fraction of correct predictions).
+        - :math:`c_i` is the mean predicted confidence in bin :math:`i`.
+        - :math:`b_i` is the fraction of total samples falling into bin :math:`i`.
 
         Bins are constructed either uniformly in the range :math:`[0, 1]` or adaptively
         (if `adaptive=True`).
 
         Args:
-                task (str): Specifies the task type, either ``"binary"`` or ``"multiclass"``.
-                adaptive (bool, optional): Whether to use adaptive binning. Defaults to ``False``.
-                num_bins (int, optional): Number of bins to divide the probability space. Defaults to ``10``.
-                norm (str, optional): Specifies the type of norm to use: ``"l1"``, ``"l2"``, or ``"max"``.
-                    Defaults to ``"l1"``.
-                num_classes (int, optional): Number of classes for ``"multiclass"`` tasks. Required when task
-                    is ``"multiclass"``.
-                ignore_index (int, optional): Index to ignore during calculations. Defaults to ``None``.
-                validate_args (bool, optional): Whether to validate input arguments. Defaults to ``True``.
-                **kwargs: Additional keyword arguments for the metric.
+            task (str): Specifies the task type, either ``"binary"`` or ``"multiclass"``.
+            adaptive (bool, optional): Whether to use adaptive binning. Defaults to ``False``.
+            num_bins (int, optional): Number of bins to divide the probability space. Defaults to ``10``.
+            norm (str, optional): Specifies the type of norm to use: ``"l1"``, ``"l2"``, or ``"max"``.
+                Defaults to ``"l1"``.
+            num_classes (int, optional): Number of classes for ``"multiclass"`` tasks. Required when task
+                is ``"multiclass"``.
+            ignore_index (int, optional): Index to ignore during calculations. Defaults to ``None``.
+            validate_args (bool, optional): Whether to validate input arguments. Defaults to ``True``.
+            **kwargs: Additional keyword arguments for the metric.
 
         Example:
 
@@ -292,20 +292,20 @@ def __new__(  # type: ignore[misc]
             # Output: Calibration Error: 0.199
 
         Note:
-            - Bins are either uniformly distributed in :math:`[0, 1]` or adaptively sized
-              (if `adaptive=True`).
+            Bins are either uniformly distributed in :math:`[0, 1]` or adaptively sized
+            (if `adaptive=True`).
 
         Warning:
-                If `task="multiclass"`, `num_classes` must be an integer; otherwise, a :class:`TypeError`
-                is raised.
+            If `task="multiclass"`, `num_classes` must be an integer; otherwise, a :class:`TypeError`
+            is raised.
 
         References:
-                [1] `Naeini et al. Obtaining well calibrated probabilities using Bayesian binning. In AAAI, 2015
-                `_.
+            [1] `Naeini et al. Obtaining well calibrated probabilities using Bayesian binning. In AAAI, 2015
+            `_.
 
         .. seealso::
-            - See `CalibrationError `_
-              for details. Our version of the metric is a wrapper around the original metric providing a plotting functionality.
+            See `CalibrationError `_
+            for details. Our version of the metric is a wrapper around the original metric providing a plotting functionality.
         """
         if kwargs.get("n_bins") is not None:
             raise ValueError("`n_bins` does not exist, use `num_bins`.")
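
Reviewer note (not part of the patch): below is a minimal, self-contained sketch of the binned calibration errors the docstring describes (ECE, MCE, RMSCE). The helper name `binned_calibration_error` and the toy data are illustrative assumptions; the actual metric in torch-uncertainty wraps the TorchMetrics `CalibrationError` and adds plotting, as the docstring's seealso states.

# Illustrative only. Computes sum_i b_i * |p_i - c_i| (ECE) over uniform bins,
# plus the max (MCE) and root-mean-square (RMSCE) variants.
import torch


def binned_calibration_error(
    confidences: torch.Tensor,  # top-class probabilities, shape (N,)
    correct: torch.Tensor,      # 1.0 where the prediction was right, shape (N,)
    num_bins: int = 10,
    norm: str = "l1",
) -> torch.Tensor:
    edges = torch.linspace(0, 1, num_bins + 1)
    # Assign each sample to one of `num_bins` uniform bins over [0, 1].
    bin_ids = torch.bucketize(confidences, edges[1:-1], right=True)

    gaps, weights = [], []
    for b in range(num_bins):
        mask = bin_ids == b
        if mask.any():
            p_i = correct[mask].mean()      # per-bin accuracy
            c_i = confidences[mask].mean()  # per-bin mean confidence
            b_i = mask.float().mean()       # fraction of samples in the bin
            gaps.append((p_i - c_i).abs())
            weights.append(b_i)
    gaps, weights = torch.stack(gaps), torch.stack(weights)

    if norm == "l1":   # ECE
        return (weights * gaps).sum()
    if norm == "max":  # MCE
        return gaps.max()
    if norm == "l2":   # RMSCE
        return (weights * gaps.square()).sum().sqrt()
    raise ValueError(f"Unknown norm: {norm}")


if __name__ == "__main__":
    torch.manual_seed(0)
    probs = torch.rand(1000)                     # toy confidences
    hits = (torch.rand(1000) < probs).float()    # correctness correlated with confidence
    print(f"ECE: {binned_calibration_error(probs, hits):.3f}")  # small, since the toy data is well calibrated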