@@ -663,6 +663,50 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
663663 return np .sum (kl_loss )
664664
665665
def root_mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Calculate the Root Mean Squared Error (RMSE) between two arrays.

    RMSE measures the average magnitude of the prediction errors,
    giving higher weight to larger errors due to squaring.

    RMSE = sqrt((1/n) * Σ (y_true - y_pred) ^ 2)

    Reference: https://en.wikipedia.org/wiki/Root_mean_square_deviation

    Parameters:
    - y_true: Actual (ground-truth) values
    - y_pred: Predicted values

    Returns:
    float: The RMSE between y_pred and y_true

    Raises:
    ValueError: If the input arrays have different lengths.

    >>> true_labels = np.array([100, 200, 300])
    >>> predicted_probs = np.array([110, 190, 310])
    >>> root_mean_squared_error(true_labels, predicted_probs)
    10.0

    >>> true_labels = [2, 4, 6, 8]
    >>> predicted_probs = [3, 5, 7, 10]
    >>> root_mean_squared_error(true_labels, predicted_probs)
    1.3228756555322954

    >>> true_labels = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
    >>> root_mean_squared_error(true_labels, predicted_probs)
    Traceback (most recent call last):
    ...
    ValueError: Input arrays must have the same length.
    """
    if len(y_true) != len(y_pred):
        raise ValueError("Input arrays must have the same length.")
    # Accept plain Python sequences as well as ndarrays.
    y_true, y_pred = np.asarray(y_true), np.asarray(y_pred)

    mse = np.mean((y_pred - y_true) ** 2)
    # Convert np.float64 -> float so the doctest repr is stable across
    # NumPy versions (NumPy >= 2 reprs scalars as np.float64(...)).
    return float(np.sqrt(mse))
708+
709+
666710if __name__ == "__main__" :
667711 import doctest
668712
0 commit comments