@@ -662,6 +662,40 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
662662 kl_loss = y_true * np .log (y_true / y_pred )
663663 return np .sum (kl_loss )
664664
def root_mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Root Mean Squared Error (RMSE)

    RMSE is a standard regression metric: it measures the average magnitude
    of the prediction errors, giving higher weight to larger errors due to
    squaring.

    RMSE = sqrt( (1/n) * Σ (y_true - y_pred) ^ 2 )

    Reference: https://en.wikipedia.org/wiki/Root_mean_square_deviation

    Parameters:
        y_true: Actual values
        y_pred: Predicted values

    Returns:
        float: The RMSE loss between y_pred and y_true

    Raises:
        ValueError: If the input arrays have different lengths.

    >>> true_labels = np.array([100, 200, 300])
    >>> predicted_probs = np.array([110, 190, 310])
    >>> root_mean_squared_error(true_labels, predicted_probs)
    10.0
    >>> true_labels = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
    >>> root_mean_squared_error(true_labels, predicted_probs)
    Traceback (most recent call last):
        ...
    ValueError: Input arrays must have the same length.
    """
    if len(y_true) != len(y_pred):
        raise ValueError("Input arrays must have the same length.")

    mse = np.mean((y_pred - y_true) ** 2)
    # Convert from np.float64 so the declared `-> float` return type holds.
    return float(np.sqrt(mse))
665699
666700if __name__ == "__main__" :
667701 import doctest
0 commit comments