Mean Squared Error (MSE) is a common loss function for regression tasks.
def mean_squared_error(y_pred, y_true):
    """Return the mean squared error between predictions and targets.

    Args:
        y_pred: Predicted values (scalar or array-like).
        y_true: Ground-truth values, same shape as y_pred.

    Returns:
        The average of the squared differences (a scalar).
    """
    # 1. Calculate difference
    error = y_pred - y_true
    # 2. Square it
    squared_error = error ** 2
    # 3. Average it (for multiple examples)
    mse = np.mean(squared_error)
    return mse

# For classification (Yes/No), we use Log Loss instead.
def binary_cross_entropy(y_pred, y_true):
    """Return the binary cross-entropy (log loss) for a prediction.

    The loss grows without bound as y_pred moves confidently toward the
    wrong label, so confident wrong answers are penalized heavily.

    Args:
        y_pred: Predicted probability in (0, 1) (scalar or array-like).
        y_true: True label, 0 or 1, same shape as y_pred.

    Returns:
        Elementwise log loss (same shape as the inputs).
    """
    # Clip predictions away from 0 and 1 so log() never sees an exact 0,
    # which would yield -inf / NaN. In-range predictions are unaffected.
    eps = 1e-12
    y_pred = np.clip(y_pred, eps, 1 - eps)
    # Penalize confident wrong answers heavily
    return -(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))