semantics/net/loss.py
2024-09-26 17:23:23 -04:00

65 lines
No EOL
2 KiB
Python

import numpy as np
class Loss:
    """Collection of common loss functions for neural-network training.

    All loss methods are static and take ``Y`` (ground truth) and ``A``
    (predictions) as NumPy arrays of matching shape, returning a scalar
    float.  The cross-entropy losses divide by ``Y.shape[1]``, so they
    assume a 2-D layout with samples in columns — ``(classes, m)``;
    NOTE(review): verify this layout against the callers.
    """

    # Small constant added inside logs/denominators to avoid log(0) and
    # division by zero.  Note it slightly biases the losses.
    _EPS = 1e-8

    @staticmethod
    def mean_squared_error(Y, A):
        """Mean Squared Error (MSE): mean of (Y - A)^2 over all elements."""
        return np.mean((Y - A) ** 2)

    @staticmethod
    def mean_absolute_error(Y, A):
        """Mean Absolute Error (MAE): mean of |Y - A| over all elements."""
        return np.mean(np.abs(Y - A))

    @staticmethod
    def huber_loss(Y, A, delta=1.0):
        """Huber loss: quadratic for |error| <= delta, linear beyond.

        Combines MSE's smoothness near zero with MAE's robustness to
        outliers.  ``delta`` is the transition point between the two
        regimes.
        """
        error = Y - A
        is_small_error = np.abs(error) <= delta
        squared_loss = 0.5 * error ** 2
        # Linear branch is offset so the two pieces join continuously at
        # |error| == delta.
        linear_loss = delta * (np.abs(error) - 0.5 * delta)
        return np.where(is_small_error, squared_loss, linear_loss).mean()

    @staticmethod
    def binary_cross_entropy_loss(Y, A):
        """Binary Cross-Entropy for sigmoid outputs, averaged over the
        m samples in ``Y``'s second axis."""
        m = Y.shape[1]
        eps = Loss._EPS
        return -np.sum(Y * np.log(A + eps) + (1 - Y) * np.log(1 - A + eps)) / m

    @staticmethod
    def categorical_cross_entropy_loss(Y, A):
        """Categorical Cross-Entropy for softmax outputs with one-hot
        ``Y``, averaged over the m samples in ``Y``'s second axis."""
        m = Y.shape[1]
        return -np.sum(Y * np.log(A + Loss._EPS)) / m

    @staticmethod
    def hinge_loss(Y, A):
        """Hinge loss (SVM-style): mean of max(0, 1 - Y*A).

        Assumes labels in ``Y`` are +/-1 — TODO confirm with callers.
        """
        return np.mean(np.maximum(0, 1 - Y * A))

    @staticmethod
    def kl_divergence(P, Q):
        """Kullback-Leibler divergence sum(P * log(P / Q)) from Q to P.

        NOTE(review): zero entries in ``P`` produce 0 * log(0) = NaN
        here; callers presumably pass strictly positive distributions.
        """
        return np.sum(P * np.log(P / (Q + Loss._EPS)))

    @staticmethod
    def poisson_loss(Y, A):
        """Poisson loss: mean of (A - Y * log(A)), for count targets."""
        return np.mean(A - Y * np.log(A + Loss._EPS))

    @staticmethod
    def cosine_proximity_loss(Y, A):
        """Negative mean cosine similarity between columns of Y and A.

        Similarity is computed per column (axis=0), so each column is
        treated as one sample vector; more similar => more negative loss.
        """
        dot_product = np.sum(Y * A, axis=0)
        norms = np.linalg.norm(Y, axis=0) * np.linalg.norm(A, axis=0)
        return -np.mean(dot_product / (norms + Loss._EPS))

    @classmethod
    def get_function_name(cls, func):
        """Return the ``__name__`` of a loss function object."""
        return func.__name__

    @classmethod
    def get_all_loss_names(cls):
        """Return the names of all loss functions defined on this class.

        Checks ``isinstance(value, staticmethod)`` rather than
        ``callable(value)``: staticmethod objects are only callable from
        Python 3.10 onward, so the previous callable() test returned an
        empty list on 3.7-3.9.  All losses are staticmethods and the two
        introspection helpers are classmethods, so this test selects
        exactly the loss functions.
        """
        return [name for name, func in cls.__dict__.items()
                if isinstance(func, staticmethod)]