import numpy as np


class Activations:
    """Activation functions, their derivatives, and (where defined) their inverses."""

    @staticmethod
    def LeakyReLU(x, alpha=0.01):
        # Identity for positive inputs, slope alpha for negative inputs.
        return np.where(x > 0, x, alpha * x)

    @staticmethod
    def LeakyReLU_deriv(x, alpha=0.01):
        return np.where(x > 0, 1, alpha)

    @staticmethod
    def InverseLeakyReLU(x, alpha=0.01):
        # Negative outputs were scaled by alpha, so divide them back out.
        return np.where(x > 0, x, x / alpha)

    @staticmethod
    def ReLU(x):
        return np.maximum(0, x)

    @staticmethod
    def ReLU_deriv(x):
        return np.where(x > 0, 1, 0)

    @staticmethod
    def InverseReLU(x):
        return np.maximum(0, x)  # Note: this is lossy -- negative inputs cannot be recovered

    @staticmethod
    def Sigmoid(x):
        return 1 / (1 + np.exp(-x))

    @staticmethod
    def Sigmoid_deriv(x):
        s = Activations.Sigmoid(x)
        return s * (1 - s)

    @staticmethod
    def InverseSigmoid(x):
        # Logit function; defined only for 0 < x < 1.
        return np.log(x / (1 - x))

    @staticmethod
    def Softmax(x):
        # Subtract the column-wise max for numerical stability before exponentiating.
        exp_x = np.exp(x - np.max(x, axis=0, keepdims=True))
        return exp_x / np.sum(exp_x, axis=0, keepdims=True)

    @staticmethod
    def InverseSoftmax(x):
        # Softmax is invertible only up to an additive constant; anchor at the max log-probability.
        return np.log(x) - np.max(np.log(x))

    @classmethod
    def get_function_name(cls, func):
        return func.__name__

    @classmethod
    def get_all_activation_names(cls):
        # Resolve names through getattr so staticmethod descriptors become plain
        # callables (staticmethod objects in cls.__dict__ are not callable before Python 3.10).
        return [name for name in cls.__dict__
                if callable(getattr(cls, name)) and not name.startswith("__")
                and not name.endswith("_deriv") and not name.startswith("Inverse")
                and name not in ("get_function_name", "get_all_activation_names")]
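

# Example usage (a minimal sketch with arbitrary sample values): round-trips a small
# vector through each forward/inverse pair to sanity-check the inverses above.
if __name__ == "__main__":
    x = np.array([-2.0, -0.5, 0.5, 2.0])

    # LeakyReLU is invertible everywhere, so the round trip recovers x exactly.
    y = Activations.LeakyReLU(x)
    print(Activations.InverseLeakyReLU(y))          # ~ [-2.  -0.5  0.5  2. ]

    # Sigmoid maps into (0, 1); InverseSigmoid (the logit) maps back.
    s = Activations.Sigmoid(x)
    print(Activations.InverseSigmoid(s))            # ~ [-2.  -0.5  0.5  2. ]

    # Softmax discards an additive constant, so InverseSoftmax recovers x only up to
    # a shift: both sides end up anchored at max = 0.
    p = Activations.Softmax(x)
    print(Activations.InverseSoftmax(p))            # ~ x - max(x)

    print(Activations.get_all_activation_names())   # ['LeakyReLU', 'ReLU', 'Sigmoid', 'Softmax']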