class Optimizers:
    @staticmethod
    def gradient_descent(params, grads, alpha):
        """
        Performs gradient descent optimization for a multi-layer network.

        :param params: Dictionary containing the network parameters (W1, b1, W2, b2, etc.)
        :param grads: Dictionary containing the gradients (dW1, db1, dW2, db2, etc.)
        :param alpha: Learning rate
        :return: Updated parameters dictionary
        """
        for key in params:
            params[key] -= alpha * grads['d' + key]
        return params
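

# A minimal usage sketch (assumptions: parameter and gradient values are NumPy
# arrays, and the layer sizes and dummy gradients below are purely illustrative
# placeholders for a small two-layer network, not part of the class above).
if __name__ == "__main__":
    import numpy as np

    params = {
        'W1': np.random.randn(4, 3), 'b1': np.zeros((4, 1)),
        'W2': np.random.randn(1, 4), 'b2': np.zeros((1, 1)),
    }
    grads = {
        'dW1': np.ones((4, 3)), 'db1': np.ones((4, 1)),
        'dW2': np.ones((1, 4)), 'db2': np.ones((1, 1)),
    }

    # Each parameter is shifted opposite to its gradient by alpha * gradient.
    params = Optimizers.gradient_descent(params, grads, alpha=0.01)
    print(params['W1'])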