# regularization.py
from abc import abstractmethod
import numpy as np
from .layer import Layer
from .module import Module
from .weights import Weights


class Regularization(Layer):
    """Base class for weight regularization.

    Collects the trainable parameters of the given layers and adds the
    corresponding penalty gradient during the backward pass.
    """

    def __init__(self, layers: list | Module, Lambda: float) -> None:
        self.name = self.__class__.__name__
        self.Lambda = Lambda
        self.params = []

        for layer in layers:
            try:
                params = layer.params()
                for param in params:
                    self.params.append(param)
            except AttributeError:
                # the layer does not expose a 'params' method, skip it
                continue

    def forward(self, input: np.ndarray) -> np.ndarray:
        # regularization leaves the forward pass unchanged; the penalty only enters the gradients
        return input

    @abstractmethod
    def backward(self, gradient: np.ndarray) -> np.ndarray:
        pass


class L1Regularization(Regularization):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def backward(self, gradient: np.ndarray) -> np.ndarray:
        # add the L1 penalty gradient Lambda * sign(w) to the incoming gradients
        for param in self.params:
            gradient += self.Lambda * np.sign(param.values)
        return gradient


class L2Regularization(Regularization):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def backward(self, gradient: np.ndarray) -> np.ndarray:
        # add the L2 penalty gradient 2 * Lambda * w to the incoming gradients
        for param in self.params:
            gradient += self.Lambda * 2 * param.values
        return gradient
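

# Minimal usage sketch, assuming the package's layers expose a params() method
# returning objects that hold their array in a .values attribute (as the Weights
# import above suggests). 'ToyParam' and 'ToyLayer' below are hypothetical
# stand-ins, not part of the package, and the block only runs when the module is
# executed as part of the package (e.g. `python -m <package>.regularization`)
# so the relative imports resolve.
if __name__ == "__main__":
    class ToyParam:
        def __init__(self, values: np.ndarray) -> None:
            self.values = values

    class ToyLayer:
        def __init__(self, shape: tuple[int, ...]) -> None:
            self.weight = ToyParam(np.random.randn(*shape))

        def params(self) -> list:
            return [self.weight]

    layer = ToyLayer((3, 3))
    gradient = np.zeros((3, 3))

    # L2: each parameter contributes 2 * Lambda * w to the incoming gradient
    l2 = L2Regularization([layer], Lambda=1e-4)
    gradient = l2.backward(gradient)
    print(gradient)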