# regularization.py
import numpy as np
from abc import ABC, abstractmethod
from .layer import Layer
#from .module import Module
from .weights import Weights


class Regularization(Layer, ABC):
    """Weight-penalty layer: identity on the forward pass; backward() adds the
    penalty gradient, scaled by lambda_, onto the incoming gradient."""

    def __init__(self, layers: list, lambda_: float) -> None:
        self.name = self.__class__.__name__
        self.lambda_ = lambda_
        self.params = []
        # Collect regularizable parameters from the supplied layers.
        self.addParams(layers)

    def forward(self, input: np.ndarray) -> np.ndarray:
        return input

    @abstractmethod
    def backward(self, gradient: np.ndarray) -> np.ndarray:
        pass

    def addParams(self, module) -> None:
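        """Collect trainable parameters from every layer in module that exposes params()."""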
        for layer in module:
            try:
                params = layer.params()
                self.params.extend(params)
            except AttributeError:
                continue


class L1Regularization(Regularization):
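    """L1 (lasso) penalty: its backward pass adds lambda_ * sign(w) for each parameter w."""
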
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def backward(self, gradient: np.ndarray) -> np.ndarray:
        # Subgradient of the L1 penalty lambda_ * sum(|w|) is lambda_ * sign(w);
        # accumulate it onto the incoming gradient.
        for param in self.params:
            gradient += self.lambda_ * np.sign(param.values)
        return gradient


class L2Regularization(Regularization):
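    """L2 (ridge / weight decay) penalty: its backward pass adds 2 * lambda_ * w for each parameter w."""
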
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def backward(self, gradient: np.ndarray) -> np.ndarray:
        # Gradient of the L2 penalty lambda_ * sum(w**2) is 2 * lambda_ * w;
        # accumulate it onto the incoming gradient.
        for param in self.params:
            gradient += self.lambda_ * 2 * param.values
        return gradient
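

if __name__ == "__main__":
    # Minimal usage sketch, illustrative only. The stand-in classes below are
    # hypothetical: they merely mimic the params()/.values interface this
    # module expects and are not part of the real project (which uses the
    # Layer and Weights classes imported above). Assumes Layer requires no
    # further setup in __init__.
    class _DemoParam:
        def __init__(self, values: np.ndarray) -> None:
            self.values = values

    class _DemoLayer:
        def __init__(self) -> None:
            self.weights = _DemoParam(np.array([[0.5, -1.0], [2.0, 0.0]]))

        def params(self) -> list:
            return [self.weights]

    reg = L2Regularization([], lambda_=0.01)
    reg.addParams([_DemoLayer()])
    grad = np.zeros((2, 2))
    # backward() adds 2 * lambda_ * w onto the incoming gradient.
    print(reg.backward(grad))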