Commit a9c1c595 authored by Falguni Ghosh's avatar Falguni Ghosh
Upload New File

parent f81d62b9
import numpy as np


class L2_Regularizer:
    def __init__(self, alpha):
        # alpha scales the strength of the regularization penalty
        self.regularization_weight = alpha
        self.norm_enhanced_loss = None
        self.alpha_weight = None

    def calculate_gradient(self, weights):
        # Gradient of the L2 penalty with respect to the weights: alpha * w
        self.alpha_weight = self.regularization_weight * weights
        return self.alpha_weight

    def norm(self, weights):
        # Penalty added to the loss: alpha * sum of squared weights
        sqr_val = np.square(weights)
        sum_tot = np.sum(sqr_val)
        self.norm_enhanced_loss = self.regularization_weight * sum_tot
        return self.norm_enhanced_loss


class L1_Regularizer:
    def __init__(self, alpha):
        # alpha scales the strength of the regularization penalty
        self.regularization_weight = alpha
        self.norm_enhanced_loss = None
        self.alpha_weight = None

    def calculate_gradient(self, weights):
        # Subgradient of the L1 penalty with respect to the weights: alpha * sign(w)
        self.alpha_weight = self.regularization_weight * np.sign(weights)
        return self.alpha_weight

    def norm(self, weights):
        # Penalty added to the loss: alpha * sum of absolute weight values
        abs_val = np.absolute(weights)
        sum_tot = np.sum(abs_val)
        self.norm_enhanced_loss = self.regularization_weight * sum_tot
        return self.norm_enhanced_loss
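
A minimal usage sketch (not part of the committed file), assuming the two classes above are in scope; the weight matrix and alpha value below are made up purely for illustration:

import numpy as np

# hypothetical example values, chosen only for illustration
weights = np.array([[0.5, -1.0], [2.0, 0.0]])
alpha = 0.01

l2 = L2_Regularizer(alpha)
print(l2.norm(weights))                # 0.01 * (0.25 + 1.0 + 4.0 + 0.0) = 0.0525
print(l2.calculate_gradient(weights))  # 0.01 * weights

l1 = L1_Regularizer(alpha)
print(l1.norm(weights))                # 0.01 * (0.5 + 1.0 + 2.0 + 0.0) = 0.035
print(l1.calculate_gradient(weights))  # 0.01 * sign(weights)

In a training loop, norm(weights) would typically be added to the data loss and calculate_gradient(weights) added to the weight gradient before the optimizer update.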