From a9c1c59531c22fed8637388266a2a33437492c18 Mon Sep 17 00:00:00 2001
From: Falguni Ghosh <falguni.ghosh@fau.de>
Date: Sun, 15 Oct 2023 21:12:08 +0000
Subject: [PATCH] Add L1 and L2 regularizers in 3_RNN/Constraints.py

---
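Note (commentary below the "---" cut line, ignored when the patch is
applied): a minimal usage sketch of the new regularizers, assuming a
plain gradient-descent step; the names `lr`, `weights`, and
`data_gradient` below are illustrative stand-ins, not part of the
exercise framework.

    import numpy as np
    from Constraints import L1_Regularizer, L2_Regularizer

    lr = 0.1                                 # illustrative learning rate
    weights = np.random.randn(3, 3)
    data_gradient = np.ones_like(weights)    # stand-in for the loss gradient

    l2 = L2_Regularizer(alpha=4e-4)          # penalty strength (assumed value)
    penalty = l2.norm(weights)               # alpha * ||W||_2^2, added to the loss
    step = data_gradient + l2.calculate_gradient(weights)
    weights -= lr * step

    l1 = L1_Regularizer(alpha=4e-4)          # same interface, sparsity-inducing
    sparse_step = data_gradient + l1.calculate_gradient(weights)
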
 3_RNN/Constraints.py | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)
 create mode 100644 3_RNN/Constraints.py

diff --git a/3_RNN/Constraints.py b/3_RNN/Constraints.py
new file mode 100644
index 0000000..662b57a
--- /dev/null
+++ b/3_RNN/Constraints.py
@@ -0,0 +1,36 @@
+import numpy as np
+
+
+class L2_Regularizer:
+    def __init__(self, alpha):
+        # alpha scales the strength of the regularization penalty
+        self.regularization_weight = alpha
+        self.norm_enhanced_loss = None
+        self.alpha_weight = None
+
+    def calculate_gradient(self, weights):
+        # Gradient of the L2 penalty term: alpha * w
+        self.alpha_weight = self.regularization_weight * weights
+        return self.alpha_weight
+
+    def norm(self, weights):
+        # Norm-enhanced loss term: alpha * ||w||_2^2
+        self.norm_enhanced_loss = self.regularization_weight * np.sum(np.square(weights))
+        return self.norm_enhanced_loss
+
+
+class L1_Regularizer:
+    def __init__(self, alpha):
+        self.regularization_weight = alpha
+        self.norm_enhanced_loss = None
+        self.alpha_weight = None
+
+    def calculate_gradient(self, weights):
+        # Subgradient of the L1 penalty term: alpha * sign(w)
+        self.alpha_weight = self.regularization_weight * np.sign(weights)
+        return self.alpha_weight
+
+    def norm(self, weights):
+        # Norm-enhanced loss term: alpha * ||w||_1
+        self.norm_enhanced_loss = self.regularization_weight * np.sum(np.absolute(weights))
+        return self.norm_enhanced_loss
-- 
GitLab