From 3835785c11c8453a4ecbe97dc8843a8833b2e307 Mon Sep 17 00:00:00 2001
From: Falguni Ghosh <falguni.ghosh@fau.de>
Date: Sun, 15 Oct 2023 21:13:46 +0000
Subject: [PATCH] Upload New File

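Adds a NeuralNetwork container that wires a stack of layers, a loss layer
and a data layer together and exposes train() and test(). A minimal,
self-contained usage sketch follows; the stub classes below (Sgd, Constant,
Identity, L2Loss, DataLayer) are illustrative stand-ins invented for this
note, not part of the exercise framework, and the import assumes 3_RNN is
on the import path:

    import numpy as np
    from NeuralNetwork import NeuralNetwork

    class Sgd:  # optimizer stub: plain gradient descent, no regularizer
        def __init__(self, learning_rate):
            self.learning_rate = learning_rate
            self.regularizer = None

        def calculate_update(self, weights, gradient):
            return weights - self.learning_rate * gradient

    class Constant:  # initializer stub: fills tensors with a constant value
        def __init__(self, value=0.1):
            self.value = value

        def initialize(self, shape, fan_in, fan_out):
            return np.full(shape, self.value)

    class Identity:  # trivial non-trainable layer
        trainable = False
        testing_phase = False

        def forward(self, input_tensor):
            return input_tensor

        def backward(self, error_tensor):
            return error_tensor

    class L2Loss:  # trivial loss layer with a forward/backward pair
        def forward(self, prediction, label):
            self.prediction = prediction
            return np.sum((prediction - label) ** 2)

        def backward(self, label):
            return 2.0 * (self.prediction - label)

    class DataLayer:  # yields one fixed mini-batch per call
        def next(self):
            return np.ones((4, 3)), np.zeros((4, 3))

    net = NeuralNetwork(Sgd(1e-3), Constant(), Constant(0.0))
    net.data_layer = DataLayer()
    net.loss_layer = L2Loss()
    net.append_layer(Identity())
    net.train(5)                      # net.loss holds one value per iteration
    print(net.test(np.ones((2, 3))))  # inference pass with testing_phase set
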
---
 3_RNN/NeuralNetwork.py | 76 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 76 insertions(+)
 create mode 100644 3_RNN/NeuralNetwork.py

diff --git a/3_RNN/NeuralNetwork.py b/3_RNN/NeuralNetwork.py
new file mode 100644
index 0000000..e208a82
--- /dev/null
+++ b/3_RNN/NeuralNetwork.py
@@ -0,0 +1,76 @@
+import numpy as np
+import copy
+
+
+class NeuralNetwork:
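+    """Chains layers, a loss layer and a data layer; drives training and testing."""
+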
+    def __init__(self, optimizer, weights_initializer, bias_initializer):
+        self.optimizer = optimizer
+        self.layers = []
+        self.loss = []  # per-iteration training loss collected by train()
+        self.data_layer = None  # must provide next() -> (input_tensor, label_tensor)
+        self.loss_layer = None
+        self.weights_initializer = weights_initializer
+        self.bias_initializer = bias_initializer
+        # mini-batch cached by forward() so that backward() can reuse the labels
+        self.input_tensor = None
+        self.label_tensor = None
+
+    @property
+    def phase(self):
+        # the network's phase mirrors the testing_phase flag of its layers
+        if self.layers:
+            return self.layers[0].testing_phase
+
+    @phase.setter
+    def phase(self, p):
+        for layer in self.layers:
+            layer.testing_phase = p
+
+    def forward(self):
+        # draw the next mini-batch and cache it for the backward pass
+        input_tensor, label_tensor = self.data_layer.next()
+        self.input_tensor = np.copy(input_tensor)
+        self.label_tensor = np.copy(label_tensor)
+
+        # propagate through all layers, accumulating the regularization
+        # penalty of every trainable layer whose optimizer has a regularizer
+        norm_loss = 0
+        for layer in self.layers:
+            input_tensor = layer.forward(input_tensor)
+            if layer.trainable and layer.optimizer and layer.optimizer.regularizer:
+                norm_loss += layer.optimizer.regularizer.norm(layer.weights)
+
+        # data loss plus accumulated regularization loss
+        return self.loss_layer.forward(input_tensor, label_tensor) + norm_loss
+
+    def backward(self):
+        # start from the loss layer and propagate the error back through the stack
+        error_tensor = self.loss_layer.backward(self.label_tensor)
+
+        for layer in reversed(self.layers):
+            error_tensor = layer.backward(error_tensor)
+
+    def append_layer(self, layer):
+        # trainable layers get their own optimizer copy and freshly initialized weights
+        if layer.trainable:
+            layer.optimizer = copy.deepcopy(self.optimizer)
+            layer.initialize(self.weights_initializer, self.bias_initializer)
+
+        self.layers.append(layer)
+
+    def train(self, iterations):
+        self.phase = False  # training mode: testing_phase off for all layers
+
+        for _ in range(iterations):
+            self.loss.append(self.forward())
+            self.backward()
+
+    def test(self, input_tensor):
+        self.phase = True  # inference mode: testing_phase on for all layers
+
+        for layer in self.layers:
+            input_tensor = layer.forward(input_tensor)
+
+        return input_tensor
-- 
GitLab