From 9ed354e0de4f87f0f3354c571d0ae383671018af Mon Sep 17 00:00:00 2001
From: Falguni Ghosh <falguni.ghosh@fau.de>
Date: Sun, 15 Oct 2023 21:16:02 +0000
Subject: [PATCH] Upload New File

---
 3_RNN/SoftMax.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 3_RNN/SoftMax.py

diff --git a/3_RNN/SoftMax.py b/3_RNN/SoftMax.py
new file mode 100644
index 0000000..7e45611
--- /dev/null
+++ b/3_RNN/SoftMax.py
@@ -0,0 +1,28 @@
+import numpy as np
+from .Base import BaseLayer
+
+
class SoftMax(BaseLayer):
    """Softmax activation layer.

    Maps each row of raw scores (batch on axis 0, classes on axis 1) to a
    probability distribution. Caches the forward output, which is all the
    backward pass needs for the softmax gradient.
    """

    def __init__(self):
        super().__init__()
        # Forward-pass probabilities, cached for backward().
        # NOTE: despite the name, this holds the layer *output*, not its input.
        self.input_in_backward_path = None

    def forward(self, input_tensor):
        """Return the row-wise softmax of input_tensor.

        The per-row maximum is subtracted before exponentiation for numerical
        stability (identical result, avoids overflow in np.exp). Unlike the
        previous implementation, the caller's array is NOT mutated.

        :param input_tensor: 2-D array of shape (batch, classes).
        :return: probabilities of the same shape; rows sum to 1.
        """
        # Shift each row so its max is 0; softmax is invariant to this shift.
        shifted = input_tensor - np.max(input_tensor, axis=1, keepdims=True)
        exps = np.exp(shifted)
        output = exps / np.sum(exps, axis=1, keepdims=True)
        self.input_in_backward_path = np.copy(output)
        return output

    def backward(self, error_tensor):
        """Propagate the gradient through the softmax.

        Uses the standard closed form for the softmax Jacobian-vector
        product: E_prev = y * (E - sum(E * y, axis=1)), with y the cached
        forward output.

        :param error_tensor: gradient from the next layer, shape (batch, classes).
        :return: gradient w.r.t. this layer's input, same shape.
        """
        y = self.input_in_backward_path
        weighted_sum = np.sum(error_tensor * y, axis=1, keepdims=True)
        return y * (error_tensor - weighted_sum)
-- 
GitLab