diff --git a/1_DL_base/SoftMax.py b/1_DL_base/SoftMax.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b923bc53029e61d33959ef32d2e8a01eb237da3
--- /dev/null
+++ b/1_DL_base/SoftMax.py
@@ -0,0 +1,25 @@
+import numpy as np
+from exercise1_material.src_to_implement.Layers.Base import BaseLayer
+
+
+class SoftMax(BaseLayer):
+    """Softmax activation: turns each row of logits into a probability distribution."""
+
+    def __init__(self):
+        super().__init__()
+        # Softmax output cached during forward() and reused in backward().
+        self.softmax_output = None
+
+    def forward(self, input_tensor):
+        # Subtract the row-wise maximum for numerical stability; this leaves
+        # the softmax value unchanged but prevents overflow in np.exp.
+        shifted = input_tensor - np.max(input_tensor, axis=1, keepdims=True)
+        exponentials = np.exp(shifted)
+        self.softmax_output = exponentials / np.sum(exponentials, axis=1, keepdims=True)
+        return self.softmax_output
+
+    def backward(self, error_tensor):
+        # Row-wise Jacobian-vector product of the softmax:
+        # dL/dx = y * (e - sum(e * y)), where y is the cached forward output.
+        weighted_error = np.sum(error_tensor * self.softmax_output, axis=1, keepdims=True)
+        return self.softmax_output * (error_tensor - weighted_error)
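
As a quick sanity check (a standalone sketch, not part of the diff), the row-wise backward formula y * (e - sum(e * y)) can be verified against the explicit softmax Jacobian J = diag(y) - y y^T. The snippet below uses only NumPy with made-up random inputs; the BaseLayer class from the exercise package is not needed here.

    import numpy as np

    np.random.seed(0)
    logits = np.random.randn(4, 3)  # made-up example batch (4 samples, 3 classes)

    # Forward: rows of the softmax output must sum to 1.
    shifted = logits - logits.max(axis=1, keepdims=True)
    y = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)
    assert np.allclose(y.sum(axis=1), 1.0)

    # Backward: the closed-form product y * (e - sum(e * y)) should match
    # the explicit Jacobian J = diag(y) - y y^T applied to each error row.
    e = np.random.randn(*y.shape)
    fast = y * (e - (e * y).sum(axis=1, keepdims=True))
    for i in range(y.shape[0]):
        jacobian = np.diag(y[i]) - np.outer(y[i], y[i])
        assert np.allclose(fast[i], jacobian @ e[i])
    print("softmax forward/backward checks passed")

The closed-form version avoids materializing the full Jacobian per sample, which is what makes the layer's backward pass a simple element-wise expression rather than a batch of matrix-vector products.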