Skip to content
Snippets Groups Projects
Commit 7959235c authored by Falguni Ghosh's avatar Falguni Ghosh
Browse files

Upload New File

parent 0fb9ad41
Branches
Tags
No related merge requests found
import numpy as np
from .Base import BaseLayer
class SoftMax(BaseLayer):
    """Softmax activation layer.

    Converts a batch of logits into per-row probability distributions
    (each row is non-negative and sums to 1). Caches the forward output
    so the backward pass can apply the softmax Jacobian.
    """

    def __init__(self):
        super().__init__()
        # Softmax output cached by forward(); consumed by backward().
        self.input_in_backward_path = None

    def forward(self, input_tensor):
        """Apply row-wise softmax.

        Args:
            input_tensor: logits, assumed shape (batch, features) — the
                original code indexed rows with input_tensor[i]; confirm 2-D.

        Returns:
            Array of the same shape with softmax applied along axis 1.
        """
        # Subtract the per-row max for numerical stability (exp overflow).
        # keepdims=True lets the result broadcast back over each row,
        # replacing the original Python loop. Unlike the original, this
        # does NOT mutate the caller's input_tensor in place.
        shifted = input_tensor - np.max(input_tensor, axis=1, keepdims=True)
        exponentials = np.exp(shifted)
        output = exponentials / np.sum(exponentials, axis=1, keepdims=True)
        # Store a private copy so later caller-side mutation of the
        # returned array cannot corrupt the backward pass (matches the
        # original's np.copy behavior).
        self.input_in_backward_path = np.copy(output)
        return output

    def backward(self, error_tensor):
        """Backpropagate the gradient through the softmax.

        Args:
            error_tensor: gradient w.r.t. this layer's output, same shape
                as the forward output.

        Returns:
            Gradient w.r.t. this layer's input:
            dL/dx = y * (dL/dy - sum_j(dL/dy_j * y_j)) per row, the
            compact form of multiplying by the softmax Jacobian.
        """
        y = self.input_in_backward_path
        # Per-row inner product <dL/dy, y>, kept as a column for broadcasting.
        row_dot = np.sum(error_tensor * y, axis=1, keepdims=True)
        return y * (error_tensor - row_dot)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please sign in to comment