diff --git a/3_RNN/Dropout.py b/3_RNN/Dropout.py
new file mode 100644
index 0000000000000000000000000000000000000000..d044903d55c6bf2ec958c6f583058962483d5826
--- /dev/null
+++ b/3_RNN/Dropout.py
@@ -0,0 +1,37 @@
+from .Base import BaseLayer
+import numpy as np
+
+
+class Dropout(BaseLayer):
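+    """Inverted-dropout layer: during training, units are dropped at random and the
+    survivors are rescaled by 1/probability, so inference needs no extra scaling."""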
+
+    def __init__(self, probability):
+        super().__init__()
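+        # probability is the keep probability: the fraction of units left active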
+        self.probability = probability
+        self.input_tensor_shape = None
+        self.output_tensor = None
+        self.dropout_mask = None
+        self.error_tensor = None
+        self.output_error_tensor = None
+
+    def forward(self, input_tensor):
+        if not self.testing_phase:
+            self.input_tensor_shape = np.shape(input_tensor)
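+            # Bernoulli keep-mask: True (unit survives) with probability `probability`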
+            self.dropout_mask = np.random.uniform(0, 1, self.input_tensor_shape) < self.probability
+            output_tensor = self.dropout_mask * input_tensor / self.probability
+            self.output_tensor = output_tensor
+            return self.output_tensor.copy()
+        else:
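+            # Inference: with inverted dropout the layer is the identity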
+            self.output_tensor = input_tensor
+            return self.output_tensor.copy()
+
+    def backward(self, error_tensor):
+        self.error_tensor = np.copy(error_tensor)
+        # Pass the gradient only through the units kept in forward, scaled by 1/probability
+        self.output_error_tensor = error_tensor * self.dropout_mask / self.probability
+        return self.output_error_tensor.copy()
+