+class reLULayer:
+    def __init__(self, shape):
+        self.shape = shape
+
+    def forward(self, Z):
+        assert Z.shape == self.shape
+        self.A = np.maximum(Z, 0)
+        return self.A
+
+    def backward(self, upstream_grad, learning_rate=0.1):
+        # Multiply the upstream gradient by the local ReLU derivative
+        # (1 where the activation is positive, 0 elsewhere); the result is
+        # passed back to the preceding Linear layer. learning_rate is unused
+        # here, since ReLU has no trainable parameters to update.
+        return upstream_grad * np.heaviside(self.A, 0)
+
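+# Illustrative usage sketch (not part of the layer itself; assumes numpy is
+# imported as np elsewhere in this module, and uses a made-up (2, 3) batch):
+#   relu = reLULayer(shape=(2, 3))
+#   Z = np.array([[-1.0, 0.0, 2.0],
+#                 [3.0, -4.0, 0.5]])
+#   A = relu.forward(Z)                  # [[0, 0, 2], [3, 0, 0.5]]
+#   dZ = relu.backward(np.ones((2, 3)))  # [[0, 0, 1], [1, 0, 1]]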
+