
Commit 71e24b3

Author: Hiteshi
Commit message: added tests for activation functions
1 parent 8a96a3b commit 71e24b3

File tree

3 files changed: +63 -15 lines changed


MLlib/activations.py (+1 -1)
@@ -253,7 +253,7 @@ def derivative(X, alpha=0.01):
         ndarray(dtype=float,ndim=1)
         Outputs array of derivatives.
         """
-        dx = np.greater(X,0).astype(float)
+        dx = np.greater(X, 0).astype(float)
         dx[X < 0] = -alpha
         return dx

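For reference, a minimal standalone sketch of what this hunk computes, written against plain NumPy rather than the repo's LeakyRelu class; the sample array and variable values are illustrative only, and the -alpha convention for negative inputs simply follows the code shown above:

import numpy as np

alpha = 0.01
X = np.array([1.0, -2.0, 0.5, -0.1])

# 1.0 where X > 0, 0.0 elsewhere, as in np.greater(X, 0).astype(float)
dx = np.greater(X, 0).astype(float)

# entries with X < 0 are set to -alpha, matching the diff above
dx[X < 0] = -alpha
# dx is now [1.0, -0.01, 1.0, -0.01]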
MLlib/optimizers.py (+1 -1)
@@ -5,7 +5,7 @@

 class GradientDescent():
     """
-
+
     A classic gradient descent implementation.

     W = W - a * dm

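The docstring's update rule, W = W - a * dm, can be sketched as a plain function; the name gradient_descent_step, its signature, and the example values below are illustrative assumptions, not the repo's GradientDescent API:

import numpy as np

def gradient_descent_step(W, dW, lr=0.01):
    # classic update from the docstring: W = W - a * dm,
    # with a as the learning rate and dm as the gradient
    return W - lr * dW

W = np.array([0.5, -1.2, 3.0])
dW = np.array([0.1, -0.4, 0.25])
W = gradient_descent_step(W, dW, lr=0.1)  # -> [0.49, -1.16, 2.975]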
MLlib/tests/test_activations.py (+61 -13)
@@ -1,6 +1,6 @@
 import numpy as np
-from MLlib.activations import Sigmoid, Relu
-from MLlib.activations import unit_step,TanH,LeakyRelu,Elu
+from MLlib.activations import Sigmoid, Relu, Softsign, Swish
+from MLlib.activations import unit_step, TanH, LeakyRelu, Elu, Softmax


 def test_Sigmoid():
@@ -42,7 +42,7 @@ def test_unit_step():


 def test_TanH():
-    X= np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
     if np.array_equal(
         np.tanh(X),
         TanH.activation(X)
@@ -56,26 +56,74 @@ def test_TanH():


 def test_LeakyRelu(alpha):
-    X= np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
     if np.array_equal(
-        np.maximum(alpha*X,X),
-        LeakyRelu.activation(X,alpha)
+        np.maximum(alpha*X, X),
+        LeakyRelu.activation(X, alpha)
     ) is not True:
         raise AssertionError
     dx = np.greater(X, 0).astype(float)
-    dx[X<0]=-alpha
+    dx[X < 0] = -alpha
     if np.array_equal(
         dx,
-        LeakyRelu.derivative(X,alpha)
+        LeakyRelu.derivative(X, alpha)
     ) is not True:
         raise AssertionError

-test_LeakyRelu(0.01)
+
 def test_Elu(alpha):
-    X= np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
+    if np.array_equal(
+        np.maximum(X, 0)+np.minimum(0, alpha * (np.exp(X) - 1)),
+        Elu.activation(X, alpha)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Softmax():
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    x_vector = X.reshape(X.shape[0], 1)
+    if np.array_equal(
+        np.exp(X)/np.sum(np.exp(X)),
+        Softmax.activation(X)
+    ) is not True:
+        raise AssertionError
+    x_vector = X.reshape(X.shape[0], 1)
+    x_matrix = np.tile(x_vector, X.shape[0])
+    x_der = np.diag(X) - (x_matrix * np.transpose(x_matrix))
+    if np.array_equal(
+        x_der,
+        Softmax.derivative(X)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Softsign():
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    if np.array_equal(
+        X / (np.abs(X) + 1),
+        Softsign.activation(X)
+    ) is not True:
+        raise AssertionError
+    if np.array_equal(
+        1 / (np.abs(X) + 1)**2,
+        Softsign.derivative(X)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Swish(alpha):
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    if np.array_equal(
+        X / (1 + np.exp(-(alpha*X))),
+        Swish.activation(X)
+    ) is not True:
+        raise AssertionError
+    s = 1 / (1 + np.exp(-X))
+    f = X / (1 + np.exp(-(alpha*X)))
+    df = f + (s * (1 - f))
     if np.array_equal(
-        np.maximum(X,0)+np.minimum(0, alpha * (np.exp(X) - 1)),
-        Elu.activation(X,alpha)
+        df,
+        Swish.derivative(X)
     ) is not True:
         raise AssertionError
-test_Elu(1)

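Since test_LeakyRelu, test_Elu and test_Swish take an alpha argument, a test runner has to supply it; below is a minimal sketch of one way to do that with pytest.mark.parametrize. The wrapper name and the alpha values are illustrative assumptions, not part of this commit:

import numpy as np
import pytest

from MLlib.activations import LeakyRelu

@pytest.mark.parametrize("alpha", [0.01, 0.1])
def test_LeakyRelu_parametrized(alpha):
    # same check as test_LeakyRelu above, with alpha injected by pytest
    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
    assert np.array_equal(np.maximum(alpha * X, X),
                          LeakyRelu.activation(X, alpha))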