@@ -1,6 +1,6 @@
 import numpy as np
-from MLlib.activations import Sigmoid, Relu
-from MLlib.activations import unit_step,TanH,LeakyRelu,Elu
+from MLlib.activations import Sigmoid, Relu, Softsign, Swish
+from MLlib.activations import unit_step, TanH, LeakyRelu, Elu, Softmax


 def test_Sigmoid():
@@ -42,7 +42,7 @@ def test_unit_step():


 def test_TanH():
-    X = np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
     if np.array_equal(
         np.tanh(X),
         TanH.activation(X)
@@ -56,26 +56,74 @@ def test_TanH():


 def test_LeakyRelu(alpha):
-    X = np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
     if np.array_equal(
-        np.maximum(alpha * X,X),
-        LeakyRelu.activation(X,alpha)
+        np.maximum(alpha * X, X),
+        LeakyRelu.activation(X, alpha)
     ) is not True:
         raise AssertionError
     dx = np.greater(X, 0).astype(float)
-    dx[X < 0] = -alpha
+    dx[X < 0] = -alpha
     if np.array_equal(
         dx,
-        LeakyRelu.derivative(X,alpha)
+        LeakyRelu.derivative(X, alpha)
     ) is not True:
         raise AssertionError

-test_LeakyRelu(0.01)
+
 def test_Elu(alpha):
-    X = np.array([[1, -2, 3], [-1, 2, 1],[0, -5, 6]])
+    X = np.array([[1, -2, 3], [-1, 2, 1], [0, -5, 6]])
+    if np.array_equal(
+        np.maximum(X, 0) + np.minimum(0, alpha * (np.exp(X) - 1)),
+        Elu.activation(X, alpha)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Softmax():
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    x_vector = X.reshape(X.shape[0], 1)
+    if np.array_equal(
+        np.exp(X) / np.sum(np.exp(X)),
+        Softmax.activation(X)
+    ) is not True:
+        raise AssertionError
+    x_vector = X.reshape(X.shape[0], 1)
+    x_matrix = np.tile(x_vector, X.shape[0])
+    x_der = np.diag(X) - (x_matrix * np.transpose(x_matrix))
+    if np.array_equal(
+        x_der,
+        Softmax.derivative(X)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Softsign():
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    if np.array_equal(
+        X / (np.abs(X) + 1),
+        Softsign.activation(X)
+    ) is not True:
+        raise AssertionError
+    if np.array_equal(
+        1 / (np.abs(X) + 1)**2,
+        Softsign.derivative(X)
+    ) is not True:
+        raise AssertionError
+
+
+def test_Swish(alpha):
+    X = np.array([1.3, 5.1, 2.2, 0.7, 1.1])
+    if np.array_equal(
+        X / (1 + np.exp(-(alpha * X))),
+        Swish.activation(X)
+    ) is not True:
+        raise AssertionError
+    s = 1 / (1 + np.exp(-X))
+    f = X / (1 + np.exp(-(alpha * X)))
+    df = f + (s * (1 - f))
     if np.array_equal(
-        np.maximum(X, 0) + np.minimum(0, alpha * (np.exp(X) - 1)),
-        Elu.activation(X, alpha)
+        df,
+        Swish.derivative(X)
     ) is not True:
         raise AssertionError
-test_Elu(1)
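Note that this diff removes the module-level calls test_LeakyRelu(0.01) and test_Elu(1), while test_LeakyRelu, test_Elu, and test_Swish still take an alpha argument; if the suite is collected by pytest, that argument has to be supplied by a fixture or parametrization. A minimal sketch of one way to do that, assuming pytest is the runner (this conftest.py is illustrative only and not part of the diff):

# conftest.py -- hypothetical, not part of this change.
# Provides an "alpha" fixture so pytest can run the tests that now take
# alpha as a parameter instead of being called at module level.
import pytest


@pytest.fixture(params=[0.01, 1.0])
def alpha(request):
    # Each test requesting "alpha" runs once per value; 0.01 and 1 mirror
    # the arguments of the removed module-level calls.
    return request.param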