@@ -43,24 +43,28 @@ function results() result(test_results)
4343 type (neural_network_test_t) neural_network_test
4444
4545 test_results = neural_network_test% run([ &
46-  test_description_t("performing elemental inference with 1 hidden layer", elemental_infer_with_1_hidden_layer_xor_net) &
47-  ,test_description_t("performing elemental inference with 2 hidden layers", elemental_infer_with_2_hidden_layer_xor_net) &
46+  test_description_t("elemental inference with step activation and 1 hidden layer", elemental_infer_1_hidden_layer_xor_step) &
47+  ,test_description_t("elemental inference with relu activation and 1 hidden layer", elemental_infer_1_hidden_layer_xor_relu) &
48+  ,test_description_t("elemental inference with 2 hidden layers", elemental_infer_with_2_hidden_layer_xor_net) &
4849 ,test_description_t(" converting a network with 2 hidden layers to and from JSON format" , multi_hidden_layer_net_to_from_json) &
4950 ,test_description_t(" converting a network with varying-width hidden layers to/from JSON" , varying_width_net_to_from_json) &
5051 ,test_description_t(" performing inference with a network with hidden layers of varying width" , infer_with_varying_width_net) &
5152 ,test_description_t(" double-precision inference" , double_precision_inference) &
5253 ])
5354 end function
5455
55-  function single_hidden_layer_xor_network() result(neural_network)
56+  function one_hidden_layer_xor_net(activation) result(neural_network)
57+    character(len=*), intent(in) :: activation
5658    type(neural_network_t) neural_network
5759    integer, parameter :: nodes_per_layer(*) = [2, 3, 1]
5860    integer, parameter :: max_n = maxval(nodes_per_layer), layers = size(nodes_per_layer)
5961
62+    if (.not. any(activation==["step","relu"])) error stop "neural_network_test_m(one_hidden_layer_xor_net): step or relu required."
63+
6064    neural_network = neural_network_t( &
61-      metadata = [string_t("XOR"), string_t("Damian Rouson"), string_t("2023-07-02"), string_t("step"), string_t("false")], &
65+      metadata = [string_t("XOR"), string_t("Damian Rouson"), string_t("2023-07-02"), string_t(activation), string_t("false")], &
6266      weights = reshape([real:: [1,1,0, 0,1,1, 0,0,0], [1,0,0, -2,0,0, 1,0,0]], [max_n, max_n, layers-1]), &
63-      biases = reshape([[0.,-1.99,0.], [0., 0., 0.]], [max_n, layers-1]), &
67+      biases = reshape([[0.,-1.,0.], [0., 0., 0.]], [max_n, layers-1]), &
6468      nodes = nodes_per_layer &
6569    )
6670 end function
@@ -71,10 +75,10 @@ function multi_layer_xor_network() result(neural_network)
7175 integer , parameter :: max_n = maxval (nodes_per_layer), layers = size (nodes_per_layer)
7276
7377    neural_network = neural_network_t( &
74-      metadata = [string_t("XOR"), string_t("Damian Rouson"), string_t("2023-07-02"), string_t("step"), string_t("false")], &
78+      metadata = [string_t("XOR"), string_t("Damian Rouson"), string_t("2023-07-02"), string_t("relu"), string_t("false")], &
7579      weights = reshape([real:: [1,1,0, 0,1,1, 1,0,0, 1,0,0, 0,1,0, 0,0,1], [1,0,0, -2,0,0, 1,0,0]], &
7680        [max_n, max_n, layers-1]), &
77-      biases = reshape([[0.,-1.99,0.], [0., 0., 0.], [0., 0., 0.]], [max_n, layers-1]), &
81+      biases = reshape([[0.,-1.,0.], [0., 0., 0.], [0., 0., 0.]], [max_n, layers-1]), &
7882      nodes = nodes_per_layer &
7983    )
8084 end function
@@ -205,11 +209,33 @@ function double_precision_inference() result(test_diagnosis)
205209 test_diagnosis = .all. (inputs% values() .approximates. outputs% values() .within. tolerance)
206210 end function
207211
208-  function elemental_infer_with_1_hidden_layer_xor_net() result(test_diagnosis)
212+  function elemental_infer_1_hidden_layer_xor_step() result(test_diagnosis)
213+    type(test_diagnosis_t) test_diagnosis
214+    type(neural_network_t) neural_network
215+
216+    neural_network = one_hidden_layer_xor_net("step")
217+
218+    block
219+      type(tensor_t), allocatable :: truth_table(:)
220+      real, parameter :: tolerance = 1.E-08, false = 0., true = 1.
221+      integer i
222+
223+      associate(array_of_inputs => [tensor_t([true,true]), tensor_t([true,false]), tensor_t([false,true]), tensor_t([false,false])])
224+        truth_table = neural_network%infer(array_of_inputs)
225+      end associate
226+      test_diagnosis = &
227+          (.all. (truth_table(1)%values() .approximates. (false) .within. tolerance)) &
228+        .also. (.all. (truth_table(2)%values() .approximates. ( true) .within. tolerance)) &
229+        .also. (.all. (truth_table(3)%values() .approximates. ( true) .within. tolerance)) &
230+        .also. (.all. (truth_table(4)%values() .approximates. (false) .within. tolerance))
231+ end block
232+ end function
233+
234+  function elemental_infer_1_hidden_layer_xor_relu() result(test_diagnosis)
209235    type(test_diagnosis_t) test_diagnosis
210236    type(neural_network_t) neural_network
211237
212-    neural_network = single_hidden_layer_xor_network()
238+    neural_network = one_hidden_layer_xor_net("relu")
213239
214240 block
215241 type (tensor_t), allocatable :: truth_table(:)
0 commit comments