@@ -1,4 +1,5 @@
-from konverter.utils.model_attributes import BaseLayerInfo, BaseModelInfo, Models, Activations, Layers
+from konverter.utils.model_attributes import BaseLayerInfo, BaseModelInfo, Models, Activations, Layers, \
+  LAYERS_NO_ACTIVATION, LAYERS_IGNORED, LAYERS_RECURRENT
import numpy as np


@@ -8,11 +9,6 @@ def __init__(self):
    self.layers = [getattr(Layers, i) for i in dir(Layers) if '_' not in i]
    self.activations = [getattr(Activations, i) for i in dir(Activations) if '_' not in i]

-    self.attrs_without_activations = [Layers.Dropout.name, Activations.Linear.name, Layers.BatchNormalization.name]
-    self.unused_layers = [Layers.Dropout.name]
-    self.recurrent_layers = [Layers.SimpleRNN.name, Layers.GRU.name]
-    self.ignored_layers = [Layers.Dropout.name]
-
  def get_class_from_name(self, name, search_in):
    """
    :param name: A name of an attribute, ex. keras.layers.Dense, keras.activations.relu
@@ -73,63 +69,78 @@ def get_model_info(self, model):

    return model_class

-  def get_layer_info(self, layer):
+  @staticmethod
+  def _get_layer_name(layer):
    name = getattr(layer, '_keras_api_names_v1')
    if not len(name):
      name = getattr(layer, '_keras_api_names')
+    return name
+
+  def _get_layer_activation(self, layer):
+    if hasattr(layer.activation, '_keras_api_names'):
+      activation = getattr(layer.activation, '_keras_api_names')
+    else:  # fixme: TF 2.3 is missing _keras_api_names
+      activation = 'keras.activations.' + getattr(layer.activation, '__name__')
+      activation = (activation,)
+
+    if len(activation) == 1:
+      return self.get_class_from_name(activation[0], 'activations')
+    else:
+      raise Exception('None or multiple activations?')
+
+  def get_layer_info(self, layer, next_layer):
+    # Identify layer
+    name = self._get_layer_name(layer)
    layer_class = self.get_class_from_name(name[0], 'layers')  # assume only one name
    layer_class.info = BaseLayerInfo()
    if not layer_class:
      layer_class = Layers.Unsupported()  # add activation below to raise exception with
    layer_class.name = name

-    layer_class.info.is_ignored = layer_class.name in self.ignored_layers
-
-    is_linear = False
-    if layer_class.name not in self.attrs_without_activations:
-      if hasattr(layer.activation, '_keras_api_names'):
-        activation = getattr(layer.activation, '_keras_api_names')
-      else:  # fixme: TF 2.3 is missing _keras_api_names
-        activation = 'keras.activations.' + getattr(layer.activation, '__name__')
-        activation = (activation,)  # fixme: expects this as a tuple
-
-      if len(activation) == 1:
-        layer_class.info.activation = self.get_class_from_name(activation[0], 'activations')
-        if layer_class.info.activation.name not in self.attrs_without_activations:
-          layer_class.info.has_activation = True
-        else:
-          is_linear = True
-      else:
-        raise Exception('None or multiple activations?')
-
-    if layer_class.info.has_activation:
-      if layer_class.info.activation.name == 'keras.layers.LeakyReLU':  # set alpha
+    layer_class.info.is_ignored = layer_class.name in LAYERS_IGNORED
+
+    # Handle layer activation
+    if layer_class.name not in LAYERS_NO_ACTIVATION:
+      layer_class.info.activation = self._get_layer_activation(layer)
+      layer_class.info.has_activation = True
+
+      # Note: special case for when activation is a separate layer after dense
+      if layer_class.name == Layers.Dense.name and layer_class.info.activation.name == Activations.Linear.name:
+        if next_layer is not None and self._get_layer_name(next_layer)[0] == Layers.Activation.name:
+          layer_class.info.activation = self._get_layer_activation(next_layer)
+
+    # Check if layer is supported given ignored status and activation
+    if layer_class.info.has_activation and not layer_class.info.is_ignored:
+      if layer_class.info.activation.name == Activations.LeakyReLU.name:  # set alpha
        layer_class.info.activation.alpha = round(float(layer.activation.alpha), 5)

      # check layer activation against this layer's supported activations
      if layer_class.info.activation.name in self.attr_map(layer_class.supported_activations, 'name'):
        layer_class.info.supported = True
-    elif layer_class.info.is_ignored or is_linear:  # skip activation check if layer has no activation (eg. dropout or linear)
+
+    elif layer_class.info.is_ignored:
      layer_class.info.supported = True
-    elif layer_class.name in self.attrs_without_activations:
+
+    elif layer_class.name in LAYERS_NO_ACTIVATION:  # skip activation check if layer has no activation (eg. dropout)
      layer_class.info.supported = True

    # if not layer_class.info.supported or (not is_linear and not layer_class.info.has_activation):
    #   return layer_class
    if not layer_class.info.supported:
      return layer_class

+    # Parse weights and biases from layer if available
    try:
      wb = layer.get_weights()
      if len(wb) == 0:
        return layer_class
    except:
      return layer_class

-    if len(wb) == 2:
+    if len(wb) == 2:  # Dense
      layer_class.info.weights = np.array(wb[0])
      layer_class.info.biases = np.array(wb[1])
-    elif len(wb) == 3 and layer_class.name in self.recurrent_layers:
+    elif len(wb) == 3 and layer_class.name in LAYERS_RECURRENT:
      layer_class.info.weights = np.array(wb[:2])  # input and recurrent weights
      layer_class.info.biases = np.array(wb[-1])
    layer_class.info.returns_sequences = layer.return_sequences
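
The LAYERS_* constants imported at the top of this diff take over for the instance attributes deleted from __init__. They live in konverter/utils/model_attributes.py, which is not shown here; a minimal sketch of what they might look like, assuming they simply mirror the removed lists one-to-one (hypothetical, for reference only):

# Hypothetical sketch of the module-level constants in model_attributes.py,
# assuming they mirror the instance attributes removed from __init__ above.
LAYERS_NO_ACTIVATION = [Layers.Dropout.name, Activations.Linear.name, Layers.BatchNormalization.name]
LAYERS_IGNORED = [Layers.Dropout.name]
LAYERS_RECURRENT = [Layers.SimpleRNN.name, Layers.GRU.name]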
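
Because get_layer_info now takes a next_layer argument (used to fold a standalone keras.layers.Activation into a preceding linear Dense layer), its caller needs a one-step lookahead over the Keras model's layers. A rough call-site sketch; the loop, the `support` instance name, and the surrounding converter code are assumptions and not part of this diff:

# Hypothetical call site with one-layer lookahead so a trailing Activation
# layer can be attached to the Dense layer that precedes it.
for idx, keras_layer in enumerate(model.layers):
  next_layer = model.layers[idx + 1] if idx + 1 < len(model.layers) else None
  layer_info = support.get_layer_info(keras_layer, next_layer)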