
Commit 76995e1

Better support for converted Keras models (#12)
* support activation layers (with dense only) and ignore inputlayer
* clean up
* test new support
* bump version
1 parent dd0cd3f commit 76995e1

File tree

6 files changed: +78, −45 lines

* konverter/__init__.py
* konverter/__main__.py
* konverter/utils/konverter_support.py
* konverter/utils/model_attributes.py
* pyproject.toml
* tests/build_test_models.py


konverter/__init__.py

Lines changed: 13 additions & 8 deletions
@@ -1,4 +1,4 @@
-from konverter.utils.model_attributes import Activations, Layers, watermark
+from konverter.utils.model_attributes import Activations, Layers, LAYERS_IGNORED, watermark
 from konverter.utils.konverter_support import KonverterSupport
 from konverter.utils.general import success, error, info, warning, COLORS
 import numpy as np
@@ -45,7 +45,7 @@ def start(self):
     self.get_layers()
     if self.verbose:
       self.print_model_architecture()
-    self.remove_unused_layers()
+    self.remove_ignored_layers()
     self.parse_output_file()
     self.build_konverted_model()

@@ -72,7 +72,7 @@ def build_konverted_model(self):
       if layer.name == Layers.Dense.name:
         model_line = f'l{idx} = {layer.string.format(prev_output, idx, idx)}'
         model_builder['model'].append(model_line)
-        if layer.info.has_activation:
+        if layer.info.has_activation and layer.info.activation.name != Activations.Linear.name:
           if layer.info.activation.needs_function:
             lyr_w_act = f'l{idx} = {layer.info.activation.alias.lower()}(l{idx})'
           else:  # eg. tanh or relu
@@ -165,8 +165,8 @@ def save_model(self, model_builder):
     with open(f'{self.output_file}.py', 'w') as f:
       f.write(output.replace('\t', self.indent))

-  def remove_unused_layers(self):
-    self.layers = [layer for layer in self.layers if layer.name not in support.unused_layers]
+  def remove_ignored_layers(self):
+    self.layers = [layer for layer in self.layers if layer.name not in LAYERS_IGNORED]

   def parse_output_file(self):
     if self.output_file is None:  # user hasn't supplied output file path, use input file name in same dir
@@ -186,7 +186,8 @@ def parse_output_file(self):
   def print_model_architecture(self):
     success('\nSuccessfully got model architecture! 😄\n')
     info('Layers:')
-    to_print = [[COLORS.BASE.format(74) + f'name: {layer.alias}' + COLORS.ENDC] for layer in self.layers]
+    ignored_txt = {True: ' (ignored)', False: ''}
+    to_print = [[COLORS.BASE.format(74) + f'name: {layer.alias}{ignored_txt[layer.info.is_ignored]}' + COLORS.ENDC] for layer in self.layers]
     max_len = 0
     indentation = '  '
     for idx, layer in enumerate(self.layers):
@@ -205,8 +206,12 @@ def print_model_architecture(self):
     print(COLORS.ENDC, end='')

   def get_layers(self):
-    for layer in self.model.layers:
-      layer = support.get_layer_info(layer)
+    for idx, layer in enumerate(self.model.layers):
+      next_layer = None
+      if idx < len(self.model.layers) - 1:
+        next_layer = self.model.layers[idx + 1]
+
+      layer = support.get_layer_info(layer, next_layer)
       if layer.info.supported:
         self.layers.append(layer)
       else:
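For context, a minimal sketch of the lookahead pattern get_layers() now uses: each layer is visited together with its successor so a standalone Activation layer can be attributed to the Dense layer before it. The layer list and get_layer_info stub below are hypothetical stand-ins, not Konverter's real objects.

# Hypothetical stand-ins: strings instead of real Keras layer objects.
model_layers = ['InputLayer', 'Dense', 'Activation', 'BatchNormalization']

def get_layer_info(layer, next_layer):  # stub for illustration
  return (layer, next_layer)

parsed = []
for idx, layer in enumerate(model_layers):
  next_layer = None
  if idx < len(model_layers) - 1:  # the last layer has no successor
    next_layer = model_layers[idx + 1]
  parsed.append(get_layer_info(layer, next_layer))

print(parsed)
# [('InputLayer', 'Dense'), ('Dense', 'Activation'),
#  ('Activation', 'BatchNormalization'), ('BatchNormalization', None)]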

konverter/__main__.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 import konverter
 from konverter.utils.general import success, info, warning, error, COLORS, color_logo, blue_grad

-KONVERTER_VERSION = "v0.2.4.1"  # fixme: unify this
+KONVERTER_VERSION = "v0.2.5"  # fixme: unify this
 KONVERTER_LOGO_COLORED = color_logo(KONVERTER_VERSION)

konverter/utils/konverter_support.py

Lines changed: 43 additions & 32 deletions
@@ -1,4 +1,5 @@
-from konverter.utils.model_attributes import BaseLayerInfo, BaseModelInfo, Models, Activations, Layers
+from konverter.utils.model_attributes import BaseLayerInfo, BaseModelInfo, Models, Activations, Layers, \
+  LAYERS_NO_ACTIVATION, LAYERS_IGNORED, LAYERS_RECURRENT
 import numpy as np


@@ -8,11 +9,6 @@ def __init__(self):
     self.layers = [getattr(Layers, i) for i in dir(Layers) if '_' not in i]
     self.activations = [getattr(Activations, i) for i in dir(Activations) if '_' not in i]

-    self.attrs_without_activations = [Layers.Dropout.name, Activations.Linear.name, Layers.BatchNormalization.name]
-    self.unused_layers = [Layers.Dropout.name]
-    self.recurrent_layers = [Layers.SimpleRNN.name, Layers.GRU.name]
-    self.ignored_layers = [Layers.Dropout.name]
-
   def get_class_from_name(self, name, search_in):
     """
     :param name: A name of an attribute, ex. keras.layers.Dense, keras.activations.relu
@@ -73,63 +69,78 @@ def get_model_info(self, model):

     return model_class

-  def get_layer_info(self, layer):
+  @staticmethod
+  def _get_layer_name(layer):
     name = getattr(layer, '_keras_api_names_v1')
     if not len(name):
       name = getattr(layer, '_keras_api_names')
+    return name
+
+  def _get_layer_activation(self, layer):
+    if hasattr(layer.activation, '_keras_api_names'):
+      activation = getattr(layer.activation, '_keras_api_names')
+    else:  # fixme: TF 2.3 is missing _keras_api_names
+      activation = 'keras.activations.' + getattr(layer.activation, '__name__')
+      activation = (activation,)
+
+    if len(activation) == 1:
+      return self.get_class_from_name(activation[0], 'activations')
+    else:
+      raise Exception('None or multiple activations?')
+
+  def get_layer_info(self, layer, next_layer):
+    # Identify layer
+    name = self._get_layer_name(layer)
     layer_class = self.get_class_from_name(name[0], 'layers')  # assume only one name
     layer_class.info = BaseLayerInfo()
     if not layer_class:
       layer_class = Layers.Unsupported()  # add activation below to raise exception with
       layer_class.name = name

-    layer_class.info.is_ignored = layer_class.name in self.ignored_layers
-
-    is_linear = False
-    if layer_class.name not in self.attrs_without_activations:
-      if hasattr(layer.activation, '_keras_api_names'):
-        activation = getattr(layer.activation, '_keras_api_names')
-      else:  # fixme: TF 2.3 is missing _keras_api_names
-        activation = 'keras.activations.' + getattr(layer.activation, '__name__')
-        activation = (activation,)  # fixme: expects this as a tuple
-
-      if len(activation) == 1:
-        layer_class.info.activation = self.get_class_from_name(activation[0], 'activations')
-        if layer_class.info.activation.name not in self.attrs_without_activations:
-          layer_class.info.has_activation = True
-        else:
-          is_linear = True
-      else:
-        raise Exception('None or multiple activations?')
-
-    if layer_class.info.has_activation:
-      if layer_class.info.activation.name == 'keras.layers.LeakyReLU':  # set alpha
+    layer_class.info.is_ignored = layer_class.name in LAYERS_IGNORED
+
+    # Handle layer activation
+    if layer_class.name not in LAYERS_NO_ACTIVATION:
+      layer_class.info.activation = self._get_layer_activation(layer)
+      layer_class.info.has_activation = True
+
+      # Note: special case for when activation is a separate layer after dense
+      if layer_class.name == Layers.Dense.name and layer_class.info.activation.name == Activations.Linear.name:
+        if next_layer is not None and self._get_layer_name(next_layer)[0] == Layers.Activation.name:
+          layer_class.info.activation = self._get_layer_activation(next_layer)
+
+    # Check if layer is supported given ignored status and activation
+    if layer_class.info.has_activation and not layer_class.info.is_ignored:
+      if layer_class.info.activation.name == Activations.LeakyReLU.name:  # set alpha
         layer_class.info.activation.alpha = round(float(layer.activation.alpha), 5)

       # check layer activation against this layer's supported activations
       if layer_class.info.activation.name in self.attr_map(layer_class.supported_activations, 'name'):
         layer_class.info.supported = True
-    elif layer_class.info.is_ignored or is_linear:  # skip activation check if layer has no activation (eg. dropout or linear)
+
+    elif layer_class.info.is_ignored:
       layer_class.info.supported = True
-    elif layer_class.name in self.attrs_without_activations:
+
+    elif layer_class.name in LAYERS_NO_ACTIVATION:  # skip activation check if layer has no activation (eg. dropout)
       layer_class.info.supported = True

     # if not layer_class.info.supported or (not is_linear and not layer_class.info.has_activation):
     #   return layer_class
     if not layer_class.info.supported:
       return layer_class

+    # Parse weights and biases from layer if available
     try:
       wb = layer.get_weights()
       if len(wb) == 0:
         return layer_class
     except:
       return layer_class

-    if len(wb) == 2:
+    if len(wb) == 2:  # Dense
       layer_class.info.weights = np.array(wb[0])
       layer_class.info.biases = np.array(wb[1])
-    elif len(wb) == 3 and layer_class.name in self.recurrent_layers:
+    elif len(wb) == 3 and layer_class.name in LAYERS_RECURRENT:
       layer_class.info.weights = np.array(wb[:2])  # input and recurrent weights
       layer_class.info.biases = np.array(wb[-1])
       layer_class.info.returns_sequences = layer.return_sequences
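The special case above is the heart of this commit: a Dense layer whose activation is the default linear, followed by a keras.layers.Activation layer, adopts that next layer's activation (the Activation layer itself is then dropped via LAYERS_IGNORED). A toy sketch of that folding rule, using (layer_type, activation) tuples as hypothetical stand-ins for real layer objects:

def fold_activation(layer, next_layer):
  """Adopt a standalone Activation layer's function into a linear Dense layer."""
  layer_type, activation = layer
  if layer_type == 'Dense' and activation == 'linear':
    if next_layer is not None and next_layer[0] == 'Activation':
      return ('Dense', next_layer[1])  # take the standalone activation
  return layer  # unchanged otherwise

print(fold_activation(('Dense', 'linear'), ('Activation', 'relu')))  # ('Dense', 'relu')
print(fold_activation(('Dense', 'tanh'), ('Activation', 'relu')))    # ('Dense', 'tanh'), explicit activation wins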

konverter/utils/model_attributes.py

Lines changed: 14 additions & 0 deletions
@@ -90,6 +90,14 @@ class Dropout(_BaseLayer):
     name = 'keras.layers.Dropout'
     alias = 'dropout'

+  class InputLayer(_BaseLayer):
+    name = 'keras.layers.InputLayer'
+    alias = 'InputLayer'
+
+  class Activation(_BaseLayer):
+    name = 'keras.layers.Activation'
+    alias = 'Activation'
+
   class BatchNormalization(_BaseLayer):
     name = 'keras.layers.BatchNormalization'
     alias = 'batch_norm'
@@ -125,6 +133,12 @@ class Unsupported(_BaseLayer):  # propogated with layer info and returned to Konverter
     pass


+# LAYERS_NO_ACTIVATION = [Layers.Dropout.name, Layers.InputLayer.name, Activations.Linear.name, Layers.BatchNormalization.name]
+LAYERS_NO_ACTIVATION = [Layers.Dropout.name, Layers.InputLayer.name, Layers.BatchNormalization.name]
+LAYERS_IGNORED = [Layers.Dropout.name, Layers.InputLayer.name, Layers.Activation.name]
+LAYERS_RECURRENT = [Layers.SimpleRNN.name, Layers.GRU.name]
+
+
 class BaseModelInfo:
   supported = False
   input_shape = None  # this will need to be moved if we support functional models
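These module-level constants replace the per-instance lists that previously lived on KonverterSupport; consumers simply test a layer's .name string for membership. A small self-contained sketch of how remove_ignored_layers() filters with LAYERS_IGNORED (the one-line classes are hypothetical stand-ins for the real nested Layers classes):

class Dropout: name = 'keras.layers.Dropout'
class InputLayer: name = 'keras.layers.InputLayer'
class Activation: name = 'keras.layers.Activation'
class Dense: name = 'keras.layers.Dense'

LAYERS_IGNORED = [Dropout.name, InputLayer.name, Activation.name]

model_layers = [InputLayer, Dense, Activation, Dropout, Dense]
kept = [layer for layer in model_layers if layer.name not in LAYERS_IGNORED]
print([layer.name for layer in kept])  # ['keras.layers.Dense', 'keras.layers.Dense']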

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "keras-konverter"
-version = "0.2.4.1"
+version = "0.2.5"
 description = "A tool to convert simple Keras models to pure Python + NumPy"
 readme = "README.md"
 repository = "https://github.com/ShaneSmiskol/Konverter"

tests/build_test_models.py

Lines changed: 6 additions & 3 deletions
@@ -1,5 +1,5 @@
 import numpy as np
-from tensorflow.keras.layers import Dense, SimpleRNN, GRU, BatchNormalization
+from tensorflow.keras.layers import Dense, SimpleRNN, GRU, BatchNormalization, InputLayer, Activation
 from tensorflow.keras.models import Sequential
 from tensorflow.keras.backend import clear_session

@@ -14,9 +14,12 @@ def create_model(model_type):
   y_train = (np.mean(x_train, axis=1) ** 2) / 2  # half of squared mean of sample

   model = Sequential()
-  model.add(Dense(128, activation='relu', input_shape=x_train.shape[1:]))
+  model.add(InputLayer(input_shape=x_train.shape[1:]))
+  model.add(Dense(128))
+  model.add(Activation(activation='relu'))
   model.add(BatchNormalization())
-  model.add(Dense(64, activation='tanh'))
+  model.add(Dense(64))
+  model.add(Activation(activation='tanh'))
   model.add(BatchNormalization())
   model.add(Dense(32, activation='relu'))
   model.add(BatchNormalization())
