Add LeakyReLU support (#9)
* Update lock

* Some updates for LeakyReLU

* Revert "Update lock"

This reverts commit 8c30750.

* support custom alpha values for leakyrelu

* support custom alpha values for leakyrelu

* clean up

* clean up

* clean up, bump version

* bump

* bump

* bump

* bump
sshane authored Sep 24, 2020
1 parent f4f8ad6 commit eb56b55
Showing 6 changed files with 43 additions and 14 deletions.
3 changes: 2 additions & 1 deletion README.md
@@ -19,6 +19,7 @@ The goal of this tool is to provide a quick and easy way to execute Keras models
 - Works with all supported layers
 - Activations:
   - ReLU
+  - LeakyReLU (supports custom alphas)
   - Sigmoid
   - Softmax
   - Tanh
@@ -84,7 +85,7 @@ predict([np.random.rand(3).astype(np.float32)])

 ## Dependencies
 Thanks to [@apiad](https://github.com/apiad) you can now use [Poetry](https://github.com/python-poetry/poetry) to install all the needed dependencies for this tool! However the requirements are a pretty short list:
-- It seems most versions of TensorFlow that include Keras work perfectly fine. Tested from 1.14 to 2.1.0 using Actions and no issues have occurred. **(Make sure you use implementation 2/v3 with GRU layers if not on TF 2.x)**
+- It seems most versions of TensorFlow that include Keras work perfectly fine. Tested from 1.14 to 2.2 using Actions and no issues have occurred. **(Make sure you use implementation 2/v3 with GRU layers if not on TF 2.x)**
 - **Important**: You must create your models with tf.keras currently (not keras)
 - Python >= 3.6 (for the glorious f-strings!)

13 changes: 10 additions & 3 deletions konverter/__init__.py
@@ -76,7 +76,11 @@ def build_konverted_model(self):
       if layer.info.activation.needs_function:
         lyr_w_act = f'l{idx} = {layer.info.activation.alias.lower()}(l{idx})'
       else:  # eg. tanh or relu
-        lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}')
+        if layer.info.activation.alpha is None:
+          lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}')
+        else:  # custom alpha for leakyrelu
+          lyr_w_act = layer.info.activation.string.lower().format(f'l{idx}', layer.info.activation.alpha)
 
         lyr_w_act = f'l{idx} = {lyr_w_act}'
       model_builder['model'].append(lyr_w_act)
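To make the alpha path concrete, here is roughly what this branch emits into the konverted file. The layer index and alpha below are made-up example values; only the format string comes from Activations.LeakyReLU in model_attributes.py further down:

# hypothetical example: layer l2 feeding a LeakyReLU with alpha=0.3
string = 'np.where({0} > 0, {0}, {0} * {1})'  # Activations.LeakyReLU.string
lyr_w_act = string.lower().format('l2', 0.3)
print(f'l2 = {lyr_w_act}')  # prints: l2 = np.where(l2 > 0, l2, l2 * 0.3)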

@@ -212,8 +216,11 @@ def load_model(self):
     if isinstance(self.input_model, str):
       self.input_model = self.input_model.replace('\\', '/')
       if os.path.exists(self.input_model):
-        models = importlib.import_module('tensorflow.keras.models')
-        self.model = models.load_model(self.input_model)
+        load_model = importlib.import_module('tensorflow.keras.models').load_model  # only import when needed
+
+        # FIXME: for some reason tf 2 can't load models with LeakyReLU without custom_objects
+        custom_leakyrelu = importlib.import_module('tensorflow.keras.layers').LeakyReLU
+        self.model = load_model(self.input_model, custom_objects={'LeakyReLU': custom_leakyrelu})
       else:
         raise Exception(error('The supplied model file path doesn\'t exist!', ret=True))
     else:
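Outside of Konverter, the equivalent of the workaround above is the following sketch ('model.h5' is a placeholder path):

from tensorflow.keras.layers import LeakyReLU
from tensorflow.keras.models import load_model

# per the FIXME above: tf 2 may refuse to deserialize a model containing LeakyReLU
# unless the layer class is supplied via custom_objects
model = load_model('model.h5', custom_objects={'LeakyReLU': LeakyReLU})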
3 changes: 2 additions & 1 deletion konverter/__main__.py
@@ -3,9 +3,10 @@
 import konverter
 from konverter.utils.general import success, info, warning, error, COLORS, color_logo, blue_grad
 
-KONVERTER_VERSION = "v0.2.1"  # fixme: unify this
+KONVERTER_VERSION = "v0.2.2"  # fixme: unify this
 KONVERTER_LOGO_COLORED = color_logo(KONVERTER_VERSION)
 
+
 class KonverterCLI:
   def __init__(self, args):
     self.args = args
7 changes: 7 additions & 0 deletions konverter/utils/konverter_support.py
@@ -23,6 +23,10 @@ def get_class_from_name(self, name, search_in):
     for attr_class in attrs:
       if name == attr_class.name:
         return attr_class()  # new instance of class
+    if search_in == 'activations':  # not found
+      base = Activations.Unsupported()
+      base.name = name
+      return base
     return False
 
   def in_models(self, name):
@@ -94,6 +98,9 @@ def get_layer_info(self, layer):
       raise Exception('None or multiple activations?')
 
     if layer_class.info.has_activation:
+      if layer_class.info.activation.name == 'keras.layers.LeakyReLU':  # set alpha
+        layer_class.info.activation.alpha = round(float(layer.activation.alpha), 5)
+
       # check layer activation against this layer's supported activations
       if layer_class.info.activation.name in self.attr_map(layer_class.supported_activations, 'name'):
         layer_class.info.supported = True
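A standalone sketch of the new fallback path (the Unsupported stand-in mirrors Activations.Unsupported from model_attributes.py below; the empty attrs list and the ELU name are hypothetical inputs):

class Unsupported:  # stand-in for Activations.Unsupported
  name = None

def get_class_from_name(name, attrs, search_in='activations'):
  for attr_class in attrs:
    if name == attr_class.name:
      return attr_class()  # new instance of the matched class
  if search_in == 'activations':  # not found: hand back a named Unsupported instance
    base = Unsupported()
    base.name = name
    return base
  return False

print(get_class_from_name('keras.activations.elu', []).name)  # keras.activations.elu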
29 changes: 21 additions & 8 deletions konverter/utils/model_attributes.py
@@ -21,45 +21,58 @@ class Activations:
   ex. activation in string format
   To add new activations, use the code_converter function and add them here!
   """
-  class _BaseAcivation:
+
+  class _BaseActivation:
     name = None
     alias = None
     string = None
+    alpha = None
     needs_function = True
 
-  class ReLU(_BaseAcivation):
+  class ReLU(_BaseActivation):
     name = 'keras.activations.relu'
     alias = 'relu'
     string = 'np.maximum(0, {})'
     needs_function = False
 
-  class Sigmoid(_BaseAcivation):
+  class LeakyReLU(_BaseActivation):
+    name = 'keras.layers.LeakyReLU'
+    alias = 'LeakyReLU'
+    string = 'np.where({0} > 0, {0}, {0} * {1})'
+    alpha = 0.3  # default from tensorflow
+    needs_function = False
+
+  class Sigmoid(_BaseActivation):
     name = 'keras.activations.sigmoid'
     alias = 'sigmoid'
     string = 'def sigmoid(x):\n\treturn 1 / (1 + np.exp(-x))'
 
-  class Softmax(_BaseAcivation):
+  class Softmax(_BaseActivation):
     name = 'keras.activations.softmax'
     alias = 'softmax'
     string = 'def softmax(x):\n\treturn np.exp(x) / np.sum(np.exp(x), axis=0)'
 
-  class Tanh(_BaseAcivation):
+  class Tanh(_BaseActivation):
     name = 'keras.activations.tanh'
     alias = 'tanh'
     string = 'np.tanh({})'  # don't define a function if you don't want your string added to file as a function
     needs_function = False
 
-  class Linear(_BaseAcivation):
+  class Linear(_BaseActivation):
     name = 'keras.activations.linear'
     alias = 'linear'
 
+  class Unsupported(_BaseActivation):  # propagated with act info and returned to Konverter if act is unsupported
+    pass
+
 
 class Layers:
   """
   The class that contains the supported layers and any information we will need to generate models
   ex. function in string format
   To add new layers, use the code_converter function and add them here!
   """
+
   class _BaseLayer:
     name = None
     alias = None
@@ -70,7 +83,7 @@ class _BaseLayer:
   class Dense(_BaseLayer):
     name = 'keras.layers.Dense'
     alias = 'dense'
-    supported_activations = [Activations.ReLU, Activations.Sigmoid, Activations.Softmax, Activations.Tanh, Activations.Linear]
+    supported_activations = [Activations.ReLU, Activations.Sigmoid, Activations.Softmax, Activations.Tanh, Activations.Linear, Activations.LeakyReLU]
     string = 'np.dot({}, w[{}]) + b[{}]'  # n0 is the previous layer, n1 is weight, n2 is bias
 
   class Dropout(_BaseLayer):
@@ -128,7 +141,7 @@ class BaseLayerInfo:
   weights = None
   biases = None
 
-  gamma = None
+  gamma = None  # for BN
   beta = None
   mean = None
   std = None
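As a sanity check, the new np.where string agrees with Keras's own LeakyReLU. A minimal sketch, assuming TensorFlow 2.x is installed (for the eager .numpy() call) and using the 0.3 default alpha noted above:

import numpy as np
from tensorflow.keras.layers import LeakyReLU

x = np.array([-2.0, -0.5, 0.0, 1.5], dtype=np.float32)
alpha = 0.3  # tensorflow's default, per the comment above
ours = np.where(x > 0, x, x * alpha)
keras_out = LeakyReLU(alpha=alpha)(x).numpy()
print(np.allclose(ours, keras_out))  # True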
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "keras-konverter"
-version = "0.2.1"
+version = "0.2.2"
 description = "A tool to convert simple Keras models to pure Python + NumPy"
 readme = "README.md"
 repository = "https://github.com/ShaneSmiskol/Konverter"
