
Commit 0ea319e

Merge pull request #65 from majianjia/dev
Add version, KLD length
2 parents 4a63e36 + e5e513d commit 0ea319e

File tree

6 files changed: +39 -5 lines changed


.gitignore

Lines changed: 1 addition & 1 deletion
@@ -10,4 +10,4 @@ __pycache__
 *.h5
 *.obj
 *.sconsign.dblite
-.ipynb_checkpoints
+.ipynb_checkpoints

docs/index.md

Lines changed: 2 additions & 0 deletions
@@ -5,6 +5,8 @@
 
 NNoM is a high-level inference Neural Network library specifically for microcontrollers.
 
+Document version 0.2.1
+
 [[Chinese Intro]](rt-thread_guide.md)
 
 **Highlights**

examples/auto_test/.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,4 +11,6 @@ Debug
1111
*.sconsign.dblite
1212
.ipynb_checkpoints
1313
weights.h
14-
result.csv
14+
result.csv
15+
test_*
16+
_cifar.py

inc/nnom.h

Lines changed: 6 additions & 0 deletions
@@ -25,6 +25,12 @@
 #define q15_t int16_t
 #define q31_t int32_t
 
+/* version */
+#define NNOM_MAJORVERSION 0L /**< major version number */
+#define NNOM_SUBVERSION 2L /**< minor version number */
+#define NNOM_REVISION 1L /**< revise version number */
+#define NNOM_VERSION ((NNOM_MAJORVERSION * 10000) + (NNOM_SUBVERSION * 100) + NNOM_REVISION)
+
 typedef enum
 {
 	NN_SUCCESS = 0, /**< No error */
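
A quick check of the packing scheme the new NNOM_VERSION macro uses: the three fields are packed in base 100, so version 0.2.1 becomes 201. A minimal sketch in plain Python, not part of the library:

major, minor, revision = 0, 2, 1
packed = major * 10000 + minor * 100 + revision   # same arithmetic as NNOM_VERSION
assert packed == 201
# The three fields can be recovered from the packed value:
assert (packed // 10000, (packed // 100) % 100, packed % 100) == (major, minor, revision)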

scripts/nnom_utils.py

Lines changed: 25 additions & 2 deletions
@@ -350,7 +350,7 @@ def layers_output_ranges(model, x_test, kld=True, calibrate_size=1000):
 
        # saturation shift, using KLD method
        # Ref: http://on-demand.gputechconf.com/gtc/2017/presentation/s7310-8-bit-inference-with-tensorrt.pdf
-       if(kld and not is_shift_fixed(layer) and "input" not in layer.name): # test, also do not use kld in input layer
+       if(kld and not is_shift_fixed(layer) and "input" not in layer.name and "dense" not in layer.name): # test, also do not use kld in input layer
            import scipy.stats
            abs_max = max(abs(max_val), abs(min_val))
            small_var = 1e-5
@@ -359,7 +359,7 @@ def layers_output_ranges(model, x_test, kld=True, calibrate_size=1000):
            flat_hist = np.histogram(features.flatten(), bins=bins)[0]
            kl_loss = []
            kl_shifts = []
-           for shift in range(8):
+           for shift in range(4):
                t = 2 ** (dec_bits + shift) # 2-based threshold
                act = np.round(features.flatten() * t)
                act = act / t
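
To make the changed loop concrete, here is a minimal, self-contained sketch of the KLD saturation search this hunk belongs to; the function name kld_best_shift is hypothetical, features and dec_bits stand in for the surrounding variables, and the rounding is simplified relative to the library's exact code:

import numpy as np
import scipy.stats

def kld_best_shift(features, dec_bits, bins=128, max_shift=4):
    flat = features.flatten()
    small_var = 1e-5  # avoid zero bins in the KL computation
    edges = np.histogram_bin_edges(flat, bins=bins)
    ref_hist = np.histogram(flat, bins=edges)[0] + small_var
    kl_loss = []
    for shift in range(max_shift):  # this commit narrows the search from 8 shifts to 4
        t = 2 ** (dec_bits + shift)  # 2-based threshold
        act = np.round(flat * t) / t  # simulate fixed-point rounding at this shift
        act_hist = np.histogram(act, bins=edges)[0] + small_var
        # KL divergence between the original and the quantised distribution
        kl_loss.append(scipy.stats.entropy(ref_hist, act_hist))
    return int(np.argmin(kl_loss))  # the shift that distorts the distribution least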
@@ -701,6 +701,29 @@ def is_skipable_layer(layer):
        fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0]))
    else:
        raise Exception('unsupported layer', layer.name, layer)
+
+   """
+   # temporary fix for activations attached to layers during construction
+   def is_activation_attached(layer):
+       if(("Softmax" in layer.output.name and "softmax" not in layer.name) or
+          ("Relu" in layer.output.name and "re_lu" not in layer.name) or
+          ("Sigmoid" in layer.output.name and "sigmoid" not in layer.name) or
+          ("Tanh" in layer.output.name and "tanh" not in layer.name)):
+           return True
+       return False
+   if "input" not in layer.name and is_activation_attached(layer):
+       inp = layer.output.name.replace(':', '/').split('/')[0]
+       cfg = layer.get_config()
+       if(cfg['activation'] == 'relu'):
+           fp.write('\tlayer[%s] = model.active(act_relu(), layer[%s]);\n'%(id, LI[inp][0]))
+       if(cfg['activation'] == 'tanh'):
+           fp.write('\tlayer[%s] = model.active(act_tanh(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0]))
+       if(cfg['activation'] == 'sigmoid'):
+           fp.write('\tlayer[%s] = model.active(act_sigmoid(%s_OUTPUT_SHIFT), layer[%s]);\n'%(id, inp.upper(), LI[inp][0]))
+       elif(cfg['activation'] == 'softmax'):
+           fp.write('\tlayer[%s] = model.hook(Softmax(), layer[%s]);\n'%(id, LI[inp][0]))
+   """
+
    # FIXME, test later.
    if('softmax' in layer.name
        or ('activation' in layer.name and layer.get_config()['activation'] == 'softmax')):
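
The commented-out workaround above keys off layer.output.name rather than layer.name. A hypothetical illustration of why (the printed tensor names assume a TF1-style Keras graph and can differ across TensorFlow versions):

from tensorflow import keras

# A Dense layer with a fused activation: the activation is not a separate
# layer, so it only surfaces in the name of the output tensor.
inp = keras.Input(shape=(4,))
layer = keras.layers.Dense(10, activation='softmax', name='dense_1')
out = layer(inp)
print(layer.name)  # 'dense_1' -- the layer name gives no hint of the activation
print(out.name)    # e.g. 'dense_1/Softmax:0' -- what is_activation_attached() detects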

src/nnom.c

Lines changed: 2 additions & 1 deletion
@@ -868,7 +868,8 @@ nnom_status_t model_compile(nnom_model_t *m, nnom_layer_t *input, nnom_layer_t *output)
 	if (output == NULL)
 		m->tail = find_last(input);
 
-	NNOM_LOG("\nStart compiling model...\n");
+	NNOM_LOG("\nNNoM version %d.%d.%d\n", NNOM_MAJORVERSION, NNOM_SUBVERSION, NNOM_REVISION);
+	NNOM_LOG("Start compiling model...\n");
 	NNOM_LOG("Layer(#) Activation output shape ops(MAC) mem(in, out, buf) mem blk lifetime\n");
 	NNOM_LOG("-------------------------------------------------------------------------------------------------\n");