layers.py
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 22 21:59:52 2019
@author: Deep
"""
import numpy as np
from keras.layers import *
from keras.activations import softmax
from keras.models import Model
from keras.optimizers import Nadam, Adam
from keras.regularizers import l2
import keras.backend as K
def create_pretrained_embedding(pretrained_weights_path, trainable=False, **kwargs):
    "Create an embedding layer from a pretrained weights array saved with np.save"
    pretrained_weights = np.load(pretrained_weights_path)
    in_dim, out_dim = pretrained_weights.shape
    embedding = Embedding(in_dim, out_dim, weights=[pretrained_weights],
                          trainable=trainable, **kwargs)
    return embedding
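
# Example usage (a sketch, not part of the original file; "w2v_weights.npy" is a
# hypothetical path to a (vocab_size, embedding_dim) float array saved with np.save):
#   word_embedding = create_pretrained_embedding("w2v_weights.npy", trainable=False)
#   q1_embedded = word_embedding(q1_input)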
"""
双向LSTM 获取Char embedding
"""
def create_char_embedding(charsize, maxlen, max_char_len, char_embedding_dim):
    "Create a character-level embedding layer (maxlen and max_char_len are currently unused)"
    char_embedding = Embedding(input_dim=charsize, output_dim=char_embedding_dim,
                               embeddings_initializer='lecun_uniform',
                               mask_zero=False)
    # A recurrent char encoder could be stacked on top, e.g.:
    # char_embedding = Bidirectional(GRU(16, return_sequences=True), merge_mode="concat")
    return char_embedding
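
# Sketch (an assumption, not in the original file) of how the char embedding could feed
# a per-word bidirectional GRU, as hinted by the commented line above; charsize, maxlen
# and max_char_len are the char vocabulary size, words per sample and chars per word:
#   char_input = Input(shape=(maxlen, max_char_len), dtype="int32")
#   char_embedded = create_char_embedding(charsize, maxlen, max_char_len, 32)(char_input)
#   char_features = TimeDistributed(Bidirectional(GRU(16)))(char_embedded)  # (batch, maxlen, 32)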
def soft_attention_alignment(input_1, input_2):
    "Align two text representations with neural soft attention"
    # attention[b, i, j] = dot product of input_1's i-th and input_2's j-th time step
    attention = Dot(axes=-1)([input_1, input_2])
    # Normalize over input_1's positions and input_2's positions respectively.
    w_att_1 = Lambda(lambda x: softmax(x, axis=1))(attention)
    w_att_2 = Permute((2, 1))(Lambda(lambda x: softmax(x, axis=2))(attention))
    # in1_aligned: input_1 summarized for every position of input_2, and vice versa.
    in1_aligned = Dot(axes=1)([w_att_1, input_1])
    in2_aligned = Dot(axes=1)([w_att_2, input_2])
    return in1_aligned, in2_aligned
def submult(input_1, input_2):
    "Element-wise multiply and subtract the inputs, then concatenate the results"
    mult = Multiply()([input_1, input_2])
    sub = substract(input_1, input_2)
    out_ = Concatenate()([sub, mult])
    return out_
def substract(input_1, input_2):
    "Subtract element-wise (input_1 - input_2)"
    neg_input_2 = Lambda(lambda x: -x)(input_2)
    out_ = Add()([input_1, neg_input_2])
    return out_
def apply_multiple(input_, layers):
    "Apply each layer to the input, then concatenate the results"
    if not len(layers) > 1:
        raise ValueError('Layers list should contain more than 1 layer')
    agg_ = [layer(input_) for layer in layers]
    out_ = Concatenate()(agg_)
    return out_
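

if __name__ == "__main__":
    # Minimal sketch of how the helpers above can be wired into an ESIM-style
    # sentence-pair model. Shapes and hyperparameters below are illustrative
    # assumptions, not values from the original project; a pretrained embedding
    # built with create_pretrained_embedding() could replace the plain Embedding.
    maxlen, vocab_size, embed_dim, lstm_units = 30, 10000, 100, 64

    q1_in = Input(shape=(maxlen,), dtype="int32")
    q2_in = Input(shape=(maxlen,), dtype="int32")

    embedding = Embedding(vocab_size, embed_dim)
    encoder = Bidirectional(LSTM(lstm_units, return_sequences=True))
    q1_encoded = encoder(embedding(q1_in))
    q2_encoded = encoder(embedding(q2_in))

    # Soft-align the two encoded sequences against each other.
    q1_aligned, q2_aligned = soft_attention_alignment(q1_encoded, q2_encoded)

    # Local inference: enrich each sequence with its aligned counterpart.
    q1_combined = Concatenate()([q1_encoded, q2_aligned, submult(q1_encoded, q2_aligned)])
    q2_combined = Concatenate()([q2_encoded, q1_aligned, submult(q2_encoded, q1_aligned)])

    # Pool each enriched sequence several ways and concatenate the results.
    q1_rep = apply_multiple(q1_combined, [GlobalAvgPool1D(), GlobalMaxPool1D()])
    q2_rep = apply_multiple(q2_combined, [GlobalAvgPool1D(), GlobalMaxPool1D()])

    merged = Concatenate()([q1_rep, q2_rep])
    out = Dense(1, activation="sigmoid")(merged)

    model = Model(inputs=[q1_in, q2_in], outputs=out)
    model.compile(optimizer=Adam(), loss="binary_crossentropy", metrics=["accuracy"])
    model.summary()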