@@ -14,25 +14,25 @@ def __init__(self, kernel, idx_p, Xp, index_dim=-1, name='DiffGenomeKern'):
14
14
self .index_dim = index_dim
15
15
self .kern = SplitKern (kernel ,Xp , index_dim = index_dim )
16
16
super (DEtime , self ).__init__ (input_dim = kernel .input_dim + 1 , active_dims = None , name = name )
17
- self .add_parameter (self .kern )
18
-
17
+ self .link_parameter (self .kern )
18
+
19
19
def K(self, X, X2=None):
    """Symmetric covariance matrix with the first ``idx_p`` rows/columns of
    the second condition block overwritten by the corresponding rows/columns
    of the first block (shared covariance up to the perturbation point —
    presumably the DE time-course construction; confirm against the paper).

    Parameters
    ----------
    X : np.ndarray
        Inputs; column ``self.index_dim`` holds the condition index used by
        ``index_to_slices``.
    X2 : None
        Must be None — only the symmetric case is supported.

    Returns
    -------
    np.ndarray
        The (n, n) covariance matrix.
    """
    # Fix: compare against None with `is`, not `==` (an ndarray X2 would
    # broadcast the comparison and raise ValueError inside `assert`).
    assert X2 is None, "DEtime.K only supports the symmetric case (X2 must be None)"
    K = self.kern.K(X, X2)
    # Nothing to overwrite when the perturbation index is empty or exceeds
    # half the inputs (i.e. the second block).
    if self.idx_p <= 0 or self.idx_p > X.shape[0] / 2:
        return K
    # Locate the start of the second condition block.
    slices = index_to_slices(X[:, self.index_dim])
    idx_start = slices[1][0].start
    idx_end = idx_start + self.idx_p
    # Preserve the within-block covariance before clobbering rows/columns.
    K_c = K[idx_start:idx_end, idx_start:idx_end].copy()
    K[idx_start:idx_end, :] = K[:self.idx_p, :]
    K[:, idx_start:idx_end] = K[:, :self.idx_p]
    K[idx_start:idx_end, idx_start:idx_end] = K_c
    return K
35
-
35
+
36
36
def Kdiag (self ,X ):
37
37
Kdiag = self .kern .Kdiag (X )
38
38
@@ -43,19 +43,19 @@ def Kdiag(self,X):
43
43
idx_start = slices [1 ][0 ].start
44
44
idx_end = idx_start + self .idx_p
45
45
Kdiag [idx_start :idx_end ] = Kdiag [:self .idx_p ]
46
-
46
+
47
47
return Kdiag
48
-
48
+
49
49
def update_gradients_full (self ,dL_dK ,X ,X2 = None ):
50
50
assert X2 == None
51
51
if self .idx_p <= 0 or self .idx_p > X .shape [0 ]/ 2 :
52
52
self .kern .update_gradients_full (dL_dK , X )
53
53
return
54
-
54
+
55
55
slices = index_to_slices (X [:,self .index_dim ])
56
56
idx_start = slices [1 ][0 ].start
57
57
idx_end = idx_start + self .idx_p
58
-
58
+
59
59
self .kern .update_gradients_full (dL_dK [idx_start :idx_end ,:], X [:self .idx_p ],X )
60
60
grad_p1 = self .kern .gradient .copy ()
61
61
self .kern .update_gradients_full (dL_dK [:,idx_start :idx_end ], X , X [:self .idx_p ])
@@ -108,7 +108,7 @@ def K(self,X ,X2=None):
108
108
if len (slices )> 1 :
109
109
[target .__setitem__ ((s ,s2 ), self .kern_cross .K (X [s ,:],X2 [s2 ,:])) for s ,s2 in itertools .product (slices [1 ], slices2 [0 ])]
110
110
if len (slices2 )> 1 :
111
- [target .__setitem__ ((s ,s2 ), self .kern_cross .K (X [s ,:],X2 [s2 ,:])) for s ,s2 in itertools .product (slices [0 ], slices2 [1 ])]
111
+ [target .__setitem__ ((s ,s2 ), self .kern_cross .K (X [s ,:],X2 [s2 ,:])) for s ,s2 in itertools .product (slices [0 ], slices2 [1 ])]
112
112
return target
113
113
114
114
def Kdiag (self ,X ):
@@ -125,7 +125,7 @@ def collate_grads(dL, X, X2, cross=False):
125
125
else :
126
126
self .kern .update_gradients_full (dL ,X ,X2 )
127
127
target [:] += self .kern .gradient
128
-
128
+
129
129
if X2 is None :
130
130
assert dL_dK .shape == (X .shape [0 ],X .shape [0 ])
131
131
[[collate_grads (dL_dK [s ,ss ], X [s ], X [ss ]) for s ,ss in itertools .product (slices_i , slices_i )] for slices_i in slices ]
@@ -154,20 +154,20 @@ def __init__(self, kernel, Xp, name='SplitKern_cross'):
154
154
Xp = np .array ([[Xp ]])
155
155
self .Xp = Xp
156
156
super (SplitKern_cross , self ).__init__ (input_dim = kernel .input_dim , active_dims = None , name = name )
157
-
157
+
158
158
def K(self, X, X2=None):
    """Cross-covariance routed through the split point ``Xp``:
    k(x, Xp) * k(Xp, x') / k(Xp, Xp).

    If ``X2`` is None the symmetric case K(X, X) is returned.
    """
    if X2 is None:
        X2 = X
    to_split = self.kern.K(X, self.Xp)      # (n, 1) — Xp is a single point
    from_split = self.kern.K(self.Xp, X2)   # (1, m)
    norm = self.kern.K(self.Xp, self.Xp)    # (1, 1)
    return np.dot(to_split, from_split) / norm
163
-
163
+
164
164
def Kdiag(self, X):
    """Diagonal of K(X, X): k(x_i, Xp) * k(Xp, x_i) / k(Xp, Xp), as a 1-D array.

    Bug fix: the previous ``np.inner(k1, k2.T)`` on the (n, 1)-shaped factors
    built the full (n, n) outer-product matrix rather than the length-n
    diagonal. The elementwise form below matches the per-point gradient
    formulas used in ``update_gradients_diag`` (dL_dk1 = dL_dKdiag*k2[0]/k3).
    """
    k1 = self.kern.K(X, self.Xp)[:, 0]        # k(x_i, Xp), shape (n,)
    k2 = self.kern.K(self.Xp, X)[0, :]        # k(Xp, x_i), shape (n,)
    k3 = self.kern.K(self.Xp, self.Xp)[0, 0]  # scalar normaliser k(Xp, Xp)
    return k1 * k2 / k3
166
166
167
167
def update_gradients_full (self , dL_dK , X , X2 = None ):
168
168
if X2 is None :
169
169
X2 = X
170
-
170
+
171
171
k1 = self .kern .K (X ,self .Xp )
172
172
k2 = self .kern .K (self .Xp ,X2 )
173
173
k3 = self .kern .K (self .Xp ,self .Xp )
@@ -181,7 +181,7 @@ def update_gradients_full(self, dL_dK, X, X2=None):
181
181
grad += self .kern .gradient .copy ()
182
182
self .kern .update_gradients_full (np .array ([[dL_dk3 ]]),self .Xp ,self .Xp )
183
183
grad += self .kern .gradient .copy ()
184
-
184
+
185
185
self .kern .gradient = grad
186
186
187
187
def update_gradients_diag (self , dL_dKdiag , X ):
@@ -191,14 +191,14 @@ def update_gradients_diag(self, dL_dKdiag, X):
191
191
dL_dk1 = dL_dKdiag * k2 [0 ]/ k3
192
192
dL_dk2 = dL_dKdiag * k1 [:,0 ]/ k3
193
193
dL_dk3 = - dL_dKdiag * (k1 [:,0 ]* k2 [0 ]).sum ()/ (k3 * k3 )
194
-
194
+
195
195
self .kern .update_gradients_full (dL_dk1 [:,None ],X ,self .Xp )
196
196
grad1 = self .kern .gradient .copy ()
197
197
self .kern .update_gradients_full (dL_dk2 [None ,:],self .Xp ,X )
198
198
grad2 = self .kern .gradient .copy ()
199
199
self .kern .update_gradients_full (np .array ([[dL_dk3 ]]),self .Xp ,self .Xp )
200
200
grad3 = self .kern .gradient .copy ()
201
-
201
+
202
202
self .kern .gradient = grad1 + grad2 + grad3
203
-
203
+
204
204
0 commit comments