Skip to content

Commit bb1bc50

Browse files
gehbiszumeisppk42Peter Paul Kiefer
authored
to_dict() and from_dict() functionality for Coregionalize Kernel and MixedNoise Likelihood class, appveyor CI resurrected (SheffieldML#951)
This PR adds two main things to GPy: - to- and from-dict functions for the kernels listed below - a fix for the appveyor CI Please see the squashed commit messages listed below. Authors: @gehbiszumeis @ppk42 respectively Reviewer: @ekalosak --- * new: added to_dict() method to Coregionalize kernel class * new: added to_dict() method to MixedNoise likelihood class * fix: made Y_metadata dict content serializable * fix: typo * added additional needed parameters to to_dict() method for Coregionalize kernel + added _build_from_input dict method * new: added possibility to build MixedNoise likelihood from input_dict * Y_metadata conversion from serializable to np.array when loading from dict * fix: rework Y_metadata part for compatibility with unittests !minor * conda cleanup in appveyors pipeline * conda clean up after conda update * conda clean before conda update * try pinning packages for conda * revert all conda changes * conda clean all (not only packages) * use conda update anaconda * pin conda package * pin conda package * try installing charset-normalizer beforehand * try to get from conda-forge * revert all conda changes * Try to fix the conda update challenge. See: https://community.intel.com/t5/Intel-Distribution-for-Python/Conda-update-Conda-fails/td-p/1126174 It is just a try for a different context/(conda version). * Still fixing build error on appveyor I also use a newer miniconda version for greater python versions. * Update appveyor.yml Thinking it over, I decided to use miniconda38 for all python versions unless python 3.5. * revert miniconda versioning changes * adjust GPy version in appveyor.yml * 1st attempt bring the appveyor build to life again * SheffieldML#955 fixing ci build on appveyor After bringing the miniconda env to work again, the wrong matplotlib version was used. This commit should fix that. * SheffieldML#955 Fix CI build Freezing numpy and scipy was a bad idea. I freeze matplotlib dependent on the python version only. 
* add: built_from_dict method for White Kernel Co-authored-by: Peter Paul Kiefer <[email protected]> Co-authored-by: Peter Paul Kiefer <[email protected]>
1 parent 3e19a85 commit bb1bc50

File tree

7 files changed

+84
-12
lines changed

7 files changed

+84
-12
lines changed

GPy/core/gp.py

+8-4
Original file line numberDiff line numberDiff line change
@@ -134,9 +134,10 @@ def to_dict(self, save_data=True):
134134
if self.mean_function is not None:
135135
input_dict["mean_function"] = self.mean_function.to_dict()
136136
input_dict["inference_method"] = self.inference_method.to_dict()
137-
#FIXME: Assumes the Y_metadata is serializable. We should create a Metadata class
137+
# TODO: We should create a Metadata class
138138
if self.Y_metadata is not None:
139-
input_dict["Y_metadata"] = self.Y_metadata
139+
# make Y_metadata serializable
140+
input_dict["Y_metadata"] = {k: self.Y_metadata[k].tolist() for k in self.Y_metadata.keys()}
140141
if self.normalizer is not None:
141142
input_dict["normalizer"] = self.normalizer.to_dict()
142143
return input_dict
@@ -162,9 +163,12 @@ def _format_input_dict(input_dict, data=None):
162163
input_dict["mean_function"] = mean_function
163164
input_dict["inference_method"] = GPy.inference.latent_function_inference.LatentFunctionInference.from_dict(input_dict["inference_method"])
164165

165-
#FIXME: Assumes the Y_metadata is serializable. We should create a Metadata class
166+
# converts Y_metadata from serializable to array. We should create a Metadata class
166167
Y_metadata = input_dict.get("Y_metadata")
167-
input_dict["Y_metadata"] = Y_metadata
168+
if isinstance(Y_metadata, dict):
169+
input_dict["Y_metadata"] = {k: np.array(Y_metadata[k]) for k in Y_metadata.keys()}
170+
else:
171+
input_dict["Y_metadata"] = Y_metadata
168172

169173
normalizer = input_dict.get("normalizer")
170174
if normalizer is not None:

GPy/core/parameterization/variational.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ def __init__(self, means=None, variances=None, name='latent space', *a, **kw):
106106
self.link_parameters(self.mean, self.variance)
107107
self.num_data, self.input_dim = self.mean.shape
108108
if self.has_uncertain_inputs():
109-
assert self.variance.shape == self.mean.shape, "need one variance per sample and dimenion"
109+
assert self.variance.shape == self.mean.shape, "need one variance per sample and dimension"
110110

111111
def set_gradients(self, grad):
112112
self.mean.gradient, self.variance.gradient = grad

GPy/kern/src/coregionalize.py

+25
Original file line numberDiff line numberDiff line change
@@ -134,3 +134,28 @@ def gradients_X(self, dL_dK, X, X2=None):
134134

135135
def gradients_X_diag(self, dL_dKdiag, X):
136136
return np.zeros(X.shape)
137+
138+
def to_dict(self):
139+
"""
140+
Convert the object into a json serializable dictionary.
141+
142+
Note: It uses the private method _save_to_input_dict of the parent.
143+
144+
:return dict: json serializable dictionary containing the needed information to instantiate the object
145+
"""
146+
147+
input_dict = super(Coregionalize, self)._save_to_input_dict()
148+
input_dict["class"] = "GPy.kern.Coregionalize"
149+
# W and kappa must be serializable
150+
input_dict["W"] = self.W.values.tolist()
151+
input_dict["kappa"] = self.kappa.values.tolist()
152+
input_dict["output_dim"] = self.output_dim
153+
return input_dict
154+
155+
@staticmethod
156+
def _build_from_input_dict(kernel_class, input_dict):
157+
useGPU = input_dict.pop('useGPU', None)
158+
# W and kappa must be converted back to numpy arrays
159+
input_dict['W'] = np.array(input_dict['W'])
160+
input_dict['kappa'] = np.array(input_dict['kappa'])
161+
return Coregionalize(**input_dict)

GPy/kern/src/static.py

+5
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,11 @@ def to_dict(self):
6868
input_dict = super(White, self)._save_to_input_dict()
6969
input_dict["class"] = "GPy.kern.White"
7070
return input_dict
71+
72+
@staticmethod
73+
def _build_from_input_dict(kernel_class, input_dict):
74+
useGPU = input_dict.pop('useGPU', None)
75+
return White(**input_dict)
7176

7277
def K(self, X, X2=None):
7378
if X2 is None:

GPy/likelihoods/mixed_noise.py

+29
Original file line numberDiff line numberDiff line change
@@ -80,3 +80,32 @@ def samples(self, gp, Y_metadata):
8080
_ysim = np.array([np.random.normal(lik.gp_link.transf(gpj), scale=np.sqrt(lik.variance), size=1) for gpj in gp_filtered.flatten()])
8181
Ysim[flt,:] = _ysim.reshape(n1,N2)
8282
return Ysim
83+
84+
def to_dict(self):
85+
"""
86+
Convert the object into a json serializable dictionary.
87+
88+
Note: It uses the private method _save_to_input_dict of the parent.
89+
90+
:return dict: json serializable dictionary containing the needed information to instantiate the object
91+
"""
92+
93+
# input_dict = super(MixedNoise, self)._save_to_input_dict()
94+
input_dict = {"name": self.name,
95+
"class": "GPy.likelihoods.MixedNoise",
96+
"likelihoods_list": []}
97+
for ii in range(len(self.likelihoods_list)):
98+
input_dict["likelihoods_list"].append(self.likelihoods_list[ii].to_dict())
99+
100+
return input_dict
101+
102+
@staticmethod
103+
def _build_from_input_dict(likelihood_class, input_dict):
104+
import copy
105+
input_dict = copy.deepcopy(input_dict)
106+
# gp_link_dict = input_dict.pop('gp_link_dict')
107+
# import GPy
108+
# gp_link = GPy.likelihoods.link_functions.GPTransformation.from_dict(gp_link_dict)
109+
# input_dict["gp_link"] = gp_link
110+
input_dict['likelihoods_list'] = [Likelihood.from_dict(l) for l in input_dict['likelihoods_list']]
111+
return likelihood_class(**input_dict)

appveyor.yml

+12-6
Original file line numberDiff line numberDiff line change
@@ -3,18 +3,23 @@ environment:
33
secure: 8/ZjXFwtd1S7ixd7PJOpptupKKEDhm2da/q3unabJ00=
44
COVERALLS_REPO_TOKEN:
55
secure: d3Luic/ESkGaWnZrvWZTKrzO+xaVwJWaRCEP0F+K/9DQGPSRZsJ/Du5g3s4XF+tS
6-
gpy_version: 1.9.9
6+
gpy_version: 1.10.0
77
matrix:
88
- PYTHON_VERSION: 3.5
99
MINICONDA: C:\Miniconda35-x64
10+
MPL_VERSION: 3.0.0
1011
- PYTHON_VERSION: 3.6
11-
MINICONDA: C:\Miniconda36-x64
12+
MINICONDA: C:\Miniconda3-x64
13+
MPL_VERSION: 3.3.4
1214
- PYTHON_VERSION: 3.7
13-
MINICONDA: C:\Miniconda36-x64
15+
MINICONDA: C:\Miniconda3-x64
16+
MPL_VERSION: 3.3.4
1417
- PYTHON_VERSION: 3.8
15-
MINICONDA: C:\Miniconda36-x64
18+
MINICONDA: C:\Miniconda3-x64
19+
MPL_VERSION: 3.3.4
1620
- PYTHON_VERSION: 3.9
17-
MINICONDA: C:\Miniconda36-x64
21+
MINICONDA: C:\Miniconda3-x64
22+
MPL_VERSION: 3.3.4
1823

1924
#configuration:
2025
# - Debug
@@ -25,7 +30,8 @@ install:
2530
- conda config --set always_yes yes --set changeps1 no
2631
- conda update -q conda
2732
- conda info -a
28-
- "conda create -q -n build-environment python=%PYTHON_VERSION% numpy scipy matplotlib"
33+
# github issue #955: freeze build version of matplotlib
34+
- "conda create -q -n build-environment python=%PYTHON_VERSION% numpy scipy matplotlib=%MPL_VERSION%"
2935
- activate build-environment
3036
# We need wheel installed to build wheels
3137
- python -m pip install wheel

setup.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -118,8 +118,10 @@ def ismac():
118118
ext_mods = []
119119

120120
install_requirements = ['numpy>=1.7', 'six', 'paramz>=0.9.0', 'cython>=0.29']
121+
matplotlib_version = 'matplotlib==3.3.4'
121122
if sys.version_info < (3, 6):
122123
install_requirements += ['scipy>=1.3.0,<1.5.0']
124+
matplotlib_version = 'matplotlib==3.0.0'
123125
else:
124126
install_requirements += ['scipy>=1.3.0']
125127

@@ -174,7 +176,8 @@ def ismac():
174176
'optional':['mpi4py',
175177
'ipython>=4.0.0',
176178
],
177-
'plotting':['matplotlib >= 3.0',
179+
#matplotlib Version see github issue #955
180+
'plotting':[matplotlib_version,
178181
'plotly >= 1.8.6'],
179182
'notebook':['jupyter_client >= 4.0.6',
180183
'ipywidgets >= 4.0.3',

0 commit comments

Comments
 (0)