Skip to content

Commit a4675ba

Browse files
committed Feb 5, 2018
consider MRD layers in prediction method
1 parent 4ec944f commit a4675ba

File tree

6 files changed

+13
-6
lines changed

6 files changed

+13
-6
lines changed
 

‎deepgp/layers/mrd.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ def __init__(self, Y, dim_down, dim_up, likelihood, MLP_dims=None, X=None, X_var
2121
self.scale = 1.
2222

2323
if back_constraint:
24-
from .mlp import MLP
24+
from deepgp.encoder.mlp import MLP
2525
from copy import deepcopy
2626
self.encoder = MLP([dim_down, int((dim_down+dim_up)*2./3.), int((dim_down+dim_up)/3.), dim_up] if MLP_dims is None else [dim_down]+deepcopy(MLP_dims)+[dim_up])
2727
X = self.encoder.predict(Y.mean.values if isinstance(Y, VariationalPosterior) else Y)

‎deepgp/models/model.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -338,7 +338,7 @@ def collect_all_XY(self, root=0):
338338
if self.mpi_comm.rank==root: return XY
339339
else: return None
340340

341-
def predict(self, Xnew, full_cov=False, Y_metadata=None, kern=None):
341+
def predict(self, Xnew, full_cov=False, Y_metadata=None, kern=None, view=0):
342342
"""Make a prediction from the deep Gaussian process model for a given input"""
343343
from GPy.core.parameterization.variational import NormalPosterior
344344

@@ -359,7 +359,11 @@ def predict(self, Xnew, full_cov=False, Y_metadata=None, kern=None):
359359
if var.shape[1]==1:
360360
var = np.tile(var,mean.shape[1])
361361
x = NormalPosterior(mean, var)
362-
return self.layers[0].predict(x)
362+
mrd_flag = hasattr(self.layers[0], 'views')
363+
if mrd_flag:
364+
return self.layers[0].views[view].predict(x)
365+
else:
366+
return self.layers[0].predict(x)
363367

364368
@Model.optimizer_array.setter
365369
def optimizer_array(self, p):

‎examples/example_supervised_learning.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@
4646
# Number of inducing points to use
4747
num_inducing = 40
4848
# Whether to use back-constraint for variational posterior
49-
back_constraint = False
49+
back_constraint = True
5050
# Dimensions of the MLP back-constraint if set to true
5151
encoder_dims=[[300],[150]]
5252

‎examples/example_unsupervised_learning.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
output_var = m.layers[i].Y.var() if i==0 else m.layers[i].Y.mean.var()
3838
m.layers[i].Gaussian_noise.variance = output_var*0.01
3939

40-
m.optimize(max_iters=5000, messages=True)
40+
m.optimize(max_iters=500, messages=True)
4141

4242

4343
#--------- Inspection ----------#
@@ -50,4 +50,5 @@
5050
plt.figure(figsize=(5,5))
5151
deepgp.util.visualize_DGP(m, labels, layer=0, dims=[1,2]); plt.title('Layer 0')
5252
plt.figure(figsize=(5,5))
53-
deepgp.util.visualize_DGP(m, labels, layer=1, dims=[0,1]); plt.title('Layer 1')
53+
deepgp.util.visualize_DGP(m, labels, layer=1, dims=[0,1]); plt.title('Layer 1')
54+
plt.show()

‎files.txt

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
/usr/local/lib/python2.7/dist-packages/DGP-1.0-py2.7.egg

‎setup.py

+1
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ def read(fname):
2020
keywords = "deep learning",
2121
url = "",
2222
packages = ["deepgp",
23+
"deepgp.encoder",
2324
"deepgp.inference",
2425
"deepgp.layers",
2526
"deepgp.util",

0 commit comments

Comments (0)