ugm_bbvi.py
# -*- coding: UTF-8 -*-
"""
Black Box Variational Inference to approximate a univariate Gaussian
with unknown mean and precision.
[IN PROCESS]: convergence problems
"""
import edward as ed
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from edward.models import Gamma, Normal

N = 1000

# Data generation: N samples from the true distribution N(7, 1)
xn = np.random.normal(7, 1, [N])
plt.plot(xn, 'go')
plt.title('Simulated dataset')
plt.show()
print('True mu=7')
print('True sigma=1')

# Prior hyperparameters (vague Normal-Gamma prior)
m = tf.constant([0.])
beta = tf.constant([0.0001])  # prior precision of mu
a = tf.constant([0.001])
b = tf.constant([0.001])

# Probabilistic model: mu ~ Normal, sigma ~ Gamma (sigma is a precision)
mu = Normal(loc=m, scale=1.0 / tf.sqrt(beta))
sigma = Gamma(a, b)
x = Normal(loc=tf.tile(mu, [N]), scale=tf.tile(1.0 / tf.sqrt(sigma), [N]))

# Variational model: softplus keeps the scale and Gamma parameters positive
qmu = Normal(loc=tf.Variable(tf.random_normal([1])),
             scale=tf.nn.softplus(tf.Variable(tf.random_normal([1]))))
qsigma = Gamma(tf.nn.softplus(tf.Variable(tf.random_normal([1]))),
               tf.nn.softplus(tf.Variable(tf.random_normal([1]))))

# Inference
inference = ed.KLqp({mu: qmu, sigma: qsigma}, data={x: xn})
inference.run(n_iter=1500, n_samples=30)
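
# Optional diagnostic: a minimal sketch (added, not in the original script)
# using Edward's documented initialize/update loop instead of a single
# inference.run() call, to record the ELBO loss per iteration and inspect
# the convergence problems noted in the docstring. Executed here, it simply
# re-runs the same optimization a second time.
inference_diag = ed.KLqp({mu: qmu, sigma: qsigma}, data={x: xn})
inference_diag.initialize(n_iter=1500, n_samples=30)
sess = ed.get_session()
tf.global_variables_initializer().run()
losses = []
for _ in range(inference_diag.n_iter):
    info_dict = inference_diag.update()
    losses.append(info_dict['loss'])
inference_diag.finalize()
plt.plot(losses)
plt.title('ELBO loss per iteration')
plt.show()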

# Results: posterior means of the variational approximations
sess = ed.get_session()
print('Inferred mu={}'.format(sess.run(qmu.mean())))
# sigma is a precision, so the inferred std dev is 1/sqrt(E[precision])
print('Inferred sigma={}'.format(sess.run(1.0 / tf.sqrt(qsigma.mean()))))
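
# Optional sanity check: a minimal sketch (added, not in the original
# script). The Normal-Gamma model above is conjugate, so the exact posterior
# is available in closed form and can be compared against the BBVI estimates.
# The Python floats mirror the tf prior constants defined above.
m0, beta0, a0, b0 = 0., 0.0001, 0.001, 0.001
xm = np.mean(xn)
beta_n = beta0 + N
m_n = (beta0 * m0 + N * xm) / beta_n
a_n = a0 + N / 2.
b_n = (b0 + 0.5 * np.sum((xn - xm) ** 2)
       + beta0 * N * (xm - m0) ** 2 / (2. * beta_n))
print('Exact posterior mu={}'.format(m_n))
print('Exact posterior sigma={}'.format(np.sqrt(b_n / a_n)))  # 1/sqrt(E[precision])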