Automate choice of posterior approximation in VI algorithms (#775)
* automate posterior approx in ed.klqp/klpq classes

* update tests
dustinvtran authored Sep 30, 2017
1 parent c22a6c7 commit c2b61a2
Showing 4 changed files with 348 additions and 38 deletions.
39 changes: 37 additions & 2 deletions edward/inferences/klpq.py
@@ -9,6 +9,11 @@
 from edward.models import RandomVariable
 from edward.util import copy, get_descendants
 
+try:
+  from edward.models import Normal
+except Exception as e:
+  raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
+
 
 class KLpq(VariationalInference):
   """Variational inference with the KL divergence
@@ -41,8 +46,38 @@ class KLpq(VariationalInference):
   where $z^{(s)} \sim q(z; \lambda)$ and $\\beta^{(s)}
   \sim q(\\beta)$.
   """
-  def __init__(self, *args, **kwargs):
-    super(KLpq, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(KLpq, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
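For context, here is a minimal usage sketch of the behavior this commit adds. It is not part of the diff: the toy regression model, data, and names (`X`, `w`, `b`, `qw`, `qb`) are illustrative, assuming Edward 1.x on TensorFlow 1.x.

  import edward as ed
  import numpy as np
  import tensorflow as tf
  from edward.models import Normal

  # Toy Bayesian linear regression with synthetic data.
  x_train = np.random.randn(50, 3).astype(np.float32)
  y_train = np.random.randn(50).astype(np.float32)

  X = tf.placeholder(tf.float32, [50, 3])
  w = Normal(loc=tf.zeros(3), scale=tf.ones(3))
  b = Normal(loc=tf.zeros(1), scale=tf.ones(1))
  y = Normal(loc=ed.dot(X, w) + b, scale=tf.ones(50))

  # Before this commit: each posterior approximation is spelled out by hand.
  qw = Normal(loc=tf.Variable(tf.random_normal([3])),
              scale=tf.nn.softplus(tf.Variable(tf.random_normal([3]))))
  qb = Normal(loc=tf.Variable(tf.random_normal([1])),
              scale=tf.nn.softplus(tf.Variable(tf.random_normal([1]))))
  inference = ed.KLqp({w: qw, b: qb}, data={X: x_train, y: y_train})

  # After this commit: pass a list, and an equivalent Normal approximation
  # (free location, softplus-constrained scale) is built per variable.
  inference = ed.KLqp([w, b], data={X: x_train, y: y_train})
  inference.run(n_samples=5, n_iter=250)

The `tf.variable_scope(None, default_name="posterior")` wrapper in the diff uniquifies the scope of each automatically built approximation, so constructing several inference objects does not collide on variable names.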
272 changes: 256 additions & 16 deletions edward/inferences/klqp.py
@@ -47,8 +47,38 @@ class KLqp(VariationalInference):
   where $z^{(s)} \sim q(z; \lambda)$ and $\\beta^{(s)}
   \sim q(\\beta)$.
   """
-  def __init__(self, *args, **kwargs):
-    super(KLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(KLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, kl_scaling=None, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -135,8 +165,38 @@ class ReparameterizationKLqp(VariationalInference):
   This class minimizes the objective using the reparameterization
   gradient.
   """
-  def __init__(self, *args, **kwargs):
-    super(ReparameterizationKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ReparameterizationKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -162,8 +222,38 @@ class ReparameterizationKLKLqp(VariationalInference):
   This class minimizes the objective using the reparameterization
   gradient and an analytic KL term.
   """
-  def __init__(self, *args, **kwargs):
-    super(ReparameterizationKLKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ReparameterizationKLKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, kl_scaling=None, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -203,8 +293,38 @@ class ReparameterizationEntropyKLqp(VariationalInference):
   This class minimizes the objective using the reparameterization
   gradient and an analytic entropy term.
   """
-  def __init__(self, *args, **kwargs):
-    super(ReparameterizationEntropyKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ReparameterizationEntropyKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -231,8 +351,38 @@ class ScoreKLqp(VariationalInference):
   This class minimizes the objective using the score function
   gradient.
   """
-  def __init__(self, *args, **kwargs):
-    super(ScoreKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ScoreKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -258,8 +408,38 @@ class ScoreKLKLqp(VariationalInference):
   This class minimizes the objective using the score function gradient
   and an analytic KL term.
   """
-  def __init__(self, *args, **kwargs):
-    super(ScoreKLKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ScoreKLKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, kl_scaling=None, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -299,8 +479,38 @@ class ScoreEntropyKLqp(VariationalInference):
   This class minimizes the objective using the score function gradient
   and an analytic entropy term.
   """
-  def __init__(self, *args, **kwargs):
-    super(ScoreEntropyKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ScoreEntropyKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
@@ -333,8 +543,38 @@ class ScoreRBKLqp(VariationalInference):
   Rao-Blackwellize within a node such as when a node represents
   multiple random variables via non-scalar batch shape.
   """
-  def __init__(self, *args, **kwargs):
-    super(ScoreRBKLqp, self).__init__(*args, **kwargs)
+  def __init__(self, latent_vars=None, data=None):
+    """Create an inference algorithm.
+
+    Args:
+      latent_vars: list of RandomVariable or
+                   dict of RandomVariable to RandomVariable.
+        Collection of random variables to perform inference on. If
+        list, each random variable will be implicitly optimized using a
+        `Normal` random variable that is defined internally with free
+        parameters for its location and scale and is initialized using
+        standard normal draws. The random variables to approximate
+        must be continuous.
+    """
+    if isinstance(latent_vars, list):
+      with tf.variable_scope(None, default_name="posterior"):
+        latent_vars_dict = {}
+        continuous = \
+            ('01', 'nonnegative', 'simplex', 'real', 'multivariate_real')
+        for z in latent_vars:
+          if not hasattr(z, 'support') or z.support not in continuous:
+            raise AttributeError(
+                "Random variable {} is not continuous or a random "
+                "variable with supported continuous support.".format(z))
+          batch_event_shape = z.batch_shape.concatenate(z.event_shape)
+          loc = tf.Variable(tf.random_normal(batch_event_shape))
+          scale = tf.nn.softplus(
+              tf.Variable(tf.random_normal(batch_event_shape)))
+          latent_vars_dict[z] = Normal(loc=loc, scale=scale)
+      latent_vars = latent_vars_dict
+      del latent_vars_dict
+
+    super(ScoreRBKLqp, self).__init__(latent_vars, data)
 
   def initialize(self, n_samples=1, *args, **kwargs):
     """Initialize inference algorithm. It initializes hyperparameters
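A second sketch, also not part of the diff (the `Beta`/`Bernoulli` variables are illustrative): the support check in the new constructors rejects latent variables whose support is not one of the listed continuous kinds, raising the `AttributeError` shown above.

  import edward as ed
  from edward.models import Bernoulli, Beta

  pi = Beta(1.0, 1.0)       # support '01' is continuous: accepted
  z = Bernoulli(probs=0.5)  # discrete support: rejected

  try:
    ed.KLqp([pi, z])
  except AttributeError as err:
    print(err)  # Random variable ... is not continuous ...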
