white space and errors caught by pylint
lcgraham committed May 17, 2016
1 parent dd9d6c7 commit 27a9e22
Showing 11 changed files with 296 additions and 170 deletions.
41 changes: 25 additions & 16 deletions bet/calculateP/calculateP.py
@@ -6,11 +6,11 @@
* :mod:`~bet.calculateP.prob_emulated` provides a skeleton class and calculates
the probability for a set of emulation points.
* :mod:`~bet.calculateP.calculateP.prob_samples_mc` estimates the probability based on pre-defined volumes.
* :mod:`~bet.calculateP.calculateP.prob_samples_mc` estimates the
probability based on pre-defined volumes.
"""
from bet.Comm import comm, MPI
import numpy as np
import scipy.spatial as spatial
import bet.util as util
import bet.sample as samp

@@ -50,6 +50,10 @@ def prob_emulated(discretization, globalize=True):
``num_l_emulate`` iid samples :math:`(\lambda_{emulate})`.
This is added to the emulated input sample set object.
.. todo::
@smattis the way this is written globalize does nothing
:param discretization: An object containing the discretization information.
:type discretization: :class:`bet.sample.discretization`
:param bool globalize: Makes local variables global.
@@ -59,7 +63,7 @@ def prob_emulated(discretization, globalize=True):
# Check dimensions
discretization.check_nums()
op_num = discretization._output_probability_set.check_num()
emi_num = discretization._emulated_input_sample_set.check_num()
discretization._emulated_input_sample_set.check_num()

# Check for necessary properties
if discretization._io_ptr_local is None:
@@ -68,21 +72,22 @@ def prob_emulated(discretization, globalize=True):
discretization.set_emulated_ii_ptr(globalize=False)

# Calculate Probabilties
P = np.zeros((discretization._emulated_input_sample_set._values_local.shape[0],))
d_distr_emu_ptr = discretization._io_ptr[discretization._emulated_ii_ptr_local]
P = np.zeros((discretization._emulated_input_sample_set.\
_values_local.shape[0],))
d_distr_emu_ptr = discretization._io_ptr[discretization.\
_emulated_ii_ptr_local]
for i in range(op_num):
if discretization._output_probability_set._probabilities[i] > 0.0:
Itemp = np.equal(d_distr_emu_ptr, i)
Itemp_sum = np.sum(Itemp)
Itemp_sum = comm.allreduce(Itemp_sum, op=MPI.SUM)
if Itemp_sum > 0:
P[Itemp] = discretization._output_probability_set._probabilities[i]/Itemp_sum
P[Itemp] = discretization._output_probability_set.\
_probabilities[i]/Itemp_sum

discretization._emulated_input_sample_set._probabilities_local = P

pass


def prob(discretization):
r"""
Calculates :math:`P_{\Lambda}(\mathcal{V}_{\lambda_{samples}})`, the
@@ -106,20 +111,23 @@ def prob(discretization):

# Calculate Probabilities
if discretization._input_sample_set._values_local is None:
discretization._input_sample_set.global_to_local()
discretization._input_sample_set.global_to_local()
P_local = np.zeros((len(discretization._io_ptr_local),))
for i in range(op_num):
if discretization._output_probability_set._probabilities[i] > 0.0:
Itemp = np.equal(discretization._io_ptr_local, i)
Itemp_sum = np.sum(discretization._input_sample_set._volumes_local[Itemp])
Itemp_sum = np.sum(discretization._input_sample_set.\
_volumes_local[Itemp])
Itemp_sum = comm.allreduce(Itemp_sum, op=MPI.SUM)
if Itemp_sum > 0:
P_local[Itemp] = discretization._output_probability_set._probabilities[i]*discretization._input_sample_set._volumes_local[Itemp]/Itemp_sum
P_local[Itemp] = discretization._output_probability_set.\
_probabilities[i]*discretization._input_sample_set.\
_volumes_local[Itemp]/Itemp_sum

discretization._input_sample_set._probabilities = util.get_global_values(P_local)
discretization._input_sample_set._probabilities = util.\
get_global_values(P_local)
discretization._input_sample_set._probabilities_local = P_local


def prob_mc(discretization):
r"""
Calculates :math:`P_{\Lambda}(\mathcal{V}_{\lambda_{samples}})`, the
@@ -136,17 +144,18 @@

# Check Dimensions
num = discretization.check_nums()
op_num = discretization._output_probability_set.check_num()
discretization._output_probability_set.check_num()
if discretization._output_probability_set._values_local is None:
discretization._output_probability_set.global_to_local()
if discretization._emulated_input_sample_set._values_local is None:
discretization._emulated_input_sample_set.global_to_local()

# Calculate Volumes
(_, emulate_ptr) = discretization._input_sample_set.query(discretization._emulated_input_sample_set._values_local)
(_, emulate_ptr) = discretization._input_sample_set.query(discretization.\
_emulated_input_sample_set._values_local)
vol = np.zeros((num,))
for i in range(num):
vol[i] = np.sum(np.equal(emulate_ptr,i))
vol[i] = np.sum(np.equal(emulate_ptr, i))
cvol = np.copy(vol)
comm.Allreduce([vol, MPI.DOUBLE], [cvol, MPI.DOUBLE], op=MPI.SUM)
vol = cvol
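Aside (not part of the commit): the volume loop in prob_mc above amounts to a Monte Carlo estimate of the Voronoi cell volumes — each emulated input point is binned to its nearest input sample, and a cell's volume is the fraction of emulated points landing in it. A serial sketch of the same idea in plain NumPy/SciPy, with made-up arrays standing in for the BET sample sets, might look like:

import numpy as np
import scipy.spatial as spatial

# Stand-in arrays; in BET these live on the discretization's sample sets.
samples = np.random.random((100, 2))       # input sample values
emulated = np.random.random((10000, 2))    # emulated input points

# Assign each emulated point to its nearest input sample.
(_, emulate_ptr) = spatial.KDTree(samples).query(emulated)
vol = np.zeros((samples.shape[0],))
for i in range(samples.shape[0]):
    vol[i] = np.sum(np.equal(emulate_ptr, i))
vol /= float(emulated.shape[0])            # volumes now sum to one

In the parallel code above, the per-rank counts are summed with comm.Allreduce before normalizing, which is what the cvol buffer is for.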
82 changes: 47 additions & 35 deletions bet/calculateP/simpleFunP.py
@@ -7,7 +7,6 @@
"""
from bet.Comm import comm, MPI
import numpy as np
import scipy.spatial as spatial
import bet.calculateP.voronoiHistogram as vHist
import collections
import bet.util as util
@@ -51,27 +50,30 @@ def unif_unif(data_set, Q_ref, M=50, bin_ratio=0.2, num_d_emulate=1E6):
:param int num_d_emulate: Number of samples used to emulate using an MC
assumption
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param Q_ref: :math:`Q(\lambda_{reference})`
:type Q_ref: :class:`~numpy.ndarray` of size (mdim,)
:rtype: :class:`~bet.sample.voronoi_sample_set`
:returns: sample_set object defining simple function approximation
"""
if isinstance(data_set, samp.sample_set_base):
num = data_set.check_num()
data_set.check_num()
dim = data_set._dim
values = data_set._values
elif isinstance(data_set, samp.discretization):
num = data_set.check_nums()
data_set.check_nums()
dim = data_set._output_sample_set._dim
values = data_set._output_sample_set._values
values = data_set._output_sample_set._values
elif isinstance(data_set, np.ndarray):
num = data_set.shape[0]
data_set.shape[0]
dim = data_set.shape[1]
values = data_set
else:
raise wrong_argument_type("The first argument must be of type bet.sample.sample_set, bet.sample.discretization or np.ndarray")
msg = "The first argument must be of type bet.sample.sample_set, "
msg += "bet.sample.discretization or np.ndarray"
raise wrong_argument_type(msg)

bin_size = (np.max(values, 0) - np.min(values, 0))*bin_ratio
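Aside (not part of the commit): roughly, unif_unif builds :math:`\rho_{\mathcal{D},M}` by drawing M bin centers uniformly from a box of width bin_size centered at Q_ref, then weighting each center by the fraction of num_d_emulate uniform emulation points that land nearest to it. A standalone sketch of that construction (hypothetical names, not the BET API):

import numpy as np
import scipy.spatial as spatial

Q_ref = np.array([0.5, 0.5])               # stand-in reference datum
bin_size = np.array([0.2, 0.2])            # width of the box about Q_ref
M, num_d_emulate = 50, int(1E6)

# M bin centers drawn uniformly from the box about Q_ref
centers = Q_ref + bin_size * (np.random.random((M, 2)) - 0.5)

# Emulation points from the same box, binned to the nearest center
emulate = Q_ref + bin_size * (np.random.random((num_d_emulate, 2)) - 0.5)
(_, ptr) = spatial.KDTree(centers).query(emulate)
rho = np.bincount(ptr, minlength=M) / float(num_d_emulate)  # sums to one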

@@ -159,7 +161,8 @@ def normal_normal(data_set, Q_ref, M, std, num_d_emulate=1E6):
from the given normal distribution.
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param int M: Defines number M samples in D used to define
:math:`\rho_{\mathcal{D},M}` The choice of M is something of an "art" -
play around with it and you can get reasonable results with a
@@ -252,7 +255,8 @@ def unif_normal(data_set, Q_ref, M, std, num_d_emulate=1E6):
direction.
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param int M: Defines number M samples in D used to define
:math:`\rho_{\mathcal{D},M}` The choice of M is something of an "art" -
play around with it and you can get reasonable results with a
@@ -331,7 +335,8 @@ def uniform_hyperrectangle_user(data_set, domain, center_pts_per_edge=1):
``len(d_distr_samples) == 3**mdim``.
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param domain: The domain over which :math:`\rho_\mathcal{D}` is
uniform.
:type domain: :class:`numpy.ndarray` of shape (2, mdim)
@@ -344,27 +349,26 @@ def uniform_hyperrectangle_user(data_set, domain, center_pts_per_edge=1):
"""
# make sure the shape of the data and the domain are correct
if isinstance(data_set, samp.sample_set_base):
num = data_set.check_num()
dim = data_set._dim
data_set.check_num()
values = data_set._values
elif isinstance(data_set, samp.discretization):
num = data_set.check_nums()
dim = data_set._output_sample_set._dim
values = data_set._output_sample_set._values
data_set.check_nums()
values = data_set._output_sample_set._values
elif isinstance(data_set, np.ndarray):
num = data_set.shape[0]
dim = data_set.shape[1]
data_set.shape[0]
values = data_set
else:
raise wrong_argument_type("The first argument must be of type bet.sample.sample_set, bet.sample.discretization or np.ndarray")
msg = "The first argument must be of type bet.sample.sample_set, "
msg += "bet.sample.discretization or np.ndarray"
raise wrong_argument_type(msg)

data = values
domain = util.fix_dimensions_data(domain, data.shape[1])
domain_center = np.mean(domain, 0)
domain_lengths = np.max(domain, 0) - np.min(domain, 0)

return uniform_hyperrectangle_binsize(data_set, domain_center, domain_lengths,
center_pts_per_edge)
return uniform_hyperrectangle_binsize(data_set, domain_center,
domain_lengths, center_pts_per_edge)

def uniform_hyperrectangle_binsize(data_set, Q_ref, bin_size,
center_pts_per_edge=1):
@@ -383,7 +387,8 @@ def uniform_hyperrectangle_binsize(data_set, Q_ref, bin_size,
:param int num_d_emulate: Number of samples used to emulate using an MC
assumption
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param Q_ref: :math:`Q(\lambda_{reference})`
:type Q_ref: :class:`~numpy.ndarray` of size (mdim,)
:param list() center_pts_per_edge: number of center points per edge
@@ -395,20 +400,21 @@
"""

if isinstance(data_set, samp.sample_set_base):
num = data_set.check_num()
data_set.check_num()
dim = data_set._dim
values = data_set._values
elif isinstance(data_set, samp.discretization):
num = data_set.check_nums()
data_set.check_nums()
dim = data_set._output_sample_set._dim
values = data_set._output_sample_set._values
values = data_set._output_sample_set._values
elif isinstance(data_set, np.ndarray):
num = data_set.shape[0]
data_set.shape[0]
dim = data_set.shape[1]
values = data_set
else:
raise wrong_argument_type("The first argument must be of type bet.sample.sample_set, bet.sample.discretization or np.ndarray")

msg = "The first argument must be of type bet.sample.sample_set, "
msg += "bet.sample.discretization or np.ndarray"
raise wrong_argument_type(msg)
data = values

if not isinstance(center_pts_per_edge, collections.Iterable):
Expand All @@ -431,7 +437,7 @@ def uniform_hyperrectangle_binsize(data_set, Q_ref, bin_size,
(center_pts_per_edge, Q_ref, bin_size, sur_domain)
edges = vHist.edges_regular(center_pts_per_edge, rect_domain, sur_domain)
_, volumes, _ = vHist.histogramdd_volumes(edges, points)
s_set = vHist.simple_fun_uniform(points, volumes, rect_domain)
s_set = vHist.simple_fun_uniform(points, volumes, rect_domain)

if isinstance(data_set, samp.discretization):
data_set._output_probability_set = s_set
@@ -449,7 +455,8 @@ def uniform_hyperrectangle(data_set, Q_ref, bin_ratio, center_pts_per_edge=1):
``len(d_distr_samples) == 3^mdim``.
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param bin_ratio: The ratio used to determine the width of the
uniform distribution as ``bin_size = (data_max-data_min)*bin_ratio``
:type bin_ratio: double or list()
@@ -471,13 +478,15 @@ def uniform_hyperrectangle(data_set, Q_ref, bin_ratio, center_pts_per_edge=1):
elif isinstance(data_set, samp.discretization):
num = data_set.check_nums()
dim = data_set._output_sample_set._dim
values = data_set._output_sample_set._values
values = data_set._output_sample_set._values
elif isinstance(data_set, np.ndarray):
num = data_set.shape[0]
dim = data_set.shape[1]
values = data_set
else:
raise wrong_argument_type("The first argument must be of type bet.sample.sample_set, bet.sample.discretization or np.ndarray")
msg = "The first argument must be of type bet.sample.sample_set, "
msg += "bet.sample.discretization or np.ndarray"
raise wrong_argument_type(msg)
data = values

if not isinstance(bin_ratio, collections.Iterable):
@@ -498,7 +507,8 @@ def uniform_data(data_set):
distributions over irregularly shaped domains.
:param data_set: Sample set that the probability measure is defined for.
:type data_set: :class:`~bet.sample.discretization` or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:type data_set: :class:`~bet.sample.discretization`
or :class:`~bet.sample.sample_set` or :class:`~numpy.ndarray`
:param list() center_pts_per_edge: number of center points per edge and
additional two points will be added to create the bounding layer
@@ -513,16 +523,18 @@ def uniform_data(data_set):
elif isinstance(data_set, samp.discretization):
num = data_set.check_nums()
dim = data_set._output_sample_set._dim
values = data_set._output_sample_set._values
values = data_set._output_sample_set._values
s_set = data_set._output_sample_set.copy()
elif isinstance(data_set, np.ndarray):
num = data_set.shape[0]
dim = data_set.shape[1]
values = data_set
s_set = samp.sample_set(dim = dim)
s_set = samp.sample_set(dim=dim)
s_set.set_values(values)
else:
raise wrong_argument_type("The first argument must be of type bet.sample.sample_set, bet.sample.discretization or np.ndarray")
msg = "The first argument must be of type bet.sample.sample_set, "
msg += "bet.sample.discretization or np.ndarray"
raise wrong_argument_type(msg)

s_set.set_probabilities(np.ones((num,), dtype=np.float)/num)

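Aside (not part of the commit): the uniform_hyperrectangle* helpers above all describe a uniform density on a box; uniform_hyperrectangle derives the box width from the observed data range and bin_ratio, while uniform_hyperrectangle_binsize takes the width directly and centers it at Q_ref. A minimal sketch of that geometry with stand-in arrays (not the BET API):

import numpy as np

data = np.random.random((1000, 2))         # stand-in output samples
Q_ref = np.array([0.5, 0.5])               # stand-in reference datum
bin_ratio = 0.2

# uniform_hyperrectangle: box width is a fraction of the data range
bin_size = (np.max(data, 0) - np.min(data, 0)) * bin_ratio

# uniform_hyperrectangle_binsize then centers a box of that width at Q_ref;
# rho_D is uniform on this box and zero outside it.
rect_min = Q_ref - 0.5 * bin_size
rect_max = Q_ref + 0.5 * bin_size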
1 change: 0 additions & 1 deletion bet/calculateP/voronoiHistogram.py
@@ -8,7 +8,6 @@
"""

import numpy as np
from scipy import spatial
import bet.util as util
import bet.sample as samp
