Python3 #70

Open · wants to merge 2 commits into base: master
7 changes: 4 additions & 3 deletions examples/constrained/branin_con.py
@@ -1,3 +1,4 @@
from __future__ import print_function
import math
import numpy as np

@@ -6,7 +7,7 @@ def evaluate(job_id, params):
x = params['X']
y = params['Y']

print 'Evaluating at (%f, %f)' % (x, y)
print('Evaluating at (%f, %f)' % (x, y))

if x < 0 or x > 5.0 or y > 5.0:
return np.nan
@@ -30,6 +31,6 @@ def main(job_id, params):
try:
return evaluate(job_id, params)
except Exception as ex:
print ex
print 'An error occurred in branin_con.py'
print(ex)
print('An error occurred in branin_con.py')
return np.nan
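
The conversion pattern used across these example scripts is the standard one: adding `from __future__ import print_function` at the top makes the `print(...)` call syntax behave identically on Python 2.6+ and Python 3, so one source file runs on both. A minimal, self-contained sketch of the idea (the function name below is illustrative, not taken from the diff):

```python
from __future__ import print_function  # no-op on Python 3, enables print() on Python 2


def evaluate_stub(x, y):
    # With the future import this is a function call on both interpreters,
    # so %-formatting and keyword arguments (sep=, end=, file=) work the same way.
    print('Evaluating at (%f, %f)' % (x, y))
    print('x + y =', x + y, sep=' ')


if __name__ == '__main__':
    evaluate_stub(2.5, 1.0)
```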
7 changes: 4 additions & 3 deletions examples/distributed/branin.py
@@ -1,3 +1,4 @@
from __future__ import print_function
import numpy as np
import sys
import math
@@ -13,12 +14,12 @@ def branin(x, y):
#if np.random.rand > 0.75:
# raise Exception('Blah!')

print 'Result = %f' % result
print('Result = %f' % result)
time.sleep(np.random.randint(30))
return {'branin' : result}

# Write a function like this called 'main'
def main(job_id, params):
print 'Anything printed here will end up in the output directory for job #%d' % job_id
print params
print('Anything printed here will end up in the output directory for job #%d' % job_id)
print(params)
return branin(params['x'], params['y'])
7 changes: 4 additions & 3 deletions examples/noisy/branin_noisy.py
@@ -1,3 +1,4 @@
from __future__ import print_function
import numpy as np
import math

@@ -9,12 +10,12 @@ def branin(x, y):
result = float(result)
noise = np.random.normal() * 50.

print 'Result = %f, noise %f, total %f' % (result, noise, result+noise)
print('Result = %f, noise %f, total %f' % (result, noise, result+noise))
#time.sleep(np.random.randint(60))
return result + noise

# Write a function like this called 'main'
def main(job_id, params):
print 'Anything printed here will end up in the output directory for job #%d' % job_id
print params
print('Anything printed here will end up in the output directory for job #%d' % job_id)
print(params)
return branin(params['x'], params['y'])
7 changes: 4 additions & 3 deletions examples/simple/branin.py
@@ -1,3 +1,4 @@
from __future__ import print_function
import numpy as np
import math

@@ -8,12 +9,12 @@ def branin(x, y):

result = float(result)

print 'Result = %f' % result
print('Result = %f' % result)
#time.sleep(np.random.randint(60))
return result

# Write a function like this called 'main'
def main(job_id, params):
print 'Anything printed here will end up in the output directory for job #%d' % job_id
print params
print('Anything printed here will end up in the output directory for job #%d' % job_id)
print(params)
return branin(params['x'], params['y'])
55 changes: 33 additions & 22 deletions examples/simple/make_plots.py
@@ -1,25 +1,32 @@
from __future__ import print_function
import importlib
import sys
from itertools import izip

if sys.version < '3':
from itertools import izip
else:
izip = zip


import numpy as np
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as axes3d


from spearmint.utils.database.mongodb import MongoDB
from spearmint.utils.fixes import items

from spearmint.main import get_options, parse_resources_from_config, load_jobs, remove_broken_jobs, \
load_task_group, load_hypers

def print_dict(d, level=1):
if isinstance(d, dict):
if level > 1: print ""
for k, v in d.iteritems():
print " " * level, k,
if level > 1: print("")
for k, v in items(d):
print(" " * level, k,)
print_dict(v, level=level+1)
else:
print d
print(d)

def main():
"""
@@ -32,7 +39,7 @@ def main():
unstandardized)
"""
options, expt_dir = get_options()
print "options:"
print("options:")
print_dict(options)

# reduce the grid size
@@ -43,7 +50,7 @@ def main():
# Load up the chooser.
chooser_module = importlib.import_module('spearmint.choosers.' + options['chooser'])
chooser = chooser_module.init(options)
print "chooser", chooser
print("chooser", chooser)
experiment_name = options.get("experiment-name", 'unnamed-experiment')

# Connect to the database
@@ -55,38 +62,42 @@ def main():
jobs = load_jobs(db, experiment_name)
remove_broken_jobs(db, jobs, experiment_name, resources)

print "resources:", resources
print("resources:", resources)
print_dict(resources)
resource = resources.itervalues().next()

if sys.version < '3':
resource = resources.itervalues().next()
else:
resource = list(resources.values())[0] # ugly code

task_options = { task: options["tasks"][task] for task in resource.tasks }
print "task_options:"
print("task_options:")
print_dict(task_options) # {'main': {'likelihood': u'NOISELESS', 'type': 'OBJECTIVE'}}

task_group = load_task_group(db, options, resource.tasks)
print "task_group", task_group # TaskGroup
print "tasks:"
print("task_group", task_group) # TaskGroup
print("tasks:")
print_dict(task_group.tasks) # {'main': <spearmint.tasks.task.Task object at 0x10bf63290>}


hypers = load_hypers(db, experiment_name)
print "loaded hypers", hypers # from GP.to_dict()
print("loaded hypers", hypers) # from GP.to_dict()

hypers = chooser.fit(task_group, hypers, task_options)
print "\nfitted hypers:"
print("\nfitted hypers:")
print_dict(hypers)

lp, x = chooser.best()
x = x.flatten()
print "best", lp, x
print("best", lp, x)
bestp = task_group.paramify(task_group.from_unit(x))
print "expected best position", bestp
print("expected best position", bestp)

# get the grid of points
grid = chooser.grid
# print "chooser objectives:",
# print_dict(chooser.objective)
print "chooser models:", chooser.models
print("chooser models:", chooser.models)
print_dict(chooser.models)
obj_model = chooser.models[chooser.objective['name']]
obj_mean, obj_var = obj_model.function_over_hypers(obj_model.predict, grid)
@@ -105,10 +116,10 @@ def main():

xymv = [(xy[0], xy[1], m, v) for xy, m, v in izip(grid, obj_mean, obj_std)]# if .2 < xy[0] < .25]

x = map(lambda x:x[0], xymv)
y = map(lambda x:x[1], xymv)
m = map(lambda x:x[2], xymv)
sig = map(lambda x:x[3], xymv)
x = list(map(lambda x:x[0], xymv))
y = list(map(lambda x:x[1], xymv))
m = list(map(lambda x:x[2], xymv))
sig = list(map(lambda x:x[3], xymv))
# print y

fig = plt.figure(dpi=100)
@@ -124,7 +135,7 @@ def main():
task = task_group.tasks['main']
idata = task.valid_normalized_data_dict
xy = idata["inputs"]
xy = map(task_group.from_unit, xy)
xy = list(map(task_group.from_unit, xy))
xy = np.array(xy)
vals = idata["values"]
vals = [obj_task.unstandardize_mean(obj_task.unstandardize_variance(v)) for v in vals]
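
make_plots.py now pulls `items` (and other files pull `xrange`) from `spearmint.utils.fixes`, a compatibility shim that is referenced but not shown in this diff. A hypothetical minimal version of such a shim, purely to illustrate what the new imports assume, might look like this:

```python
# Hypothetical sketch of what spearmint/utils/fixes.py could provide;
# the real module is not part of this diff.
import sys

if sys.version_info[0] < 3:
    def items(d):
        # Lazy (key, value) iteration on Python 2
        return d.iteritems()
    xrange = xrange
else:
    def items(d):
        # dict.items() is already a lazy view on Python 3
        return d.items()
    xrange = range
```

Two smaller points in this file are worth flagging. The Python 2 statement `print " " * level, k,` used the trailing comma to suppress the newline; `print(" " * level, k,)` does not replicate that, and the function-call equivalent would be `print(" " * level, k, end="")`. Also, `sys.version < '3'` compares version strings; `sys.version_info[0] < 3` is the more conventional check.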
7 changes: 6 additions & 1 deletion spearmint/choosers/acquisition_functions.py
@@ -183,6 +183,7 @@
# its Institution.


import sys
import os
import tempfile
import copy
@@ -191,10 +192,14 @@
import scipy.linalg as spla
import scipy.stats as sps
import scipy.optimize as spo
import cPickle
import multiprocessing
import ast

if sys.version < '3':
import cPickle
else:
import pickle as cPickle

def compute_ei(model, pred, ei_target=None, compute_grad=True):
# TODO: use ei_target
if pred.ndim == 1:
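
An alternative to the explicit version check used here, if the project is willing to depend on `six`, is the `six.moves` alias, which keeps a single import line. Shown only as a sketch of the option, not as what this PR does:

```python
# Optional alternative (not used by this PR): six.moves.cPickle resolves to
# cPickle on Python 2 and to pickle on Python 3.
from six.moves import cPickle

data = cPickle.dumps({'a': 1}, protocol=2)  # protocol 2 is readable by both majors
print(cPickle.loads(data))
```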
16 changes: 9 additions & 7 deletions spearmint/choosers/default_chooser.py
@@ -193,9 +193,11 @@

from .acquisition_functions import compute_ei
from ..utils.grad_check import check_grad
from ..utils.fixes import items, xrange
from ..grids import sobol_grid
from ..models.abstract_model import function_over_hypers
from .. import models
from functools import reduce

DEFAULT_GRIDSIZE = 20000
DEFAULT_GRIDSEED = 0
@@ -260,7 +262,7 @@ def fit(self, task_group, hypers=None, options=None):
grid_seed=self.grid_seed)

# A useful hack: add previously visited points to the grid
for task_name, task in task_group.tasks.iteritems():
for task_name, task in items(task_group.tasks):
if task.has_valid_inputs():
self.grid = np.append(self.grid, task.valid_normalized_data_dict['inputs'], axis=0)
if task.has_pending():
@@ -274,7 +276,7 @@ def fit(self, task_group, hypers=None, options=None):

# print 'Fittings tasks: %s' % str(task_group.tasks.keys())

for task_name, task in task_group.tasks.iteritems():
for task_name, task in items(task_group.tasks):
if task.type.lower() == 'objective':
data_dict = self.objective # confusing: this is how self.objective gets populated
elif task.type.lower() == 'constraint':
@@ -299,7 +301,7 @@ def fit(self, task_group, hypers=None, options=None):

self.models[task_name] = getattr(models, model_class)(task_group.num_dims, **task.options)

vals = data_dict['values'] if data_dict.has_key('values') else data_dict['counts']
vals = data_dict['values'] if 'values' in data_dict else data_dict['counts']

sys.stderr.write('Fitting %s for %s task...\n' % (model_class, task_name))
new_hypers[task_name] = self.models[task_name].fit(
@@ -351,7 +353,7 @@ def suggest(self):
best_grid_ei = grid_ei[best_grid_ind]

if VERBOSE:
print 'Best EI before optimization: %f' % best_grid_ei
print('Best EI before optimization: %f' % best_grid_ei)

if self.check_grad:
check_grad(lambda x: self.acq_optimize_wrapper(x, current_best, True),
@@ -387,8 +389,8 @@ def suggest(self):
# Optimization should always be better unless the optimization
# breaks in some way.
if VERBOSE:
print 'Best EI after optimization: %f' % best_opt_ei
print 'Suggested input %s' % cand[best_opt_ind]
print('Best EI after optimization: %f' % best_opt_ei)
print('Suggested input %s' % cand[best_opt_ind])

if best_opt_ei >= best_grid_ei:
suggestion = cand[best_opt_ind]
@@ -520,7 +522,7 @@ def probabilistic_constraint(self, pred):
np.ones(pred.shape[0], dtype=bool))

def acquisition_function_over_hypers(self, *args, **kwargs):
return function_over_hypers(self.models.values(), self.acquisition_function, *args, **kwargs)
return function_over_hypers(self.models.values(), self.acquisition_function, *args, **kwargs) #TODO might need to use list(.values())

def acquisition_function(self, cand, current_best, compute_grad=True):
obj_model = self.models[self.objective['name']]
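
On the TODO added at `acquisition_function_over_hypers`: in Python 3, `dict.values()` returns a view rather than a list, so whether `list(...)` is needed depends on what `function_over_hypers` does with the argument; plain iteration over a view is fine, but indexing is not. A small standalone illustration (not Spearmint code):

```python
models = {'main': 'gp-main', 'con1': 'gp-con1'}

values_view = models.values()        # a lazy view object on Python 3
for v in values_view:                # iterating a view works as before
    print(v)

# values_view[0]                     # TypeError: 'dict_values' object is not subscriptable
first = list(models.values())[0]     # materialize only when indexing is actually needed
print(first)
```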
14 changes: 11 additions & 3 deletions spearmint/grids/sobol.py
@@ -182,8 +182,15 @@
# to enter into this License and Terms of Use on behalf of itself and
# its Institution.

import sys
import numpy as np
import cPickle as pickle

from spearmint.utils.fixes import xrange

if sys.version < '3':
import cPickle as pickle
else:
import pickle

# Numba autojit might be nice. Currently asplodes.
def sobol(num_points, num_dims):
@@ -236,11 +243,12 @@ def sobol(num_points, num_dims):
return Z

def to_binary(X, bits):
return 1 & (X[:,np.newaxis]/2**np.arange(bits-1,-1,-1, dtype=np.uint32))
temp = X[:,np.newaxis]//2**np.arange(bits-1,-1,-1, dtype=np.uint32) # freaking integer division required here!
return np.ones_like(temp) & temp

# These are the parameters for the Sobol sequence.
# This is hilarious.
params = """(lp1
params = b"""(lp1
(dp2
S'a'
I0
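
Two Python 3 specifics drive the sobol.py changes: `/` on integers now performs true division and returns floats (so the bit extraction in `to_binary` needs `//`), and the embedded pickled parameter table must be a `bytes` literal for `pickle.loads` on Python 3. A small check of the division point, using an illustrative 4-bit example:

```python
import numpy as np

X = np.array([5, 12])
powers = 2 ** np.arange(3, -1, -1)        # [8, 4, 2, 1] for a 4-bit example

bits = 1 & (X[:, np.newaxis] // powers)   # floor division keeps everything integer
print(bits)                               # [[0 1 0 1]
                                          #  [1 1 0 0]]

# With true division (the Python 3 meaning of /) the quotient is float64,
# and `1 & (X[:, np.newaxis] / powers)` raises TypeError for bitwise_and.
```

Under Python 2 the original `/` already floor-divided integer arrays, which is why the old line worked there.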
12 changes: 6 additions & 6 deletions spearmint/kernels/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from matern import Matern52
from sum_kernel import SumKernel
from product_kernel import ProductKernel
from noise import Noise
from scale import Scale
from transform_kernel import TransformKernel
from spearmint.kernels.matern import Matern52
from spearmint.kernels.sum_kernel import SumKernel
from spearmint.kernels.product_kernel import ProductKernel
from spearmint.kernels.noise import Noise
from spearmint.kernels.scale import Scale
from spearmint.kernels.transform_kernel import TransformKernel

__all__ = ["Matern52", "SumKernel", "ProductKernel", "Noise", "Scale", "TransformKernel"]
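
Python 3 drops the implicit relative imports that the old `from matern import Matern52` form relied on. The PR switches to absolute imports; explicit relative imports would work equally well and keep the intra-package relationship visible, sketched here as an alternative rather than a required change:

```python
# Equivalent explicit-relative form; supported on Python 2.6+ and Python 3 alike.
from .matern import Matern52
from .sum_kernel import SumKernel
from .product_kernel import ProductKernel
from .noise import Noise
from .scale import Scale
from .transform_kernel import TransformKernel

__all__ = ["Matern52", "SumKernel", "ProductKernel", "Noise", "Scale", "TransformKernel"]
```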
2 changes: 1 addition & 1 deletion spearmint/kernels/abstract_kernel.py
@@ -186,7 +186,7 @@

from abc import ABCMeta, abstractmethod

class AbstractKernel(object):
class AbstractKernel(object): # metaclass is an additional parameter in py3: AbstractKernel(object, metaclass=...)
__metaclass__ = ABCMeta

@property
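
As the added comment notes, the Python 2 `__metaclass__` attribute is silently ignored on Python 3, so `AbstractKernel` would lose its ABC behaviour there. A cross-version spelling is to build the base class by calling `ABCMeta` directly (or to use `six.add_metaclass`); sketched below with an illustrative abstract method, since the rest of the class is not shown in this diff:

```python
from abc import ABCMeta, abstractmethod

# Works on Python 2 and 3: the base class is produced by calling the metaclass.
AbstractKernelBase = ABCMeta('AbstractKernelBase', (object,), {})


class AbstractKernel(AbstractKernelBase):
    @abstractmethod
    def cov(self, inputs):          # illustrative method name, not taken from the diff
        raise NotImplementedError()
```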
8 changes: 7 additions & 1 deletion spearmint/kernels/kernel_utils.py
@@ -184,7 +184,13 @@


import numpy as np
import scipy.weave

try:
import scipy.weave
except ImportError:
pass

from spearmint.utils.fixes import xrange
from scipy.spatial.distance import cdist

def dist2(ls, x1, x2=None):
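
`scipy.weave` was never ported to Python 3 (it survives only as the separate Python 2 `weave` package), so the try/except above merely keeps the import from crashing. Any code path that actually calls into weave still needs a pure-NumPy fallback. A sketch of the usual guard; the flag and fallback below are my own illustration, not the module's actual implementation:

```python
import numpy as np

try:
    import scipy.weave          # Python 2 only; never ported to Python 3
    HAVE_WEAVE = True
except ImportError:
    HAVE_WEAVE = False


def dist2_numpy(ls, x1, x2=None):
    # Hypothetical pure-NumPy fallback for a lengthscale-scaled squared distance,
    # shown only to illustrate the kind of path needed when HAVE_WEAVE is False.
    x2 = x1 if x2 is None else x2
    a = x1 / ls
    b = x2 / ls
    return ((a[:, None, :] - b[None, :, :]) ** 2).sum(axis=-1)
```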
2 changes: 1 addition & 1 deletion spearmint/kernels/matern.py
@@ -184,7 +184,7 @@


import numpy as np
import kernel_utils
from spearmint.kernels import kernel_utils

from .abstract_kernel import AbstractKernel
from ..utils import priors