Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adding ability to define test space through convex hull #18

Merged
merged 18 commits into from
Nov 13, 2024
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions examples/burgers1d.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,25 @@ parameter_space:
test_space:
type: grid

# An example where training points are provided on the exterior of the
# region and the test space is the convex hull of those training points.
# parameter_space:
# parameters:
# - name: a
# min: 0.7
# max: 0.9
# test_space_type: exterior
# sample_size: 21
# list: [0.70, 0.725, 0.75, 0.800, 0.85, 0.90]
# - name: w
# min: 0.9
# max: 1.1
# test_space_type: exterior
# sample_size: 21
# list: [0.90, 0.970, 1.00, 0.925, 0.98, 1.10]
# test_space:
# type: hull

latent_space:
type: ae
ae:
Expand Down
15 changes: 13 additions & 2 deletions src/lasdi/gplasdi.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,11 @@ def train(self):
n_train = ps.n_train()
ld = self.latent_dynamics

self.training_loss = []
self.ae_loss = []
self.ld_loss = []
self.coef_loss = []

'''
determine number of iterations.
Perform n_iter iterations until overall iterations hit max_iter.
Expand All @@ -184,6 +189,11 @@ def train(self):

loss = loss_ae + self.ld_weight * loss_ld / n_train + self.coef_weight * loss_coef / n_train

self.training_loss.append(loss.item())
self.ae_loss.append(loss_ae.item())
self.ld_loss.append(loss_ld.item())
self.coef_loss.append(loss_coef.item())

loss.backward()
self.optimizer.step()

Expand Down Expand Up @@ -267,7 +277,8 @@ def export(self):
dict_ = {'X_train': self.X_train, 'X_test': self.X_test, 'lr': self.lr, 'n_iter': self.n_iter,
'n_samples' : self.n_samples, 'best_coefs': self.best_coefs, 'max_iter': self.max_iter,
'max_iter': self.max_iter, 'ld_weight': self.ld_weight, 'coef_weight': self.coef_weight,
'restart_iter': self.restart_iter, 'timer': self.timer.export(), 'optimizer': self.optimizer.state_dict()
'restart_iter': self.restart_iter, 'timer': self.timer.export(), 'optimizer': self.optimizer.state_dict(),
'training_loss' : self.training_loss, 'ae_loss' : self.ae_loss, 'ld_loss' : self.ld_loss, 'coeff_loss' : self.coef_loss
}
return dict_

Expand All @@ -280,4 +291,4 @@ def load(self, dict_):
self.optimizer.load_state_dict(dict_['optimizer'])
if (self.device != 'cpu'):
optimizer_to(self.optimizer, self.device)
return
return
82 changes: 78 additions & 4 deletions src/lasdi/param.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import numpy as np
from scipy.spatial import Delaunay
from .inputs import InputParser

def get_1dspace_from_list(config):
Expand All @@ -16,8 +17,14 @@ def create_uniform_1dspace(config):
paramRange = np.linspace(minval, maxval, Nx)
return Nx, paramRange

def get_1dspace_for_exterior(config):
    """Return the test-grid resolution and the user-supplied parameter values.

    For an 'exterior' parameter the training values are taken verbatim from
    the config's 'list' entry, while 'sample_size' sets how many test-grid
    points this axis will use. Note the two need not agree in length.
    """
    values = np.asarray(config['list'])
    return config['sample_size'], values

# Dispatch table: maps a parameter's 'test_space_type' to the routine that
# builds its 1D value range. (The scraped diff had left both the pre- and
# post-change closing lines in place; this is the post-change mapping.)
getParam1DSpace = {'list': get_1dspace_from_list,
                   'uniform': create_uniform_1dspace,
                   'exterior': get_1dspace_for_exterior}

class ParameterSpace:
param_list = []
Expand All @@ -40,12 +47,18 @@ def __init__(self, config):
for param in self.param_list:
self.param_name += [param['name']]

self.train_space = self.createInitialTrainSpace(self.param_list)
self.n_init = self.train_space.shape[0]

test_space_type = parser.getInput(['test_space', 'type'], datatype=str)
if (test_space_type == 'grid'):
self.train_space = self.createInitialTrainSpace(self.param_list)
self.n_init = self.train_space.shape[0]

self.test_grid_sizes, self.test_meshgrid, self.test_space = self.createTestGridSpace(self.param_list)
if (test_space_type == 'hull'):
assert self.n_param >=2, 'Must have at least 2 parameters if test_space is \'hull\' '
self.train_space = self.createInitialTrainSpaceForHull(self.param_list)
self.n_init = self.train_space.shape[0]

self.test_grid_sizes, self.test_meshgrid, self.test_space = self.createTestSpaceFromHull(self.param_list)

return

Expand All @@ -66,6 +79,33 @@ def createInitialTrainSpace(self, param_list):
mesh_grids = self.createHyperMeshGrid(paramRanges)
return self.createHyperGridSpace(mesh_grids)

def createInitialTrainSpaceForHull(self, param_list):
    '''
    If test_space is 'hull', then the provided training parameters must be
    points on the exterior of our training space. So, we form the provided
    points into an (n_points, n_param) array.

    Every parameter must use test_space_type 'exterior' and supply the same
    number of values, since row i across all parameters forms one training
    point.
    '''

    paramRanges = []

    # enumerate replaces the original hand-maintained `k` counter.
    for k, param in enumerate(param_list):
        assert (param['test_space_type'] == 'exterior'), ('test_space_type for all parameters must '
                                                          'be \'exterior\' when test_space is \'hull\'. ')

        _, paramRange = getParam1DSpace[param['test_space_type']](param)
        paramRanges += [paramRange]

        # Each parameter must contribute the same number of points; checking
        # each list against its predecessor covers the whole set transitively.
        if k > 0:
            assert (len(paramRanges[k]) == len(paramRanges[k - 1])), (f'Training parameters {k} and {k-1} have '
                                                                      'different lengths. All training parameters '
                                                                      'must have same length when test_space is \'hull\'.')

    # Stack the per-parameter value lists column-wise: row i is one training
    # point in parameter space (not a meshgrid, despite the original name).
    return np.vstack(paramRanges).T

def createTestGridSpace(self, param_list):
paramRanges = []
gridSizes = []
Expand All @@ -78,6 +118,40 @@ def createTestGridSpace(self, param_list):
mesh_grids = self.createHyperMeshGrid(paramRanges)
return gridSizes, mesh_grids, self.createHyperGridSpace(mesh_grids)

def createTestGridSpaceForHull(self, param_list):
    '''
    Similar to createTestGridSpace, but with some different variables: the
    grid bounds come from each parameter's min/max, and the per-axis
    resolution from its 'sample_size', producing a uniform rectangular grid
    over the parameter box.
    '''

    gridSizes = []
    paramRanges = []

    for param in param_list:
        # Only the sample count is needed here; the listed values are used
        # by createInitialTrainSpaceForHull, not for the test grid.
        numPoints, _ = getParam1DSpace[param['test_space_type']](param)
        gridSizes.append(numPoints)
        paramRanges.append(np.linspace(param['min'], param['max'], numPoints))

    mesh_grids = self.createHyperMeshGrid(paramRanges)
    return gridSizes, mesh_grids, self.createHyperGridSpace(mesh_grids)

def createTestSpaceFromHull(self, param_list):
    '''
    Build the test space as the rectangular-grid points that lie inside the
    convex hull of the training points.

    Returns (gridSizes, mesh_grids, test_space), where test_space keeps only
    the grid points inside the hull of self.train_space.
    '''
    # Get the initial grid over the parameters.
    # Bug fix: honor the param_list argument instead of always reading
    # self.param_list (the original silently ignored its parameter).
    gridSizes, mesh_grids, test_space = self.createTestGridSpaceForHull(param_list)

    # Triangulate the training space. This will be slow in higher dimensions.
    cloud = Delaunay(self.train_space)
    # find_simplex returns -1 for points outside the hull, so >= 0 marks
    # points in (or on) the convex hull.
    mask = cloud.find_simplex(test_space) >= 0
    # Only keep points in the convex hull.
    test_space = test_space[mask]

    return gridSizes, mesh_grids, test_space

def getParameter(self, param_vector):
'''
convert numpy array parameter vector to a dict.
Expand Down
Loading