Skip to content

Commit

Permalink
fix for gekpls
Browse files — browse the repository at this point in the history
  • Authored by Paul-Saves, committed Jan 29, 2024
1 parent 11811dd commit 9fd747a
Show file tree
Hide file tree
Showing 5 changed files with 27 additions and 3 deletions.
7 changes: 7 additions & 0 deletions smt/applications/mfkpls.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,13 @@ def _initialize(self):
desc="Correlation function type",
types=(str),
)
declare(
"hyper_opt",
"Cobyla",
values=("Cobyla"),
desc="Optimiser for hyperparameters optimisation",
types=str,
)
declare("n_comp", 1, types=int, desc="Number of principal components")
self.name = "MFKPLS"

Expand Down
7 changes: 7 additions & 0 deletions smt/applications/mfkplsk.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,13 @@ def _initialize(self):
desc="Correlation function type",
types=(str),
)
declare(
"hyper_opt",
"Cobyla",
values=("Cobyla"),
desc="Optimiser for hyperparameters optimisation",
types=str,
)
self.name = "MFKPLSK"

def _componentwise_distance(self, dx, opt=0):
Expand Down
3 changes: 1 addition & 2 deletions smt/applications/mixed_integer.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,6 @@ def __init__(
"""
super().__init__()
self._surrogate = surrogate
self._surrogate.options["hyper_opt"] = "Cobyla"
if isinstance(self._surrogate, KrgBased):
raise ValueError(
"Using MixedIntegerSurrogateModel integer model with "
Expand All @@ -109,7 +108,6 @@ def __init__(
self._input_in_folded_space = input_in_folded_space
self.supports = self._surrogate.supports
self.options["print_global"] = False
self.options["hyper_opt"] = "Cobyla"
if "poly" in self._surrogate.options:
if self._surrogate.options["poly"] != "constant":
raise ValueError("constant regression must be used with mixed integer")
Expand Down Expand Up @@ -198,6 +196,7 @@ def __init__(
+ " is not supported. Please use MixedIntegerSurrogateModel instead."
)
self.options["design_space"] = self._surrogate.design_space
self._surrogate.options["hyper_opt"] = "Cobyla"

self._input_in_folded_space = input_in_folded_space
self.supports = self._surrogate.supports
Expand Down
7 changes: 7 additions & 0 deletions smt/surrogate_models/gekpls.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ def _initialize(self):
types=int,
desc="Number of extra points per training point",
)
declare(
"hyper_opt",
"Cobyla",
values=("Cobyla"),
desc="Optimiser for hyperparameters optimisation",
types=str,
)
self.supports["training_derivatives"] = True

def _check_param(self):
Expand Down
6 changes: 5 additions & 1 deletion smt/surrogate_models/krg_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -1908,6 +1908,10 @@ def grad_minus_reduced_likelihood_function(log10t):
optimal_theta_res = optimal_theta_res_loop

elif self.options["hyper_opt"] == "TNC":
if self.options["use_het_noise"]:
raise ValueError(
"For heteroscedastic noise, please use Cobyla"
)
theta_all_loops = 10**theta_all_loops
for theta0_loop in theta_all_loops:
optimal_theta_res_loop = optimize.minimize(
Expand All @@ -1916,7 +1920,7 @@ def grad_minus_reduced_likelihood_function(log10t):
method="TNC",
jac=grad_minus_reduced_likelihood_function,
bounds=bounds_hyp,
options={"maxiter": 100},
options={"maxfun": 2 * limit},
)
if optimal_theta_res_loop["fun"] < optimal_theta_res["fun"]:
optimal_theta_res = optimal_theta_res_loop
Expand Down

0 comments on commit 9fd747a

Please sign in to comment.