src/optimagic/algorithms.py (16 changes: 8 additions & 8 deletions)

@@ -287,6 +287,7 @@ def Scalar(self) -> BoundedGradientBasedLocalNonlinearConstrainedScalarAlgorithm
 @dataclass(frozen=True)
 class BoundedGradientBasedLocalScalarAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -486,7 +487,6 @@ def Scalar(self) -> BoundedGradientFreeLocalNonlinearConstrainedScalarAlgorithms

 @dataclass(frozen=True)
 class BoundedGradientFreeLocalScalarAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
     nlopt_cobyla: Type[NloptCOBYLA] = NloptCOBYLA
@@ -842,6 +842,7 @@ def NonlinearConstrained(
 @dataclass(frozen=True)
 class BoundedGradientBasedLocalAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -891,6 +892,7 @@ def Scalar(self) -> GradientBasedLocalNonlinearConstrainedScalarAlgorithms:
 @dataclass(frozen=True)
 class GradientBasedLocalScalarAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -958,6 +960,7 @@ def Scalar(self) -> BoundedGradientBasedNonlinearConstrainedScalarAlgorithms:
 @dataclass(frozen=True)
 class BoundedGradientBasedScalarAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -1152,7 +1155,6 @@ def Scalar(self) -> GlobalGradientFreeParallelScalarAlgorithms:

 @dataclass(frozen=True)
 class BoundedGradientFreeLocalAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_dfols: Type[NagDFOLS] = NagDFOLS
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
@@ -1202,7 +1204,6 @@ def Scalar(self) -> GradientFreeLocalNonlinearConstrainedScalarAlgorithms:

 @dataclass(frozen=True)
 class GradientFreeLocalScalarAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
@@ -1294,7 +1295,6 @@ def Scalar(self) -> BoundedGradientFreeNonlinearConstrainedScalarAlgorithms:

 @dataclass(frozen=True)
 class BoundedGradientFreeScalarAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
     nlopt_cobyla: Type[NloptCOBYLA] = NloptCOBYLA
@@ -1949,6 +1949,7 @@ def Scalar(self) -> GlobalGradientBasedScalarAlgorithms:
 class GradientBasedLocalAlgorithms(AlgoSelection):
     bhhh: Type[BHHH] = BHHH
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -1991,6 +1992,7 @@ def Scalar(self) -> GradientBasedLocalScalarAlgorithms:
 @dataclass(frozen=True)
 class BoundedGradientBasedAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -2060,6 +2062,7 @@ def Scalar(self) -> GradientBasedNonlinearConstrainedScalarAlgorithms:
 @dataclass(frozen=True)
 class GradientBasedScalarAlgorithms(AlgoSelection):
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -2166,7 +2169,6 @@ def Scalar(self) -> GlobalGradientFreeScalarAlgorithms:

 @dataclass(frozen=True)
 class GradientFreeLocalAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_dfols: Type[NagDFOLS] = NagDFOLS
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel
@@ -2207,7 +2209,6 @@ def Scalar(self) -> GradientFreeLocalScalarAlgorithms:

 @dataclass(frozen=True)
 class BoundedGradientFreeAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_dfols: Type[NagDFOLS] = NagDFOLS
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
@@ -2304,7 +2305,6 @@ def Scalar(self) -> GradientFreeNonlinearConstrainedScalarAlgorithms:

 @dataclass(frozen=True)
 class GradientFreeScalarAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel
     nlopt_bobyqa: Type[NloptBOBYQA] = NloptBOBYQA
@@ -3075,6 +3075,7 @@ def Local(self) -> LeastSquaresLocalParallelAlgorithms:
 class GradientBasedAlgorithms(AlgoSelection):
     bhhh: Type[BHHH] = BHHH
     fides: Type[Fides] = Fides
+    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     ipopt: Type[Ipopt] = Ipopt
     nlopt_ccsaq: Type[NloptCCSAQ] = NloptCCSAQ
     nlopt_lbfgsb: Type[NloptLBFGSB] = NloptLBFGSB
@@ -3127,7 +3128,6 @@ def Scalar(self) -> GradientBasedScalarAlgorithms:

 @dataclass(frozen=True)
 class GradientFreeAlgorithms(AlgoSelection):
-    iminuit_migrad: Type[IminuitMigrad] = IminuitMigrad
     nag_dfols: Type[NagDFOLS] = NagDFOLS
     nag_pybobyqa: Type[NagPyBOBYQA] = NagPyBOBYQA
     neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel
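
Because migrad now consumes an analytic gradient (needs_jac=True in the companion file below), the generated selection classes move iminuit_migrad from every GradientFree branch to the matching GradientBased branch. A minimal sketch of the effect on the selection API, assuming the public om.algos entry point mirrors the class names above (illustration only, not part of this diff):

    import optimagic as om

    # After this change, migrad is offered on the gradient-based branches ...
    algo = om.algos.Bounded.GradientBased.Scalar.iminuit_migrad

    # ... and is no longer listed on the gradient-free ones:
    # om.algos.GradientFree.iminuit_migrad  # would now raise AttributeError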
src/optimagic/optimizers/iminuit_migrad.py (80 changes: 44 additions & 36 deletions)

@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Optional
+from typing import Callable, Optional

 import numpy as np
 from iminuit import Minuit  # type: ignore
@@ -8,7 +8,6 @@
 from optimagic import mark
 from optimagic.optimization.algo_options import (
     STOPPING_MAXFUN,
-    STOPPING_MAXITER,
 )
 from optimagic.optimization.algorithm import Algorithm, InternalOptimizeResult
 from optimagic.optimization.internal_optimization_problem import (
@@ -22,7 +21,7 @@
     solver_type=AggregationLevel.SCALAR,
     is_available=True,
     is_global=False,
-    needs_jac=False,
+    needs_jac=True,
     needs_hess=False,
     supports_parallelism=False,
     supports_bounds=True,
@@ -33,19 +32,30 @@
 @dataclass(frozen=True)
 class IminuitMigrad(Algorithm):
     stopping_maxfun: int = STOPPING_MAXFUN
-    stopping_maxiter: int = STOPPING_MAXITER
     errordef: Optional[float] = None

     def _solve_internal_problem(
         self, problem: InternalOptimizationProblem, params: NDArray[np.float64]
     ) -> InternalOptimizeResult:
         def wrapped_objective(x: NDArray[np.float64]) -> float:
             return float(problem.fun(x))

-        m = Minuit(
-            wrapped_objective,
-            params,
-        )
+        wrapped_gradient = None
+        jac_func = None
+        if problem.jac is not None:
+
+            def wrapped_gradient(x: NDArray[np.float64]) -> NDArray[np.float64]:
+                return problem.jac(x)
+
+            jac_func = problem.jac
+        elif problem.fun_and_jac is not None:
+
+            def wrapped_gradient(x: NDArray[np.float64]) -> NDArray[np.float64]:
+                _, jac = problem.fun_and_jac(x)
+                return jac
+
+            jac_func = lambda x: problem.fun_and_jac(x)[1]
+
+        m = Minuit(wrapped_objective, params, grad=wrapped_gradient)

         if problem.bounds:
             lower_bounds = problem.bounds.lower

[Codecov / codecov/patch: added lines #L50 and #L52-L56 (the fun_and_jac branch) were not covered by tests.]

@@ -58,37 +68,35 @@
             if lower is not None or upper is not None:
                 m.limits[i] = (lower, upper)

-        m.migrad(
-            ncall=self.stopping_maxfun,
-            iterate=self.stopping_maxiter,
-        )
-        print(m.params)
+        m.migrad(ncall=self.stopping_maxfun)

-        res = process_minuit_result(m)
+        res = process_minuit_result(m, jac_func)
         return res


-def process_minuit_result(minuit_result: Minuit) -> InternalOptimizeResult:
-    x = np.array(minuit_result.values)
-    fun = minuit_result.fval
-    success = minuit_result.valid
-    message = repr(minuit_result.fmin)
-    jac: Optional[NDArray[np.float64]] = None
-    if hasattr(minuit_result, "gradient"):
-        jac = np.array(minuit_result.gradient)
-
-    hessian = np.array(minuit_result.hesse().params)
-    covariance = np.array(minuit_result.covariance)
-
-    info = {"minos": minuit_result.minos()}
+def process_minuit_result(
+    minuit_result: Minuit,
+    jac_fun: Optional[Callable[[NDArray[np.float64]], NDArray[np.float64]]] = None,
+) -> InternalOptimizeResult:
+    jac = None
+    if jac_fun is not None:
+        jac = jac_fun(np.array(minuit_result.values))

-    return InternalOptimizeResult(
-        x=x,
-        fun=fun,
-        success=success,
-        message=message,
+    res = InternalOptimizeResult(
+        x=np.array(minuit_result.values),
+        fun=minuit_result.fval,
+        success=minuit_result.valid,
+        message=repr(minuit_result.fmin),
         n_fun_evals=minuit_result.nfcn,
         n_jac_evals=minuit_result.ngrad,
         n_hess_evals=None,
         n_iterations=None,
         status=None,
         jac=jac,
-        hess=hessian,
-        hess_inv=covariance,
-        info=info,
+        hess=np.array(minuit_result.hesse()),
+        hess_inv=np.array(minuit_result.covariance),
+        max_constraint_violation=None,
+        info={"minos": minuit_result.minos()},
         history=None,
     )
+    return res
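
For context, a minimal standalone sketch of the iminuit calls this wrapper builds on: Minuit accepts an optional grad callable next to the objective, migrad takes a budget of function calls via ncall, and the fitted state exposes the fields that process_minuit_result reads. The toy quadratic objective and all numbers are made-up illustrations, not code from this PR:

    import numpy as np
    from iminuit import Minuit


    def fun(x):
        # toy objective: f(x) = sum(x_i**2)
        return float(np.sum(x**2))


    def grad(x):
        # analytic gradient of the toy objective
        return 2 * x


    m = Minuit(fun, np.array([1.0, 2.0]), grad=grad)  # gradient wired as in the diff
    m.errordef = Minuit.LEAST_SQUARES  # errordef = 1.0 for a plain objective
    m.limits[0] = (-5.0, 5.0)          # per-parameter bounds, as in the bounds loop
    m.migrad(ncall=10_000)             # cap on objective evaluations

    print(np.array(m.values), m.fval, m.valid)  # solution, value, convergence flag
    print(m.nfcn, m.ngrad)                      # evaluation counters stored in the result

Note that migrad's only evaluation budget is ncall; its separate iterate argument controls how many times migrad restarts when convergence has not been reached, not a maximum iteration count, which is presumably why stopping_maxiter has no counterpart here.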