Add ShareParameters Linear Objective #1320

Draft · wants to merge 8 commits into master
Changes from 4 commits
1 change: 1 addition & 0 deletions desc/objectives/__init__.py
@@ -88,5 +88,6 @@
FixSumModesR,
FixSumModesZ,
FixThetaSFL,
ShareParameters,
)
from .objective_funs import ObjectiveFunction
164 changes: 164 additions & 0 deletions desc/objectives/linear_objectives.py
@@ -236,6 +236,170 @@
self._unjit()


class ShareParameters(_Objective):
"""Fix specific degrees of freedom to be the same between Optimizable things.

Parameters
----------
things : list of Optimizable
list of objects whose degrees of freedom are being fixed to each other's values.
Must be at least length 2, but may be of arbitrary length.
Every object must be of the same type, and have the same size array for the
desired parameter to be fixed (e.g. same geometric resolution if fixing
``R_lmn``, or same pressure profile resolution if fixing ``p_l``)

Member: I thought they didn't have to be the same type, just have the same
attribute name?

Author (dpanici): Yeah, but then I thought of all the ways this could go wrong:
Equilibrium.R_lmn and FourierRZToroidalSurface.R_lmn have the same name, yet
even if they are the same shape it makes no sense to have them be shared.
I can change it if you think that is not a good enough counterpoint, but is
there an example where it makes sense to allow this?

params : nested list of dicts
Dict keys are the names of parameters to share (str), and dict values are the
indices to share for each corresponding parameter (int array).
Use True (False) instead of an int array to share all (none) of the indices
for that parameter.
Must have the same pytree structure as thing.params_dict.
The default is to fix all indices of all parameters.
target : dict of {float, ndarray}, optional
Target value(s) of the objective. Only used if bounds is None.
Should have the same tree structure as thing.params. Defaults to things.params.
Unused by this objective (the target is always zero, the difference between the
two things' params).
bounds : tuple of dict {float, ndarray}, optional
Lower and upper bounds on the objective. Overrides target.
Should have the same tree structure as thing.params.
Unused by this objective (the target is always zero, the difference between the
two things' params).
weight : dict of {float, ndarray}, optional
Weighting to apply to the Objective, relative to other Objectives.
Should be a scalar or have the same tree structure as thing.params.
Unused by this objective (the target is always zero, the difference between the
two things' params).
normalize : bool, optional
Whether to compute the error in physical units or non-dimensionalize.
Unused by this objective (the target is always zero, the difference between the
two things' params).
normalize_target : bool, optional
Whether target and bounds should be normalized before comparing to computed
values. If `normalize` is `True` and the target is in physical units,
this should also be set to True. Has no effect for this objective.
Unused by this objective (the target is always zero, the difference between the
two things' params).
name : str, optional
Name of the objective function.


"""

_scalar = False
_linear = True
_fixed = False
_units = "(~)"
_print_value_fmt = "Shared parameters error: "

def __init__(
self,
things,
params=None,
target=None,
bounds=None,
weight=1,
normalize=True,
normalize_target=True,
name="shared parameters",
):
self._params = params
assert len(things) > 1, "ShareParameters requires at least two things"
assert np.all(
    [isinstance(things[0], type(t)) for t in things[1:]]
), "every thing passed to ShareParameters must be of the same type"

# TODO: some sort of check that all things are the same resolution etc.?
# something like _check_type for coilsets but more general...
# maybe can make a _check_type that takes in the user-inputted params.keys
# and just ensures that the params being fixed are the same resolution; that
# should be all we need to check, I think

Member: Could just check that getattr(thing, param).shape is consistent.

Member: Or check the param dict directly.
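
A minimal sketch of the check the reviewers suggest, assuming thing.params_dict
is a flat dict of ndarrays (the helper name is hypothetical; a nested pytree
would compare tree_leaves instead):

def _check_shared_param_shapes(things, param_names):
    # raise if any parameter to be shared differs in shape between things
    for name in param_names:
        shapes = [np.asarray(t.params_dict[name]).shape for t in things]
        if len(set(shapes)) > 1:
            raise ValueError(
                f"parameter {name} has inconsistent shapes across things: {shapes}"
            )
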
super().__init__(
things=things,
target=target,
bounds=bounds,
weight=weight,
normalize=normalize,
normalize_target=normalize_target,
name=name,
)

def build(self, use_jit=False, verbose=1):
"""Build constant arrays.

Parameters
----------
use_jit : bool, optional
Whether to just-in-time compile the objective and derivatives.
verbose : int, optional
Level of output.

"""
thing = self.things[0]

# default params
default_params = tree_map(lambda dim: np.arange(dim), thing.dimensions)
self._params = setdefault(self._params, default_params)
self._params = broadcast_tree(self._params, default_params)
self._indices = tree_leaves(self._params)
assert tree_structure(self._params) == tree_structure(default_params)

self._dim_f = sum(idx.size for idx in self._indices) * (len(self.things) - 1)

Member: Does this account for things where idx is just a bool to fix all/none?

Author (dpanici, Oct 24, 2024): I think so, as all of my tests just use True
for the param idx. The default_params and setdefault in this build method are
what make sure self._indices becomes the full arange (or not) if it is True
or False.
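
A toy illustration of that expansion, assuming a flat params dict and plain
numpy (DESC's broadcast_tree handles the general pytree case):

import numpy as np

dimensions = {"R_lmn": 5, "Z_lmn": 4}
default_params = {k: np.arange(d) for k, d in dimensions.items()}  # fix everything

user_params = {"R_lmn": True, "Z_lmn": False}  # bools instead of index arrays
indices = {
    k: default_params[k] if v is True
    else np.array([], dtype=int) if v is False
    else np.atleast_1d(v)
    for k, v in user_params.items()
}
# indices -> {"R_lmn": [0 1 2 3 4], "Z_lmn": []}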


# default target
self.target = np.zeros(self._dim_f)

Member: If you set target=0 in __init__ this should happen automatically.

Author (dpanici, Oct 24, 2024): Should I make sure that target never gets
passed in, with some sort of check in __init__? Or is not including it in the
docstring enough to say that one should never pass it in? I worry someone will
pass a target of whatever they want to fix the params to, and that will mess
up the objective, as it would not really be caught anywhere (if the target is,
say, a single number, it would be broadcastable to the dim_f size).
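
One way to address that worry, sketched as a hypothetical guard at the top of
__init__ (the check and message are illustrative, not part of this PR):

if target is not None or bounds is not None:
    raise ValueError(
        "ShareParameters always targets zero (the difference between "
        "the things' params); do not pass target or bounds."
    )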


super().build(use_jit=use_jit, verbose=verbose)

def compute(self, *args, constants=None):
"""Compute fixed degree of freedom errors.

Parameters
----------
params_1 : list of dict
First thing's list of dictionaries of degrees of freedom,
e.g. CoilSet.params_dict
params_2 : list of dict
Second thing's list of dictionaries of degrees of freedom,
e.g. CoilSet.params_dict
(further things' params are accepted as additional positional arguments)

Member: May take more than 2 things.

constants : dict
Dictionary of constant data, e.g. transforms, profiles etc. Defaults to
self.constants.

Returns
-------
f : ndarray
Fixed degree of freedom errors.

"""
# basically, just subtract each subsequent thing's params from the
# first thing's params (adding rows to dim_f if there are more than
# 2 total things) so that the Jacobian of this ends up being just
# rows with 1 in the first object's params indices and -1 in the
# second object's params indices, repeated vertically for each
# additional object, i.e. for a size-1 param being shared among 4
# objects the Jacobian looks like
# [ 1 -1 0 0]
# [ 1 0 -1 0]
# [ 1 0 0 -1]
# (see the toy verification sketched after this method)
params_1 = args[0]
return jnp.concatenate(
[
jnp.concatenate(
[
jnp.atleast_1d(param[idx])
for param, idx in zip(tree_leaves(params_1), self._indices)
]
)
- jnp.concatenate(
[
jnp.atleast_1d(param[idx])
for param, idx in zip(tree_leaves(params), self._indices)
]
)
for params in args[1:]
]
)
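
A toy check of the Jacobian structure described in the comment above, assuming
jax is installed (illustrative only, not part of the PR):

import jax
import jax.numpy as jnp

def residual(p):
    # one size-1 shared param per object, 4 objects total
    return jnp.concatenate([p[0:1] - p[i : i + 1] for i in range(1, 4)])

J = jax.jacfwd(residual)(jnp.zeros(4))
# J == [[ 1. -1.  0.  0.]
#       [ 1.  0. -1.  0.]
#       [ 1.  0.  0. -1.]]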


class BoundaryRSelfConsistency(_Objective):
"""Ensure that the boundary and interior surfaces are self-consistent.

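Before the tests, a minimal usage sketch of the new objective (a hypothetical
setup, not taken from this PR), showing the bool shorthand for params:

from desc.geometry import FourierRZToroidalSurface
from desc.objectives import ShareParameters

surf1 = FourierRZToroidalSurface()  # default circular-cross-section torus
surf2 = surf1.copy()
# constrain both surfaces to share their full R and Z spectra
constraint = ShareParameters([surf1, surf2], params={"R_lmn": True, "Z_lmn": True})
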
136 changes: 136 additions & 0 deletions tests/test_examples.py
@@ -8,6 +8,7 @@
import pytest
from qic import Qic
from qsc import Qsc
from scipy.constants import mu_0

from desc.backend import jnp, tree_leaves
from desc.coils import (
@@ -25,6 +26,7 @@
from desc.grid import LinearGrid
from desc.io import load
from desc.magnetic_fields import (
FourierCurrentPotentialField,
OmnigenousField,
SplineMagneticField,
ToroidalMagneticField,
@@ -64,6 +66,7 @@
QuadraticFlux,
QuasisymmetryBoozer,
QuasisymmetryTwoTerm,
ShareParameters,
VacuumBoundaryError,
Volume,
get_fixed_boundary_constraints,
@@ -1834,3 +1837,136 @@ def test_signed_PlasmaVesselDistance():
atol=1e-2,
err_msg="allowing eq to change",
)


@pytest.mark.unit
def test_share_parameters():
"""Test ShareParameters for a 2-surface quadratic flux optimization."""
## setup opt problem
# use QuadraticFlux since the eqs are fixed and we want the fields to change
# use ShareParameters to keep the surface geometries equal to each other
# as they vary along with the surface current to reduce Bn
eq1 = get("ESTELL")
with pytest.warns(UserWarning, match="L"):
eq1.change_resolution(8, 3, 3, 12, 6, 6)
eq2 = eq1.copy()
eq2.change_resolution(8, 2, 2, 12, 4, 4)

# secular poloidal current each surface current field must carry to supply
# the equilibrium's toroidal field (I_pol = 2*pi*G/mu_0)
necessary_G_for_eq1 = eq1.compute("G")["G"][-1] / mu_0 * 2 * np.pi
necessary_G_for_eq2 = eq2.compute("G")["G"][-1] / mu_0 * 2 * np.pi

R0 = eq1.compute("R0")["R0"]
a = 1
surf1 = FourierRZToroidalSurface(
R_lmn=[R0, a],
Z_lmn=[-a],
modes_R=[[0, 0], [1, 0]],
modes_Z=[[-1, 0]],
NFP=eq1.NFP,
)
surf1.change_resolution(M=2, N=2)
surf1 = FourierCurrentPotentialField.from_surface(
surf1, M_Phi=6, N_Phi=6, sym_Phi=False, I=0, G=necessary_G_for_eq1
)
surf2 = surf1.copy()
surf2.G = necessary_G_for_eq2

## setup opt problem
# first, keep the surfaces fixed too and just get REGCOIL-like solutions for each
surf_grid = LinearGrid(M=20, N=20, NFP=surf1.NFP)
eval_grid = LinearGrid(M=20, N=20, NFP=surf1.NFP)
obj = ObjectiveFunction(
(
QuadraticFlux(
eq1,
surf1,
field_grid=surf_grid,
eval_grid=eval_grid,
vacuum=True,
name="Bn error eq1",
),
QuadraticFlux(
eq2,
surf2,
field_grid=surf_grid,
eval_grid=eval_grid,
vacuum=True,
name="Bn error eq2",
),
)
)
constraints = (
FixParameters(surf1, {"I": True, "G": True, "R_lmn": True, "Z_lmn": True}),
FixParameters(surf2, {"I": True, "G": True, "R_lmn": True, "Z_lmn": True}),
) # fix the secular parts as well

opt = Optimizer("lsq-exact")
with pytest.warns(RuntimeWarning):  # because the trust radius is inf
(surf1, surf2), _ = opt.optimize(
[surf1, surf2],
objective=obj,
constraints=constraints,
verbose=3,
maxiter=2,
ftol=1e-8,
options={"initial_trust_radius": np.inf},
)

# then, let the surface geometry and Phi change while constraining
# the two surfaces' geometries to remain equal to each other
obj = ObjectiveFunction(
(
QuadraticFlux(
eq1,
surf1,
field_grid=surf_grid,
eval_grid=eval_grid,
vacuum=True,
name="Bn error eq1",
),
QuadraticFlux(
eq2,
surf2,
field_grid=surf_grid,
eval_grid=eval_grid,
vacuum=True,
name="Bn error eq2",
),
PlasmaVesselDistance(
eq1,
surf1,
bounds=(0.25, np.inf),
surface_grid=surf_grid,
eq_fixed=True,
name="distance error eq1",
),
PlasmaVesselDistance(
eq2,
surf2,
bounds=(0.25, np.inf),
surface_grid=surf_grid,
eq_fixed=True,
name="distance error eq2",
),
)
)
constraints = (
ShareParameters(
[surf1, surf2], params={"R_lmn": True, "Z_lmn": True}
), # make the 2 surfaces have the same geometry
FixParameters(surf1, {"I": True, "G": True}),
FixParameters(surf2, {"I": True, "G": True}),
) # fix the secular parts as well

opt = Optimizer("lsq-exact")

(surf1, surf2), _ = opt.optimize(
[surf1, surf2],
objective=obj,
constraints=constraints,
verbose=3,
maxiter=2,
ftol=1e-8,
)

np.testing.assert_allclose(surf1.R_lmn, surf2.R_lmn)
np.testing.assert_allclose(surf1.Z_lmn, surf2.Z_lmn)