Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions src/diff_opt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -799,8 +799,8 @@ function _assign_mapped!(x, y, r, k, ::Flattened)
end

# Map the rows corresponding to `F`-in-`S` constraints and store it in `x`.
function _map_rows!(f::Function, x::Vector, model, conic_form::MatOI.GeometricConicForm, index_map::MOI.IndexMap, map_mode, k)
for ci in MOI.get(model, MOI.ListOfConstraintIndices{F, S}()) # TODO how to get F-S now?
function _map_rows!(f::Function, x::Vector, model, conic_form::MatOI.GeometricConicForm, index_map::MOIU.DoubleDicts.IndexDoubleDictInner{F, S}, map_mode, k) where {F, S}
for ci in MOI.get(model, MOI.ListOfConstraintIndices{F, S}())
r = MatOI.rows(conic_form, index_map[ci])
k += 1
_assign_mapped!(x, f(ci, r), r, k, map_mode)
Expand Down Expand Up @@ -828,7 +828,7 @@ function map_rows(f::Function, model, conic_form::MatOI.GeometricConicForm, inde
k = 0
for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent())
# Function barrier for type instability of `F` and `S`
# `conmap` is a `MOIU.DoubleDicts.MainIndexDoubleDict`, we index it at `F, S`
# `con_map` is a `MOIU.DoubleDicts.MainIndexDoubleDict`, we index it at `F, S`
# which returns a `MOIU.DoubleDicts.IndexWithType{F, S}` which is type stable.
# If we have a small number of different constraint types and many
# constraints of each type, this mostly removes type instabilities
Expand Down Expand Up @@ -920,14 +920,14 @@ function _fill(neg::Function, model::Optimizer, conic_form, args...)
_fill(S -> true, neg, model, conic_form, args...)
end
function _fill(filter::Function, neg::Function, model::Optimizer, conic_form, args...)
conmap = model.gradient_cache.index_map.con_map
varmap = model.gradient_cache.index_map.var_map
con_map = model.gradient_cache.index_map.con_map
var_map = model.gradient_cache.index_map.var_map
for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent())
filter(S) || continue
if F == MOI.ScalarAffineFunction{Float64}
_fill(args..., neg(S), conic_form, conmap[F,S], varmap, model.input_cache.scalar_constraints[F,S])
_fill(args..., neg(S), conic_form, con_map[F,S], var_map, model.input_cache.scalar_constraints[F,S])
elseif F == MOI.VectorAffineFunction{Float64}
_fill(args..., neg(S), conic_form, conmap[F,S], varmap, model.input_cache.vector_constraints[F,S])
_fill(args..., neg(S), conic_form, con_map[F,S], var_map, model.input_cache.vector_constraints[F,S])
end
end
return
Expand Down
4 changes: 2 additions & 2 deletions src/moi_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@ function MOI.copy_to(model::Optimizer, src::MOI.ModelLike)
end

# Forward the `TerminationStatus` attribute query to the inner optimizer.
# Returns the inner optimizer's `MOI.TerminationStatusCode` unchanged.
function MOI.get(model::Optimizer, ::MOI.TerminationStatus)
    # NOTE: a leftover `@show` debug statement was removed here; it printed the
    # status to stdout on every query, which pollutes user output and slows
    # hot query paths. Use `@debug` via the Logging stdlib if tracing is needed.
    return MOI.get(model.optimizer, MOI.TerminationStatus())
end

Expand Down Expand Up @@ -192,8 +193,7 @@ end

# helper methods to check if a constraint contains a Variable
# Return `true` if the `MOI.VariableIndex`-in-`S` constraint `ci` is the
# variable-bound constraint on variable `v`.
#
# For a `VariableIndex` constraint, `MOI.ConstraintFunction` returns the
# constrained `MOI.VariableIndex` itself, so equality with `v` is the whole
# containment check. (The previous two-step form read a `.variable` field that
# only existed on the deprecated `SingleVariable` function type, and the merge
# left an unreachable duplicate `return` — both removed.)
function _constraint_contains(model::Optimizer, v::VI, ci::CI{MOI.VariableIndex, S}) where {S <: SUPPORTED_SCALAR_SETS}
    return v == MOI.get(model, MOI.ConstraintFunction(), ci)
end

function _constraint_contains(model::Optimizer, v::VI, ci::CI{MOI.ScalarAffineFunction{Float64}, S}) where {S <: SUPPORTED_SCALAR_SETS}
Expand Down
6 changes: 3 additions & 3 deletions src/quadratic_diff.jl
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ function get_problem_data(model::MOI.AbstractOptimizer)
con = ge_con_sv_idx[i]
func = MOI.get(model, MOI.ConstraintFunction(), con)
set = MOI.get(model, MOI.ConstraintSet(), con)
vidx = findfirst(v -> v == func, var_list)
vidx = findfirst(isequal(func), var_list)
G[i+nineq_le+nineq_ge+nineq_sv_le,vidx] = -1
h[i+nineq_le+nineq_ge+nineq_sv_le] = -MOI.constant(set)
ineq_cont += 1
Expand Down Expand Up @@ -276,7 +276,7 @@ function get_problem_data(model::MOI.AbstractOptimizer)

for x in func.terms
# never nothing, variable is present
vidx = findfirst(v -> v == x.variable, var_list)
vidx = findfirst(isequal(x.variable), var_list)
A[i, vidx] = x.coefficient
end
b[i] = set.value - func.constant
Expand All @@ -289,7 +289,7 @@ function get_problem_data(model::MOI.AbstractOptimizer)
con = eq_con_sv_idx[i]
func = MOI.get(model, MOI.ConstraintFunction(), con)
set = MOI.get(model, MOI.ConstraintSet(), con)
vidx = findfirst(v -> v == func, var_list)
vidx = findfirst(isequal(func), var_list)
A[i+neq,vidx] = 1
b[i+neq] = set.value
eq_cont += 1
Expand Down
4 changes: 2 additions & 2 deletions test/moi_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -633,7 +633,7 @@ function simple_psd(solver)

# test2: changing X[1], X[3] but keeping the objective (their sum) same
MOI.set(model, DiffOpt.ForwardInConstraint(), c, MOIU.zero_with_output_dimension(MOI.VectorAffineFunction{Float64}, 1))
MOI.set(model, DiffOpt.ForwardInObjective(), -1.0fX[1] + 1.0fX[3])
MOI.set(model, DiffOpt.ForwardInObjective(), -1.0X[1] + 1.0X[3])

DiffOpt.forward(model)

Expand Down Expand Up @@ -897,7 +897,7 @@ end
MOI.set(model,
DiffOpt.ForwardInConstraint(), c1, MOIU.zero_with_output_dimension(VAF, 1))
MOI.set(model,
DiffOpt.ForwardInConstraint(), c2, MOIU.vectorize(ones(6) .* fx[1:6]))
DiffOpt.ForwardInConstraint(), c2, MOIU.vectorize(ones(6) .* x[1:6]))
MOI.set(model,
DiffOpt.ForwardInConstraint(), c3, MOIU.zero_with_output_dimension(VAF, 3))
MOI.set(model,
Expand Down
8 changes: 4 additions & 4 deletions test/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -145,10 +145,10 @@ function qp_test(
end
@_test(convert(Vector{Float64}, _λ), λ)

#dobjb = fv' * (dQb / 2.0) * fv + dqb' * fv
#dobjb = v' * (dQb / 2.0) * v + dqb' * v
# TODO, it should .-
#dleb = dGb * fv .+ dhb
#deqb = dAb * fv .+ dbb
#dleb = dGb * v .+ dhb
#deqb = dAb * v .+ dbb
@assert dzb !== nothing
@testset "Backward pass" begin
MOI.set.(model, DiffOpt.BackwardInVariablePrimal(), v, dzb)
Expand Down Expand Up @@ -230,7 +230,7 @@ function qp_test(
func = deqf[length(ceq)+j]
canonicalize && MOI.Utilities.canonicalize!(func)
if set_zero || !MOI.iszero(func)
# TODO FIXME should work if we drop support for SingleVariable and we let the Functionize bridge do the work
# TODO FIXME should work if we drop support for `VariableIndex` and we let the Functionize bridge do the work
@test_throws MOI.UnsupportedAttribute MOI.set(model, DiffOpt.ForwardInConstraint(), jc, func)
end
end
Expand Down