diff --git a/src/diff_opt.jl b/src/diff_opt.jl index 9801c3c2f..d288d8aa5 100644 --- a/src/diff_opt.jl +++ b/src/diff_opt.jl @@ -799,8 +799,8 @@ function _assign_mapped!(x, y, r, k, ::Flattened) end # Map the rows corresponding to `F`-in-`S` constraints and store it in `x`. -function _map_rows!(f::Function, x::Vector, model, conic_form::MatOI.GeometricConicForm, index_map::MOI.IndexMap, map_mode, k) - for ci in MOI.get(model, MOI.ListOfConstraintIndices{F, S}()) # TODO how to get F-S now? +function _map_rows!(f::Function, x::Vector, model, conic_form::MatOI.GeometricConicForm, index_map::MOIU.DoubleDicts.IndexDoubleDictInner{F, S}, map_mode, k) where {F, S} + for ci in MOI.get(model, MOI.ListOfConstraintIndices{F, S}()) r = MatOI.rows(conic_form, index_map[ci]) k += 1 _assign_mapped!(x, f(ci, r), r, k, map_mode) @@ -828,7 +828,7 @@ function map_rows(f::Function, model, conic_form::MatOI.GeometricConicForm, inde k = 0 for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent()) # Function barrier for type unstability of `F` and `S` - # `conmap` is a `MOIU.DoubleDicts.MainIndexDoubleDict`, we index it at `F, S` + # `con_map` is a `MOIU.DoubleDicts.MainIndexDoubleDict`, we index it at `F, S` # which returns a `MOIU.DoubleDicts.IndexWithType{F, S}` which is type stable. # If we have a small number of different constraint types and many # constraint of each type, this mostly removes type unstabilities @@ -920,14 +920,14 @@ function _fill(neg::Function, model::Optimizer, conic_form, args...) _fill(S -> true, neg, model, conic_form, args...) end function _fill(filter::Function, neg::Function, model::Optimizer, conic_form, args...) 
- conmap = model.gradient_cache.index_map.con_map - varmap = model.gradient_cache.index_map.var_map + con_map = model.gradient_cache.index_map.con_map + var_map = model.gradient_cache.index_map.var_map for (F, S) in MOI.get(model, MOI.ListOfConstraintTypesPresent()) filter(S) || continue if F == MOI.ScalarAffineFunction{Float64} - _fill(args..., neg(S), conic_form, conmap[F,S], varmap, model.input_cache.scalar_constraints[F,S]) + _fill(args..., neg(S), conic_form, con_map[F,S], var_map, model.input_cache.scalar_constraints[F,S]) elseif F == MOI.VectorAffineFunction{Float64} - _fill(args..., neg(S), conic_form, conmap[F,S], varmap, model.input_cache.vector_constraints[F,S]) + _fill(args..., neg(S), conic_form, con_map[F,S], var_map, model.input_cache.vector_constraints[F,S]) end end return diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 3cb18630e..adbf76501 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -192,8 +192,7 @@ end # helper methods to check if a constraint contains a Variable function _constraint_contains(model::Optimizer, v::VI, ci::CI{MOI.VariableIndex, S}) where {S <: SUPPORTED_SCALAR_SETS} - func = MOI.get(model, MOI.ConstraintFunction(), ci) - return v == func.variable + return v == MOI.get(model, MOI.ConstraintFunction(), ci) end function _constraint_contains(model::Optimizer, v::VI, ci::CI{MOI.ScalarAffineFunction{Float64}, S}) where {S <: SUPPORTED_SCALAR_SETS} diff --git a/src/quadratic_diff.jl b/src/quadratic_diff.jl index df9e6f25b..a5cd3aa1c 100644 --- a/src/quadratic_diff.jl +++ b/src/quadratic_diff.jl @@ -240,7 +240,7 @@ function get_problem_data(model::MOI.AbstractOptimizer) con = ge_con_sv_idx[i] func = MOI.get(model, MOI.ConstraintFunction(), con) set =
MOI.get(model, MOI.ConstraintSet(), con) - vidx = findfirst(v -> v == func, var_list) + vidx = findfirst(isequal(func), var_list) G[i+nineq_le+nineq_ge+nineq_sv_le,vidx] = -1 h[i+nineq_le+nineq_ge+nineq_sv_le] = -MOI.constant(set) ineq_cont += 1 @@ -276,7 +276,7 @@ function get_problem_data(model::MOI.AbstractOptimizer) for x in func.terms # never nothing, variable is present - vidx = findfirst(v -> v == x.variable, var_list) + vidx = findfirst(isequal(x.variable), var_list) A[i, vidx] = x.coefficient end b[i] = set.value - func.constant @@ -289,7 +289,7 @@ function get_problem_data(model::MOI.AbstractOptimizer) con = eq_con_sv_idx[i] func = MOI.get(model, MOI.ConstraintFunction(), con) set = MOI.get(model, MOI.ConstraintSet(), con) - vidx = findfirst(v -> v == func, var_list) + vidx = findfirst(isequal(func), var_list) A[i+neq,vidx] = 1 b[i+neq] = set.value eq_cont += 1 diff --git a/test/moi_wrapper.jl b/test/moi_wrapper.jl index d4ff65794..3c3deaf61 100644 --- a/test/moi_wrapper.jl +++ b/test/moi_wrapper.jl @@ -633,7 +633,7 @@ function simple_psd(solver) # test2: changing X[1], X[3] but keeping the objective (their sum) same MOI.set(model, DiffOpt.ForwardInConstraint(), c, MOIU.zero_with_output_dimension(MOI.VectorAffineFunction{Float64}, 1)) - MOI.set(model, DiffOpt.ForwardInObjective(), -1.0fX[1] + 1.0fX[3]) + MOI.set(model, DiffOpt.ForwardInObjective(), -1.0X[1] + 1.0X[3]) DiffOpt.forward(model) @@ -897,7 +897,7 @@ end MOI.set(model, DiffOpt.ForwardInConstraint(), c1, MOIU.zero_with_output_dimension(VAF, 1)) MOI.set(model, - DiffOpt.ForwardInConstraint(), c2, MOIU.vectorize(ones(6) .* fx[1:6])) + DiffOpt.ForwardInConstraint(), c2, MOIU.vectorize(ones(6) .* x[1:6])) MOI.set(model, DiffOpt.ForwardInConstraint(), c3, MOIU.zero_with_output_dimension(VAF, 3)) MOI.set(model, diff --git a/test/utils.jl b/test/utils.jl index 1fb24634e..dc24e4bf1 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -145,10 +145,10 @@ function qp_test( end @_test(convert(Vector{Float64}, 
_λ), λ) - #dobjb = fv' * (dQb / 2.0) * fv + dqb' * fv + #dobjb = v' * (dQb / 2.0) * v + dqb' * v # TODO, it should .- - #dleb = dGb * fv .+ dhb - #deqb = dAb * fv .+ dbb + #dleb = dGb * v .+ dhb + #deqb = dAb * v .+ dbb @assert dzb !== nothing @testset "Backward pass" begin MOI.set.(model, DiffOpt.BackwardInVariablePrimal(), v, dzb) @@ -230,7 +230,7 @@ function qp_test( func = deqf[length(ceq)+j] canonicalize && MOI.Utilities.canonicalize!(func) if set_zero || !MOI.iszero(func) - # TODO FIXME should work if we drop support for SingleVariable and we let the Functionize bridge do the work + # TODO FIXME should work if we drop support for `VariableIndex` and we let the Functionize bridge do the work @test_throws MOI.UnsupportedAttribute MOI.set(model, DiffOpt.ForwardInConstraint(), jc, func) end end