6 changes: 6 additions & 0 deletions Project.toml
@@ -6,24 +6,30 @@ version = "0.1.0"
[deps]
GenericArpack = "408c25d7-97fa-4992-8bd9-ce15f1a09fe9"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
LuxurySparse = "d05aeea4-b7d4-55ac-b691-9e7fabb07ba2"
MKL = "33e6dc65-8f57-5167-99aa-e5a354878fb2"
MKLSparse = "0c723cd3-b8cd-5d40-b370-ba682dde9aae"
NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6"
Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
PolynomialRoots = "3a141323-8675-5d76-9d11-e1df1406c778"
Polynomials = "f27b6e38-b328-58d1-80ce-0feddd5e7a45"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"

[compat]
GenericArpack = "0.2"
LowRankOpt = "0.2.1"
LuxurySparse = "0.7"
MKL = "0.6, 0.7"
MKLSparse = "1, 2"
NLPModels = "0.21.5"
Parameters = "0.12"
PolynomialRoots = "1"
Polynomials = "4"
SolverCore = "0.3.8"
julia = "1"

[extras]
3 changes: 3 additions & 0 deletions exps/Project.toml
@@ -1,9 +1,12 @@
[deps]
CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
Dualization = "191a621a-6537-11e9-281d-650236a99e60"
GenericArpack = "408c25d7-97fa-4992-8bd9-ce15f1a09fe9"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LaTeXStrings = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
LuxurySparse = "d05aeea4-b7d4-55ac-b691-9e7fabb07ba2"
MKL = "33e6dc65-8f57-5167-99aa-e5a354878fb2"
MKLSparse = "0c723cd3-b8cd-5d40-b370-ba682dde9aae"
118 changes: 118 additions & 0 deletions exps/bench.jl
@@ -0,0 +1,118 @@
using Revise
using SparseArrays
using SDPLRPlus

using LowRankOpt
using Dualization
include(joinpath(dirname(dirname(pathof(LowRankOpt))), "examples", "maxcut.jl"))

n = 500
import Random
Random.seed!(0)
W = sprand(n, n, 0.01)
W = W + W'
include(joinpath(@__DIR__, "problems.jl"))
C, As, b = maxcut(W)
@time sdplr(C, As, b, 1, maxmajoriter = 20);

# SDPLRPlus does not support sparse factor
As = [SymLowRankMatrix(Diagonal(ones(1)), hcat(e_i(Float64, i, n, sparse = false))) for i in 1:n]
Review comment (Owner):

Running this bench.jl gives me the following error on this line.

ERROR: UndefKeywordError: keyword argument `vector` not assigned
Stacktrace:
 [1] (::var"#24#25")(i::Int64)
   @ Main ./none:0
 [2] iterate
   @ ./generator.jl:47 [inlined]
 [3] collect(itr::Base.Generator{UnitRange{Int64}, var"#24#25"})
   @ Base ./array.jl:834
 [4] top-level scope
   @ /u/subspace_s4/huan1754/SDPLRPlus.jl/exps/bench.jl:19

I saw that `vector` has no default value in LowRankOpt.jl (see the `e_i` function there). Is this the cause?
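
A minimal sketch of a possible workaround, assuming `e_i` accepts a Bool `vector` keyword in the same way as `sparse` (this is an unverified assumption about LowRankOpt's signature):

# Hedged sketch: pass the `vector` keyword explicitly so the generator does not hit
# the UndefKeywordError; whether `vector = false` is the right choice here is an
# assumption and needs checking against LowRankOpt's `e_i`.
As = [SymLowRankMatrix(Diagonal(ones(1)),
                       hcat(e_i(Float64, i, n; sparse = false, vector = false)))
      for i in 1:n]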

d = SDPLRPlus.SDPData(C, As, b)
var = SDPLRPlus.SolverVars(d, 1)
aux = SDPLRPlus.SolverAuxiliary(d)
@time sdplr(C, As, b, 1, maxmajoriter = 50);

model = maxcut(W, dual_optimizer(LRO.Optimizer))
set_attribute(model, "solver", LRO.BurerMonteiro.Solver)
set_attribute(model, "sub_solver", SDPLRPlus.Solver)
set_attribute(model, "ranks", [1])
set_attribute(model, "maxmajoriter", 0)
set_attribute(model, "printlevel", 3)
@profview optimize!(model)
Review comment (Owner):

On this line I get an MKLSparseError that I'm quite confused about; do you have any clue? The same error keeps showing up in the benchmarking code below, too.

ERROR: MKLSparseError(SPARSE_STATUS_INVALID_VALUE)
Stacktrace:
  [1] check_status
    @ ~/.julia/packages/MKLSparse/EWj0I/src/types.jl:237 [inlined]
  [2] #MKLSparseMatrix#9
    @ ~/.julia/packages/MKLSparse/EWj0I/src/mklsparsematrix.jl:173 [inlined]
  [3] MKLSparseMatrix
    @ ~/.julia/packages/MKLSparse/EWj0I/src/mklsparsematrix.jl:167 [inlined]
  [4] mv!(transA::Char, alpha::Float64, A::SparseMatrixCSC{…}, descr::MKLSparse.matrix_descr, x::SubArray{…}, beta::Float64, y::SubArray{…})
    @ MKLSparse ~/.julia/packages/MKLSparse/EWj0I/src/generic.jl:33
  [5] mul!
    @ ~/.julia/packages/MKLSparse/EWj0I/src/interface.jl:63 [inlined]
  [6] mul!
    @ ~/.julia/packages/MKLSparse/EWj0I/src/interface.jl:107 [inlined]
  [7] jprod!
    @ ~/.julia/packages/LowRankOpt/UcxnZ/src/model.jl:216 [inlined]
  [8] jprod!(model::LowRankOpt.Model{…}, x::LowRankOpt.BurerMonteiro.Solution{…}, v::LowRankOpt.BurerMonteiro.Solution{…}, Jv::SubArray{…})
    @ LowRankOpt ~/.julia/packages/LowRankOpt/UcxnZ/src/model.jl:27
  [9] cons!
    @ ~/.julia/packages/LowRankOpt/UcxnZ/src/BurerMonteiro/model.jl:109 [inlined]
 [10] 𝒜!(𝒜_UUt::Vector{…}, model::LowRankOpt.BurerMonteiro.Model{…}, x::Vector{…})
    @ SDPLRPlus /u/subspace_s4/huan1754/SDPLRPlus.jl/src/lowrankopt.jl:75
 [11] f!(data::LowRankOpt.BurerMonteiro.Model{…}, var::SDPLRPlus.SolverVars{…}, aux::LowRankOpt.BurerMonteiro.Model{…})
    @ SDPLRPlus /u/subspace_s4/huan1754/SDPLRPlus.jl/src/coreop.jl:16
 [12] macro expansion
    @ /u/subspace_s4/huan1754/SDPLRPlus.jl/src/coreop.jl:347 [inlined]
 [13] macro expansion
    @ ./timing.jl:395 [inlined]
 [14] fg!(data::LowRankOpt.BurerMonteiro.Model{…}, var::SDPLRPlus.SolverVars{…}, aux::LowRankOpt.BurerMonteiro.Model{…}, normC::Float64, normb::Float64)
    @ SDPLRPlus /u/subspace_s4/huan1754/SDPLRPlus.jl/src/coreop.jl:346
 [15] _sdplr(data::LowRankOpt.BurerMonteiro.Model{…}, var::SDPLRPlus.SolverVars{…}, aux::LowRankOpt.BurerMonteiro.Model{…}, stats::SDPLRPlus.SolverStats{…}, config::SDPLRPlus.BurerMonteiroConfig{…})
    @ SDPLRPlus /u/subspace_s4/huan1754/SDPLRPlus.jl/src/sdplr.jl:143
 [16] solve!(solver::SDPLRPlus.Solver, model::LowRankOpt.BurerMonteiro.Model{…}, stats::SolverCore.GenericExecutionStats{…}; kwargs::@Kwargs{…})
    @ SDPLRPlus /u/subspace_s4/huan1754/SDPLRPlus.jl/src/lowrankopt.jl:40
 [17] solve!
    @ /u/subspace_s4/huan1754/SDPLRPlus.jl/src/lowrankopt.jl:26 [inlined]
 [18] #solve!#7
    @ ~/.julia/packages/LowRankOpt/UcxnZ/src/BurerMonteiro/solver.jl:25 [inlined]
 [19] optimize!(model::LowRankOpt.Optimizer{Float64})
    @ LowRankOpt ~/.julia/packages/LowRankOpt/UcxnZ/src/MOI_wrapper.jl:142
 [20] optimize!
    @ ~/.julia/packages/MathOptInterface/zq9bo/src/MathOptInterface.jl:122 [inlined]
 [21] optimize!(m::MathOptInterface.Utilities.CachingOptimizer{…})
    @ MathOptInterface.Utilities ~/.julia/packages/MathOptInterface/zq9bo/src/Utilities/cachingoptimizer.jl:370
 [22] optimize!
    @ ~/.julia/packages/MathOptInterface/zq9bo/src/Bridges/bridge_optimizer.jl:367 [inlined]
 [23] optimize!
    @ ~/.julia/packages/Dualization/ihzlf/src/MOI_wrapper.jl:255 [inlined]
 [24] optimize!
    @ ~/.julia/packages/MathOptInterface/zq9bo/src/MathOptInterface.jl:122 [inlined]
 [25] optimize!(m::MathOptInterface.Utilities.CachingOptimizer{…})
    @ MathOptInterface.Utilities ~/.julia/packages/MathOptInterface/zq9bo/src/Utilities/cachingoptimizer.jl:370
 [26] optimize!
    @ ~/.julia/packages/MathOptInterface/zq9bo/src/Bridges/bridge_optimizer.jl:367 [inlined]
 [27] optimize!
    @ ~/.julia/packages/MathOptInterface/zq9bo/src/MathOptInterface.jl:122 [inlined]
 [28] optimize!(m::MathOptInterface.Utilities.CachingOptimizer{…})
    @ MathOptInterface.Utilities ~/.julia/packages/MathOptInterface/zq9bo/src/Utilities/cachingoptimizer.jl:370
 [29] optimize!(model::Model; ignore_optimize_hook::Bool, _differentiation_backend::MathOptInterface.Nonlinear.SparseReverseMode, kwargs::@Kwargs{})
    @ JuMP ~/.julia/packages/JuMP/N7h14/src/optimizer_interface.jl:609
 [30] optimize!(model::Model)
    @ JuMP ~/.julia/packages/JuMP/N7h14/src/optimizer_interface.jl:560
 [31] macro expansion
    @ /u/subspace_s4/software/julia-1.10.1/share/julia/stdlib/v1.10/Profile/src/Profile.jl:27 [inlined]
 [32] macro expansion
    @ ~/.cursor-server/extensions/julialang.language-julia-1.149.2-universal/scripts/packages/VSCodeServer/src/profiler.jl:141 [inlined]
 [33] top-level scope
    @ /u/subspace_s4/huan1754/SDPLRPlus.jl/exps/bench.jl:31
Some type information was truncated. Use `show(err)` to see complete types.
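
If it helps to narrow this down, here is a hedged, self-contained reproduction sketch; the strided `view` stands in for the `SubArray` that `jprod!` passes, and whether MKLSparse's `mv!` rejects non-contiguous views is only a guess:

using SparseArrays, LinearAlgebra, MKLSparse

A = sprand(100, 100, 0.05)
x = rand(100)
buf = zeros(300)
y_contig = view(buf, 1:100)        # contiguous view
y_strided = view(buf, 1:3:300)     # non-contiguous (strided) view with 100 elements

mul!(y_contig, A, x, 1.0, 0.0)     # if this succeeds...
mul!(y_strided, A, x, 1.0, 0.0)    # ...and this raises SPARSE_STATUS_INVALID_VALUE,
                                   # the strided SubArray is the likely culprit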

solver = unsafe_backend(model).dual_problem.dual_model.model.optimizer.solver

using BenchmarkTools
function A_sym_bench(aux, var, nlp, var_lro)
    println("𝒜 sym")
    @btime SDPLRPlus.𝒜!(var.primal_vio, aux, var.Rt)
    @btime SDPLRPlus.𝒜!(var_lro.primal_vio, nlp, var_lro.Rt)
end
function A_not_sym_bench(aux, var, nlp, var_lro)
    println("𝒜 not sym")
    @btime SDPLRPlus.𝒜!(var.A_RD, aux, var.Rt, var.Gt)
    @btime SDPLRPlus.𝒜!(var_lro.A_RD, nlp, var_lro.Rt, var_lro.Gt)
end
function At_bench(aux, var, nlp, var_lro)
    println("𝒜t")
    @btime SDPLRPlus.𝒜t!($var.Gt, $var.Rt, $aux, $var)
    @btime SDPLRPlus.𝒜t!($var_lro.Gt, $var_lro.Rt, $nlp, $var_lro)
end
function At_bench2(aux, var, nlp, var_lro)
    println("𝒜t rank-1")
    x = rand(n)
    y = similar(x)
    @time SDPLRPlus.𝒜t!(y, aux, x, var)
    @time SDPLRPlus.𝒜t!(y, nlp, x, var_lro)
end
nlp = solver.model
var_lro = solver.solver.var
A_sym_bench(aux, var, solver.model, solver.solver.var)
A_not_sym_bench(aux, var, solver.model, solver.solver.var)
At_bench(aux, var, solver.model, solver.solver.var)
At_bench2(aux, var, solver.model, solver.solver.var)
@profview SDPLRPlus.𝒜!(var.primal_vio, aux, var.Rt)
@profview for i in 1:100
    SDPLRPlus.𝒜!(var_lro.primal_vio, nlp, var_lro.Rt)
end
@time SDPLRPlus.𝒜!(var_lro.primal_vio, nlp, var_lro.Rt)
@time SDPLRPlus.𝒜t!(var_lro.Gt, var_lro.Rt, nlp, var_lro)
@profview for _ in 1:1000
    SDPLRPlus.𝒜t!(var_lro.Gt, var_lro.Rt, nlp, var_lro)
end
function bench_lmul(A)
    n = LinearAlgebra.checksquare(A)
    x = rand(1, n)
    y = similar(x)
    @profview for i in 1:100000
        LinearAlgebra.mul!(y, x, A, 2.0, 1.0)
    end
    #@btime LinearAlgebra.mul!($y, $x, $A, 2.0, 1.0)
end
function bench_rmul(A)
    n = LinearAlgebra.checksquare(A)
    x = rand(n, 1)
    y = similar(x)
    @btime LinearAlgebra.mul!($y, $A, $x, 2.0, 1.0)
end
bench_lmul(aux.symlowrank_As[1]);
A = aux.symlowrank_As[1]
n = LinearAlgebra.checksquare(A)
x = rand(1, n)
y = similar(x)
LinearAlgebra.mul!(y, x, A, 2.0, 1.0)

bench_rmul(nlp.model.A[1]);
bench_lmul(nlp.model.A[1])
A = nlp.model.A[1]
n = LinearAlgebra.checksquare(A)
x = rand(n, 1)
y = similar(x)
@time LinearAlgebra.mul!(y, A, x, 2.0, 1.0);
@btime LinearAlgebra.mul!($y, $A, $x, 2.0, 1.0);
methods(LinearAlgebra.mul!, typeof.((y, A, x, 2.0, 1.0)))

A = nlp.model.A[1]
n = LinearAlgebra.checksquare(A)
x = rand(n)
y = similar(x)
x = rand(n)
y = similar(x)
@btime LinearAlgebra.mul!($y, $A, $x, 2.0, 1.0);

C = LRO._lmul_diag!!(A.scaling, LRO.right_factor(A)' * x)
lA = LRO.left_factor(A)
@btime LinearAlgebra.mul!($y, $C, $lA)
@edit LinearAlgebra.mul!(y, lA, C);
@edit LinearAlgebra.mul!(y, lA, C, true, true);
@edit LinearAlgebra._rscale_add!(y, lA, C, true, true);
@edit LinearAlgebra.mul!(y, lA, C, 2.0, 1.0);
3 changes: 2 additions & 1 deletion src/SDPLRPlus.jl
@@ -41,5 +41,6 @@ include("utils.jl")
# main function
include("sdplr.jl")

include("lowrankopt.jl")

end
end