From 71856b28a45c57bc77a49a57c423fb50775d5b98 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Fri, 21 Feb 2025 03:24:06 -0300 Subject: [PATCH 1/6] initial sketch --- src/jump_wrapper.jl | 63 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 src/jump_wrapper.jl diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl new file mode 100644 index 00000000..b90d87f6 --- /dev/null +++ b/src/jump_wrapper.jl @@ -0,0 +1,63 @@ +""" +""" +function diff_model( + optimizer_constructor; + method = nothing, + with_parametric_opt_interface::Bool = false, + with_bridge_type = Float64, + with_cache::Bool = true, +) + inner = diff_optimizer( + optimizer_constructor; + method = method, + with_parametric_opt_interface = with_parametric_opt_interface, + with_bridge_type = with_bridge_type, + with_cache = with_cache, + ) + return JuMP.direct_model(inner) +end + +# nonlinear_diff_model +# conic_diff_model +# quadratic_diff_model + +""" +""" +function set_forward_parameter( + model::JuMP.Model, + variable::JuMP.VariableRef, + value::Number, +) + return MOI.set( + model, + DiffOpt.ForwardConstraintSet(), + ParameterRef(variable), + value, + ) +end + +""" +""" +function get_reverse_parameter(model::JuMP.Model, variable::JuMP.VariableRef) + return MOI.get( + model, + DiffOpt.ReverseConstraintSet(), + ParameterRef(variable), + ) +end + +""" +""" +function set_reverse_variable( + model::JuMP.Model, + variable::JuMP.VariableRef, + value::Number, +) + return MOI.set(model, DiffOpt.ReverseVariablePrimal(), variable, value) +end + +""" +""" +function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef) + return MOI.get(model, DiffOpt.ForwardVariablePrimal(), variable) +end From c54f32161e12a0fd05698ca50e45612a0f53fe78 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Fri, 21 Feb 2025 03:24:53 -0300 Subject: [PATCH 2/6] rm method --- src/jump_wrapper.jl | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl index b90d87f6..8dfb4695 100644 --- a/src/jump_wrapper.jl +++ b/src/jump_wrapper.jl @@ -2,14 +2,12 @@ """ function diff_model( optimizer_constructor; - method = nothing, with_parametric_opt_interface::Bool = false, with_bridge_type = Float64, with_cache::Bool = true, ) inner = diff_optimizer( optimizer_constructor; - method = method, with_parametric_opt_interface = with_parametric_opt_interface, with_bridge_type = with_bridge_type, with_cache = with_cache, From b00ddfbf8f5a325109c85b64c1ad82bb922cd034 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sat, 22 Feb 2025 02:16:14 -0300 Subject: [PATCH 3/6] add JuMP API and test it --- README.md | 54 +++++++------------ src/DiffOpt.jl | 2 + src/jump_wrapper.jl | 106 ++++++++++++++++++++++++++++++++----- src/moi_wrapper.jl | 2 +- test/jump_wrapper.jl | 123 +++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 237 insertions(+), 50 deletions(-) create mode 100644 test/jump_wrapper.jl diff --git a/README.md b/README.md index 03989444..5913411d 100644 --- a/README.md +++ b/README.md @@ -33,15 +33,12 @@ examples, tutorials, and an API reference. ### DiffOpt-JuMP API with `Parameters` +Here is an example with a Parametric **Linear Program**: + ```julia using JuMP, DiffOpt, HiGHS -model = Model( - () -> DiffOpt.diff_optimizer( - HiGHS.Optimizer; - with_parametric_opt_interface = true, - ), -) +model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer) set_silent(model) p_val = 4.0 @@ -64,9 +61,9 @@ optimize!(model) # differentiate w.r.t. 
p direction_p = 3.0 -MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(direction_p)) +DiffOpt.set_forward_parameter(model, p, direction_p) DiffOpt.forward_differentiate!(model) -@show MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) == direction_p * 3 / pc_val +@show DiffOpt.get_forward_variable(model, x) == direction_p * 3 / pc_val # update p and pc p_val = 2.0 @@ -82,45 +79,30 @@ optimize!(model) DiffOpt.empty_input_sensitivities!(model) # differentiate w.r.t. pc direction_pc = 10.0 -MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(pc), Parameter(direction_pc)) +DiffOpt.set_forward_parameter(model, pc, direction_pc) DiffOpt.forward_differentiate!(model) -@show abs(MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) - +@show abs(DiffOpt.get_forward_variable(model, x) - -direction_pc * 3 * p_val / pc_val^2) < 1e-5 # always a good practice to clear previously set sensitivities DiffOpt.empty_input_sensitivities!(model) # Now, reverse model AD direction_x = 10.0 -MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_x) +DiffOpt.set_reverse_variable(model, x, direction_x) DiffOpt.reverse_differentiate!(model) -@show MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)) == MOI.Parameter(direction_x * 3 / pc_val) -@show abs(MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(pc)).value - - -direction_x * 3 * p_val / pc_val^2) < 1e-5 +@show DiffOpt.get_reverse_parameter(model, p) == direction_x * 3 / pc_val +@show DiffOpt.get_reverse_parameter(model, pc) == -direction_x * 3 * p_val / pc_val^2 ``` -### Low level DiffOpt-JuMP API: - -A brief example: +Available models: +* `DiffOpt.quadratic_diff_model`: Quadratic Programs (QP) and Linear Programs +(LP) +* `DiffOpt.conic_diff_model`: Conic Programs (CP) and Linear Programs (LP) +* `DiffOpt.nonlinear_diff_model`: Nonlinear Programs (NLP), Quadratic Program +(QP) and Linear Programs (LP) +* `DiffOpt.diff_model`: Nonlinear Programs (NLP), Conic Programs (CP), +Quadratic Programs (QP) and Linear Programs (LP) -```julia -using JuMP, DiffOpt, HiGHS -# Create a model using the wrapper -model = Model(() -> DiffOpt.diff_optimizer(HiGHS.Optimizer)) -# Define your model and solve it -@variable(model, x) -@constraint(model, cons, x >= 3) -@objective(model, Min, 2x) -optimize!(model) -# Choose the problem parameters to differentiate with respect to, and set their -# perturbations. -MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0) -# Differentiate the model -DiffOpt.reverse_differentiate!(model) -# fetch the gradients -grad_exp = MOI.get(model, DiffOpt.ReverseConstraintFunction(), cons) # -3 x - 1 -constant(grad_exp) # -1 -coefficient(grad_exp, x) # -3 -``` ## Citing DiffOpt.jl diff --git a/src/DiffOpt.jl b/src/DiffOpt.jl index 0dce78d9..27f16d5e 100644 --- a/src/DiffOpt.jl +++ b/src/DiffOpt.jl @@ -48,6 +48,8 @@ function add_default_factorization(model) return end +include("jump_wrapper.jl") + export diff_optimizer # TODO diff --git a/src/jump_wrapper.jl b/src/jump_wrapper.jl index 8dfb4695..24c4c55c 100644 --- a/src/jump_wrapper.jl +++ b/src/jump_wrapper.jl @@ -1,8 +1,15 @@ """ + diff_model(optimizer_constructor; with_parametric_opt_interface::Bool = true, with_bridge_type = Float64, with_cache::Bool = true) + +Create a JuMP model with a differentiable optimizer. The optimizer is created +using `optimizer_constructor`. This model will try to select the proper +differentiable optimization method based on the problem structure. 
+ +See also: [`nonlinear_diff_model`](@ref), [`conic_diff_model`](@ref), [`quadratic_diff_model`](@ref). """ function diff_model( optimizer_constructor; - with_parametric_opt_interface::Bool = false, + with_parametric_opt_interface::Bool = true, with_bridge_type = Float64, with_cache::Bool = true, ) @@ -15,11 +22,79 @@ function diff_model( return JuMP.direct_model(inner) end -# nonlinear_diff_model -# conic_diff_model -# quadratic_diff_model +""" + nonlinear_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true) + +Create a JuMP model with a differentiable optimizer for nonlinear programs. +The optimizer is created using `optimizer_constructor`. + +See also: [`conic_diff_model`](@ref), [`quadratic_diff_model`](@ref), [`diff_model`](@ref). +""" +function nonlinear_diff_model( + optimizer_constructor; + with_bridge_type = Float64, + with_cache::Bool = true, +) + inner = diff_optimizer( + optimizer_constructor; + with_parametric_opt_interface = false, + with_bridge_type = with_bridge_type, + with_cache = with_cache, + ) + MOI.set(inner, ModelConstructor(), NonLinearProgram.Model) + return JuMP.direct_model(inner) +end + +""" + conic_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true) + +Create a JuMP model with a differentiable optimizer for conic programs. +The optimizer is created using `optimizer_constructor`. + +See also: [`nonlinear_diff_model`](@ref), [`quadratic_diff_model`](@ref), [`diff_model`](@ref). +""" +function conic_diff_model( + optimizer_constructor; + with_bridge_type = Float64, + with_cache::Bool = true, +) + inner = diff_optimizer( + optimizer_constructor; + with_parametric_opt_interface = true, + with_bridge_type = with_bridge_type, + with_cache = with_cache, + ) + MOI.set(inner, ModelConstructor(), ConicProgram.Model) + return JuMP.direct_model(inner) +end + +""" + quadratic_diff_model(optimizer_constructor; with_bridge_type = Float64, with_cache::Bool = true) + +Create a JuMP model with a differentiable optimizer for quadratic programs. +The optimizer is created using `optimizer_constructor`. + +See also: [`nonlinear_diff_model`](@ref), [`conic_diff_model`](@ref), [`diff_model`](@ref). +""" +function quadratic_diff_model( + optimizer_constructor; + with_bridge_type = Float64, + with_cache::Bool = true, +) + inner = diff_optimizer( + optimizer_constructor; + with_parametric_opt_interface = true, + with_bridge_type = with_bridge_type, + with_cache = with_cache, + ) + MOI.set(inner, ModelConstructor(), QuadraticProgram.Model) + return JuMP.direct_model(inner) +end """ + set_forward_parameter(model::JuMP.Model, variable::JuMP.VariableRef, value::Number) + +Set the value of a parameter input sensitivity for forward mode. """ function set_forward_parameter( model::JuMP.Model, @@ -28,34 +103,39 @@ function set_forward_parameter( ) return MOI.set( model, - DiffOpt.ForwardConstraintSet(), + ForwardConstraintSet(), ParameterRef(variable), - value, + Parameter(value), ) end """ + get_reverse_parameter(model::JuMP.Model, variable::JuMP.VariableRef) + +Get the value of a parameter output sensitivity for reverse mode. 
""" function get_reverse_parameter(model::JuMP.Model, variable::JuMP.VariableRef) - return MOI.get( - model, - DiffOpt.ReverseConstraintSet(), - ParameterRef(variable), - ) + return MOI.get(model, ReverseConstraintSet(), ParameterRef(variable)).value end """ + set_reverse_variable(model::JuMP.Model, variable::JuMP.VariableRef, value::Number) + +Set the value of a variable input sensitivity for reverse mode. """ function set_reverse_variable( model::JuMP.Model, variable::JuMP.VariableRef, value::Number, ) - return MOI.set(model, DiffOpt.ReverseVariablePrimal(), variable, value) + return MOI.set(model, ReverseVariablePrimal(), variable, value) end """ + get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef) + +Get the value of a variable output sensitivity for forward mode. """ function get_forward_variable(model::JuMP.Model, variable::JuMP.VariableRef) - return MOI.get(model, DiffOpt.ForwardVariablePrimal(), variable) + return MOI.get(model, ForwardVariablePrimal(), variable) end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 5c7c7be8..35fe763b 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -863,7 +863,7 @@ function MOI.get(model::Optimizer, attr::DifferentiateTimeSec) end function MOI.supports( - model::Optimizer, + ::Optimizer, ::NonLinearKKTJacobianFactorization, ::Function, ) diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl new file mode 100644 index 00000000..cbc4d7a8 --- /dev/null +++ b/test/jump_wrapper.jl @@ -0,0 +1,123 @@ +# Copyright (c) 2020: Akshay Sharma and contributors +# +# Use of this source code is governed by an MIT-style license that can be found +# in the LICENSE.md file or at https://opensource.org/licenses/MIT. + +module TestJuMPWrapper + +using Test +using JuMP +import DiffOpt +import HiGHS +import Ipopt +import SCS +import MathOptInterface as MOI + +const ATOL = 1e-3 +const RTOL = 1e-3 + +function runtests() + for name in names(@__MODULE__; all = true) + if startswith("$name", "test_") + @testset "$(name)" begin + getfield(@__MODULE__, name)() + end + end + end + return +end + +function test_jump_api() + for (MODEL, SOLVER) in [ + (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), + (DiffOpt.quadratic_diff_model, SCS.Optimizer), + (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), + # (DiffOpt.conic_diff_model, HiGHS.Optimizer), + # (DiffOpt.conic_diff_model, SCS.Optimizer), # conicmodel has a issue with sign + # (DiffOpt.conic_diff_model, Ipopt.Optimizer), + # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), # SQF ctr not supported? + # (DiffOpt.nonlinear_diff_model, SCS.Optimizer), # returns zero for sensitivity + (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), + ], + ineq in [true, false], + min in [true, false], + flip in [true, false] + + @testset "$(MODEL) with: $(SOLVER), $(ineq ? "ineqs" : "eqs"), $(min ? "Min" : "Max"), $(flip ? "geq" : "leq")" begin + model = MODEL(SOLVER) + set_silent(model) + + p_val = 4.0 + pc_val = 2.0 + @variable(model, x) + @variable(model, p in Parameter(p_val)) + @variable(model, pc in Parameter(pc_val)) + if ineq + if !flip + cons = @constraint(model, pc * x >= 3 * p) + else + cons = @constraint(model, pc * x <= 3 * p) + end + else + @constraint(model, cons, pc * x == 3 * p) + end + sign = flip ? 
-1 : 1 + if min + @objective(model, Min, 2x * sign) + else + @objective(model, Max, -2x * sign) + end + optimize!(model) + @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL + + # the function is + # x(p, pc) = 3p / pc + # hence, + # dx/dp = 3 / pc + # dx/dpc = -3p / pc^2 + + # First, try forward mode AD + + # differentiate w.r.t. p + direction_p = 3.0 + DiffOpt.set_forward_parameter(model, p, direction_p) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_variable(model, x) ≈ + direction_p * 3 / pc_val atol = ATOL rtol = RTOL + + # update p and pc + p_val = 2.0 + pc_val = 6.0 + set_parameter_value(p, p_val) + set_parameter_value(pc, pc_val) + # re-optimize + optimize!(model) + # check solution + @test value(x) ≈ 3 * p_val / pc_val atol = ATOL rtol = RTOL + + # stop differentiating with respect to p + DiffOpt.empty_input_sensitivities!(model) + # differentiate w.r.t. pc + direction_pc = 10.0 + DiffOpt.set_forward_parameter(model, pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test DiffOpt.get_forward_variable(model, x) ≈ + -direction_pc * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL + + # always a good practice to clear previously set sensitivities + DiffOpt.empty_input_sensitivities!(model) + # Now, reverse model AD + direction_x = 10.0 + DiffOpt.set_reverse_variable(model, x, direction_x) + DiffOpt.reverse_differentiate!(model) + @test DiffOpt.get_reverse_parameter(model, p) ≈ + direction_x * 3 / pc_val atol = ATOL rtol = RTOL + @test DiffOpt.get_reverse_parameter(model, pc) ≈ + -direction_x * 3 * p_val / pc_val^2 atol = ATOL rtol = RTOL + end + end +end + +end # module + +TestJuMPWrapper.runtests() From fdb96c8c7267c4956d03061c897345012d39fd56 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Sat, 22 Feb 2025 13:38:59 -0300 Subject: [PATCH 4/6] add diff_model test --- test/jump_wrapper.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index cbc4d7a8..dae907c3 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -29,6 +29,7 @@ end function test_jump_api() for (MODEL, SOLVER) in [ + (DiffOpt.diff_model, Ipopt.Optimizer), (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), (DiffOpt.quadratic_diff_model, SCS.Optimizer), (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), From 06efa7707a3030739cd12e79dce29485bbf065c2 Mon Sep 17 00:00:00 2001 From: Andrew Rosemberg Date: Mon, 19 May 2025 23:57:56 -0600 Subject: [PATCH 5/6] Fix conic error (#284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix conic error * use jump psd problem * [docs] update to Documenter@1 (#286) * [docs] update to Documenter@1 * Update * update * format * Pass attributes through Objective.FunctionConversionBridge (#287) * Pass attributes through Objective.FunctionConversionBridge * Fix * add test * fix tol * fix test * add reverse test --------- Co-authored-by: joaquimg * bump POI * cleanup --------- Co-authored-by: Oscar Dowson Co-authored-by: Benoît Legat Co-authored-by: joaquimg --- .github/workflows/ci.yml | 48 ++-- Project.toml | 2 +- docs/Project.toml | 2 +- docs/make.jl | 8 +- src/ConicProgram/ConicProgram.jl | 1 + src/bridges.jl | 24 ++ test/conic_program.jl | 412 ++++++++++++------------------- test/jump.jl | 43 ++++ test/jump_wrapper.jl | 6 +- 9 files changed, 252 insertions(+), 294 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b66918fe..37487402 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,7 +1,14 @@ name: CI 
on: - - push - - pull_request + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] +# needed to allow julia-actions/cache to delete old caches that it has created +permissions: + actions: write + contents: read jobs: test: name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }} @@ -17,21 +24,12 @@ jobs: os: windows-latest arch: x64 steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v3 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} - restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - ${{ runner.os }}- + - uses: julia-actions/cache@v2 - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 env: @@ -48,23 +46,17 @@ jobs: # Fix for Plots with GR backend, see https://github.com/jheinen/GR.jl/issues/422 GKSwstype: nul steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 with: version: '1' - - shell: bash - run: julia --project=docs -e "using Pkg; Pkg.develop(PackageSpec(path=pwd()))" - - shell: bash - run: julia --project=docs -e "using Pkg; Pkg.instantiate()" - - shell: bash - env: - DATADEPS_ALWAYS_ACCEPT: true # For MLDatasets.MNIST + - name: Install dependencies + shell: julia --color=yes --project=docs/ {0} run: | - julia --project=docs -e ' - using Documenter: doctest - using DiffOpt - doctest(DiffOpt)' - - run: julia --project=docs docs/make.jl + using Pkg + Pkg.develop(PackageSpec(path=pwd())) + Pkg.instantiate() + - run: julia --project=docs --color=yes docs/make.jl env: DATADEPS_ALWAYS_ACCEPT: true # For MLDatasets.MNIST GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/Project.toml b/Project.toml index a3689608..e89e4c6b 100644 --- a/Project.toml +++ b/Project.toml @@ -23,5 +23,5 @@ JuMP = "1" LazyArrays = "0.21, 0.22, 1" MathOptInterface = "1.18" MathOptSetDistances = "0.2.9" -ParametricOptInterface = "0.9.0" +ParametricOptInterface = "0.11" julia = "1.6" diff --git a/docs/Project.toml b/docs/Project.toml index f953f3b3..c4112c75 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -15,4 +15,4 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] -Documenter = "0.27" +Documenter = "1" diff --git a/docs/make.jl b/docs/make.jl index 9d825aaf..a31f7e28 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -43,8 +43,10 @@ end literate_directory(_EXAMPLE_DIR) makedocs(; + authors = "JuMP Community", + sitename = "DiffOpt.jl", + repo = "https://github.com/jump-dev/DiffOpt.jl", modules = [DiffOpt], - doctest = false, clean = true, format = Documenter.HTML(; prettyurls = get(ENV, "CI", nothing) == "true", @@ -61,10 +63,6 @@ makedocs(; f in readdir(_EXAMPLE_DIR) if endswith(f, ".md") ], ], - strict = true, # See https://github.com/JuliaOpt/JuMP.jl/issues/1576 - repo = "https://github.com/jump-dev/DiffOpt.jl", - sitename = "DiffOpt.jl", - authors = "JuMP Community", ) deploydocs(; repo = "github.com/jump-dev/DiffOpt.jl.git", push_preview = true) diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl index 89a79183..f9444404 100644 --- a/src/ConicProgram/ConicProgram.jl +++ 
b/src/ConicProgram/ConicProgram.jl @@ -302,6 +302,7 @@ function DiffOpt.forward_differentiate!(model::Model) dAj, dAv, ) + dAv .*= -1.0 dA = SparseArrays.sparse(dAi, dAj, dAv, lines, cols) m = size(A, 1) diff --git a/src/bridges.jl b/src/bridges.jl index 2b0eb46b..a56717b3 100644 --- a/src/bridges.jl +++ b/src/bridges.jl @@ -3,6 +3,30 @@ # Use of this source code is governed by an MIT-style license that can be found # in the LICENSE.md file or at https://opensource.org/licenses/MIT. +function MOI.get( + model::MOI.ModelLike, + attr::ObjectiveFunctionAttribute{ReverseObjectiveFunction,G}, + ::MOI.Bridges.Objective.FunctionConversionBridge{T,F,G}, +) where {T,F,G} + return MOI.get( + model, + ObjectiveFunctionAttribute{ReverseObjectiveFunction,F}(attr.attr), + ) +end + +function MOI.set( + model::MOI.ModelLike, + attr::ObjectiveFunctionAttribute{ForwardObjectiveFunction,G}, + ::MOI.Bridges.Objective.FunctionConversionBridge{T,F,G}, + value, +) where {T,F,G} + return MOI.set( + model, + ObjectiveFunctionAttribute{ForwardObjectiveFunction,F}(attr.attr), + value, + ) +end + function MOI.get( model::MOI.ModelLike, ::ObjectiveFunctionAttribute{ReverseObjectiveFunction}, diff --git a/test/conic_program.jl b/test/conic_program.jl index a3d4a939..f5e5b000 100644 --- a/test/conic_program.jl +++ b/test/conic_program.jl @@ -11,6 +11,7 @@ import Ipopt import LinearAlgebra import MathOptInterface as MOI import SCS +using JuMP const ATOL = 2e-4 const RTOL = 2e-4 @@ -28,7 +29,7 @@ end function _test_simple_socp(eq_vec::Bool) # referred from _soc2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L1355 - # find equivalent diffcp python program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_socp_1_py.ipynb + # find reference diffcp python program here: https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_socp_1_py.ipynb # Problem SOC2 # min x # s.t. y ≥ 1/√2 @@ -38,80 +39,103 @@ function _test_simple_socp(eq_vec::Bool) # s.t. 
-1/√2 + y ∈ R₊ # 1 - t ∈ {0} # (t,x,y) ∈ SOC₃ - model = DiffOpt.diff_optimizer(SCS.Optimizer) - MOI.set(model, MOI.Silent(), true) - x, y, t = MOI.add_variables(model, 3) - MOI.set( - model, - MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), - 1.0x, - ) - MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + + model = JuMP.Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + set_silent(model) + + x = @variable(model) + y = @variable(model) + t = @variable(model) + + ceq = if eq_vec + @constraint(model, [t] .== [1.0]) + else + @constraint(model, t == 1.0) + end + cnon = @constraint(model, 1.0y >= 1 / √2) + csoc = @constraint(model, [1.0t, 1.0x, 1.0y] in MOI.SecondOrderCone(3)) + + @objective(model, Min, 1.0x) + + optimize!(model) + + # set foward sensitivities if eq_vec - ceq = MOI.add_constraint( - model, - MOI.Utilities.vectorize([-1.0t + 1.0]), - MOI.Zeros(1), - ) + MOI.set.(model, DiffOpt.ForwardConstraintFunction(), ceq, [1.0 * x]) else - ceq = MOI.add_constraint(model, -1.0t, MOI.EqualTo(-1.0)) + MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x) end - cnon = MOI.add_constraint( - model, - MOI.Utilities.vectorize([1.0y - 1 / √2]), - MOI.Nonnegatives(1), - ) - csoc = MOI.add_constraint( - model, - MOI.Utilities.vectorize([1.0t, 1.0x, 1.0y]), - MOI.SecondOrderCone(3), - ) - MOI.optimize!(model) + + DiffOpt.forward_differentiate!(model) + + dx = -0.9999908 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ dx atol = ATOL rtol = + RTOL + + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0) + + DiffOpt.reverse_differentiate!(model) + if eq_vec - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - ceq, - MOI.Utilities.vectorize([1.0 * x]), + @test all( + isapprox.( + JuMP.coefficient.( + MOI.get.(model, DiffOpt.ReverseConstraintFunction(), ceq), + x, + ), + dx, + atol = ATOL, + rtol = RTOL, + ), ) else - MOI.set(model, DiffOpt.ForwardConstraintFunction(), ceq, 1.0 * x) + @test JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), ceq), + x, + ) ≈ dx atol = ATOL rtol = RTOL end - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - cnon, - MOI.Utilities.vectorize([1.0 * y]), - ) + + DiffOpt.empty_input_sensitivities!(model) + + MOI.set(model, DiffOpt.ForwardConstraintFunction(), cnon, 1.0 * y) + + DiffOpt.forward_differentiate!(model) + + dy = -0.707083 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), y) ≈ dy atol = ATOL rtol = + RTOL + + MOI.set(model, DiffOpt.ReverseVariablePrimal(), y, 1.0) + + DiffOpt.reverse_differentiate!(model) + + @test JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), cnon), + y, + ) ≈ dy atol = ATOL rtol = RTOL + + DiffOpt.empty_input_sensitivities!(model) + MOI.set( model, DiffOpt.ForwardConstraintFunction(), csoc, - MOI.Utilities.operate(vcat, Float64, 1.0 * t, 0.0, 0.0), + MOI.Utilities.operate(vcat, Float64, 1.0 * t.index, 0.0, 0.0), ) + DiffOpt.forward_differentiate!(model) - # these matrices are benchmarked with the output generated by diffcp - # refer the python file mentioned above to get equivalent python source code - @test model.diff.model.x ≈ [-1 / √2; 1 / √2; 1.0] atol = ATOL rtol = RTOL - if eq_vec - @test model.diff.model.s ≈ [0.0, 0.0, 1.0, -1 / √2, 1 / √2] atol = ATOL rtol = - RTOL - @test model.diff.model.y ≈ [√2, 1.0, √2, 1.0, -1.0] atol = ATOL rtol = - RTOL - else - @test model.diff.model.s ≈ [0.0, 1.0, -1 / √2, 1 / √2, 0.0] atol = ATOL rtol = - RTOL - @test model.diff.model.y ≈ [1.0, √2, 1.0, -1.0, √2] atol = ATOL rtol = - RTOL - end - dx = 
[1.12132144; 1 / √2; 1 / √2] - for (i, vi) in enumerate([x, y, t]) - @test dx[i] ≈ MOI.get(model, DiffOpt.ForwardVariablePrimal(), vi) atol = - ATOL rtol = RTOL - end - # @test dx ≈ [1.12132144; 1/√2; 1/√2] atol=ATOL rtol=RTOL - # @test ds ≈ [0.0; 0.0; -2.92893438e-01; 1.12132144e+00; 7.07106999e-01] atol=ATOL rtol=RTOL - # @test dy ≈ [2.4142175; 5.00000557; 3.8284315; √2; -4.00000495] atol=ATOL rtol=RTOL + + ds = 0.0 + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), t) ≈ ds atol = ATOL rtol = + RTOL + + MOI.set(model, DiffOpt.ReverseVariablePrimal(), t, 1.0) + + DiffOpt.reverse_differentiate!(model) + + # FIXME: this is not working - https://github.com/jump-dev/DiffOpt.jl/issues/283 + # @test JuMP.coefficient(MOI.get(model, DiffOpt.ReverseConstraintFunction(), csoc).func.func.func, t.index) ≈ ds atol=ATOL rtol=RTOL + return end @@ -375,206 +399,82 @@ function test_differentiating_conic_with_PSD_and_SOC_constraints() return end -function test_differentiating_conic_with_PSD_and_POS_constraints() +function _build_simple_sdp() # refer psdt2test, https://github.com/jump-dev/MathOptInterface.jl/blob/master/src/Test/contconic.jl#L4306 # find equivalent diffcp program here - https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb - model = DiffOpt.diff_optimizer(SCS.Optimizer) - MOI.set(model, MOI.Silent(), true) - x = MOI.add_variables(model, 7) - @test MOI.get(model, MOI.NumberOfVariables()) == 7 - η = 10.0 - c1 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.(1, MOI.ScalarAffineTerm.(-1.0, x[1:6])), - [η], - ), - MOI.Nonnegatives(1), - ) - c2 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.(1:6, MOI.ScalarAffineTerm.(1.0, x[1:6])), - zeros(6), - ), - MOI.Nonnegatives(6), - ) - α = 0.8 - δ = 0.9 - c3 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.( - [fill(1, 7); fill(2, 5); fill(3, 6)], - MOI.ScalarAffineTerm.( - [ - δ / 2, - α, - δ, - δ / 4, - δ / 8, - 0.0, - -1.0, - -δ / (2 * √2), - -δ / 4, - 0, - -δ / (8 * √2), - 0.0, - δ / 2, - δ - α, - 0, - δ / 8, - δ / 4, - -1.0, - ], - [x[1:7]; x[1:3]; x[5:6]; x[1:3]; x[5:7]], - ), - ), - zeros(3), - ), - MOI.PositiveSemidefiniteConeTriangle(2), - ) - c4 = MOI.add_constraint( - model, - MOI.VectorAffineFunction( - MOI.VectorAffineTerm.( - 1, - MOI.ScalarAffineTerm.(0.0, [x[1:3]; x[5:6]]), - ), - [0.0], - ), - MOI.Zeros(1), - ) - MOI.set( - model, - MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), - MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x[7])], 0.0), - ) - MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) - MOI.optimize!(model) - # dc = ones(7) - MOI.set( - model, - DiffOpt.ForwardObjectiveFunction(), - MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(ones(7), x), 0.0), - ) - # db = ones(11) - # dA = ones(11, 7) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c1, - MOI.Utilities.vectorize(ones(1, 7) * x + ones(1)), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c2, - MOI.Utilities.vectorize(ones(6, 7) * x + ones(6)), - ) - MOI.set( + # Make a JuMP model backed by DiffOpt.diff_optimizer(SCS.Optimizer) + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + set_silent(model) # just to suppress solver output + + @variable(model, x[1:3]) + + @constraint(model, c1, sum(x[i] for i in 1:3) == 4) + + @constraint(model, c2[i = 1:3], x[i] ≥ 0) + + @constraint(model, x[1] == 2) + + @constraint( model, - DiffOpt.ForwardConstraintFunction(), c3, - MOI.Utilities.vectorize(ones(3, 
7) * x + ones(3)), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c4, - MOI.Utilities.vectorize(ones(1, 7) * x + ones(1)), + LinearAlgebra.Symmetric([ + x[3]+1 2 + 2 2x[3]+2 + ]) in PSDCone() ) - DiffOpt.forward_differentiate!(model) - @test model.diff.model.x ≈ - [20 / 3.0, 0.0, 10 / 3.0, 0.0, 0.0, 0.0, 1.90192379] atol = ATOL rtol = - RTOL - @test model.diff.model.s ≈ [ - 0.0, - 0.0, - 20 / 3.0, - 0.0, - 10 / 3.0, - 0.0, - 0.0, - 0.0, - 4.09807621, - -2.12132, - 1.09807621, - ] atol = ATOL rtol = RTOL - @test model.diff.model.y ≈ [ - 0.0, - 0.19019238, - 0.0, - 0.12597667, - 0.0, - 0.14264428, - 0.14264428, - 0.01274047, - 0.21132487, - 0.408248, - 0.78867513, - ] atol = ATOL rtol = RTOL - atol = 0.3 - rtol = 0.01 - # compare these with https://github.com/AKS1996/jump-gsoc-2020/blob/master/diffcp_sdp_3_py.ipynb - # results are not exactly as: 1. there is some residual error 2. diffcp results are SCS specific, hence scaled - dx = [-39.6066, 10.8953, -14.9189, 10.9054, 10.883, 10.9118, -21.7508] - for (i, vi) in enumerate(x) - @test dx[i] ≈ MOI.get(model, DiffOpt.ForwardVariablePrimal(), vi) atol = - atol rtol = rtol + + @objective(model, Min, 4x[3] + x[2]) + return model +end + +function test_differentiating_conic_with_PSD_constraints() + model = _build_simple_sdp() + optimize!(model) + x = model[:x] + c1 = model[:c1] + c2 = model[:c2] + sx = value.(x) + @test sx ≈ [2.0, 3.0 - sqrt(2), sqrt(2) - 1] atol = ATOL rtol = RTOL + + for i in 1:3 + _model = _build_simple_sdp() + JuMP.set_normalized_coefficient(_model[:c1], _model[:x][i], 1.001) + optimize!(_model) + _dx = (value(_model[:x][i]) - value(sx[i])) / 0.001 + i in (1, 3) ? (@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45) + MOI.set(model, DiffOpt.ForwardConstraintFunction(), c1, x[i] + 0.0) + DiffOpt.forward_differentiate!(model) + _dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) + i in (1, 3) ? (@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[i], 1.0) + DiffOpt.reverse_differentiate!(model) + _dx = JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), c1), + x[i], + ) + i in (1, 3) ? 
(@test abs(_dx) < 0.05) : (@test -1.6 < _dx < -1.45) + DiffOpt.empty_input_sensitivities!(model) end - # @test dy ≈ [0.0, -3.56905, 0.0, -0.380035, 0.0, -0.41398, -0.385321, -0.00743119, -0.644986, -0.550542, -2.36765] atol=atol rtol=rtol - # @test ds ≈ [0.0, 0.0, -50.4973, 0.0, -25.8066, 0.0, 0.0, 0.0, -7.96528, -1.62968, -2.18925] atol=atol rtol=rtol - # TODO: future example, how to differentiate wrt a specific constraint/variable, refer QPLib article for more - dA = zeros(11, 7) - dA[3:8, 1:6] = Matrix{Float64}(LinearAlgebra.I, 6, 6) # differentiating only wrt POS constraint c2 - db = zeros(11) - dc = zeros(7) - # db = zeros(11) - # dA = zeros(11, 7) - # dA[3:8, 1:6] = Matrix{Float64}(LinearAlgebra.I, 6, 6) # differentiating only wrt POS constraint c2 - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c1, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 1, - ), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c2, - MOI.Utilities.vectorize(ones(6) .* x[1:6]), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c3, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 3, - ), - ) - MOI.set( - model, - DiffOpt.ForwardConstraintFunction(), - c4, - MOI.Utilities.zero_with_output_dimension( - MOI.VectorAffineFunction{Float64}, - 1, - ), - ) - DiffOpt.forward_differentiate!(model) - # for (i, vi) in enumerate(X) - # @test 0.0 ≈ MOI.get(model, - # DiffOpt.ForwardVariablePrimal(), vi) atol=1e-2 rtol=RTOL - # end - # TODO add a test here, probably on duals - # # note that there's no change in the PSD slack values or dual optimas - # @test dy ≈ [0.0, 0.0, 0.0, 0.125978, 0.0, 0.142644, 0.142641, 0.0127401, 0.0, 0.0, 0.0] atol=atol rtol=RTOL - # @test ds ≈ [0.0, 0.0, -6.66672, 0.0, -3.33336, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] atol=atol rtol=RTOL + for i in 1:3 + DiffOpt.empty_input_sensitivities!(model) + _model = _build_simple_sdp() + JuMP.set_normalized_coefficient(_model[:c2][i], _model[:x][i], 1.001) + optimize!(_model) + _dx = (value(_model[:x][i]) - value(sx[i])) / 0.001 + @test abs(_dx) < 0.15 + MOI.set(model, DiffOpt.ForwardConstraintFunction(), c2[i], x[i] + 0.0) + DiffOpt.forward_differentiate!(model) + _dx = MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[i]) + @test abs(_dx) < 0.15 + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[i], 1.0) + DiffOpt.reverse_differentiate!(model) + _dx = JuMP.coefficient( + MOI.get(model, DiffOpt.ReverseConstraintFunction(), c2[i]), + x[i], + ) + @test abs(_dx) < 0.15 + end + return end diff --git a/test/jump.jl b/test/jump.jl index 8055f1f1..bdb031b7 100644 --- a/test/jump.jl +++ b/test/jump.jl @@ -31,6 +31,49 @@ function runtests() return end +function test_single_variable_objective_forward() + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + @variable(model, x[1:7] >= 0) + @constraint(model, c1, sum(x[i] for i in 1:6) == 10) + @constraint(model, c2, x[7] == 10) + @constraint( + model, + c3, + LinearAlgebra.Symmetric([ + x[7] 0.0 + 0.0 x[1] + ]) in PSDCone() + ) + @objective(model, Max, x[7]) + optimize!(model) + MOI.set(model, DiffOpt.ForwardObjectiveFunction(), sum(x)) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x[7]) ≈ 0 atol = ATOL + return +end + +function test_single_variable_objective_reverse() + model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer)) + @variable(model, x[1:7] >= 0) + @constraint(model, c1, sum(x[i] for i in 1:6) == 10) + @constraint(model, c2, x[7] == 10) + 
@constraint( + model, + c3, + LinearAlgebra.Symmetric([ + x[7] 0.0 + 0.0 x[1] + ]) in PSDCone() + ) + @objective(model, Max, x[7]) + optimize!(model) + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x[7], 1.0) + DiffOpt.reverse_differentiate!(model) + func = MOI.get(model, DiffOpt.ReverseObjectiveFunction()) + @test JuMP.coefficient(func, x[7]) ≈ 0.0 atol = ATOL rtol = RTOL + return +end + function test_forward_on_trivial_qp() # using example on https://osqp.org/docs/examples/setup-and-solve.html Q = [4.0 1.0; 1.0 2.0] diff --git a/test/jump_wrapper.jl b/test/jump_wrapper.jl index dae907c3..1720246e 100644 --- a/test/jump_wrapper.jl +++ b/test/jump_wrapper.jl @@ -33,9 +33,9 @@ function test_jump_api() (DiffOpt.quadratic_diff_model, HiGHS.Optimizer), (DiffOpt.quadratic_diff_model, SCS.Optimizer), (DiffOpt.quadratic_diff_model, Ipopt.Optimizer), - # (DiffOpt.conic_diff_model, HiGHS.Optimizer), - # (DiffOpt.conic_diff_model, SCS.Optimizer), # conicmodel has a issue with sign - # (DiffOpt.conic_diff_model, Ipopt.Optimizer), + (DiffOpt.conic_diff_model, HiGHS.Optimizer), + (DiffOpt.conic_diff_model, SCS.Optimizer), + (DiffOpt.conic_diff_model, Ipopt.Optimizer), # (DiffOpt.nonlinear_diff_model, HiGHS.Optimizer), # SQF ctr not supported? # (DiffOpt.nonlinear_diff_model, SCS.Optimizer), # returns zero for sensitivity (DiffOpt.nonlinear_diff_model, Ipopt.Optimizer), From 3cea22f09097b75594a0d507fd447ab503a05618 Mon Sep 17 00:00:00 2001 From: joaquimg Date: Mon, 19 May 2025 23:31:20 -0700 Subject: [PATCH 6/6] fix parameters support --- src/NonLinearProgram/NonLinearProgram.jl | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index 6b816b4f..e09d4cf4 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -113,14 +113,20 @@ function MOI.supports_constraint( S<:Union{ MOI.GreaterThan{Float64}, MOI.LessThan{Float64}, - # MOI.Interval{Float64}, MOI.EqualTo{Float64}, - MOI.Parameter{Float64}, }, } return true end +function MOI.supports_constraint( + ::Form, + ::Type{MOI.VariableIndex}, + ::Type{MOI.Parameter{Float64}}, +) + return true +end + function _add_leq_geq( form::Form, idx::MOI.ConstraintIndex,
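
For quick reference, here is a minimal end-to-end sketch of the JuMP wrapper API added in this series, condensed from the README example and the `test/jump_wrapper.jl` cases above. The concrete parameter values and the HiGHS backend are illustrative assumptions, not part of the patches themselves.

```julia
using JuMP, DiffOpt, HiGHS

# Build a differentiable LP/QP wrapper around HiGHS (see quadratic_diff_model above).
model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer)
set_silent(model)

p_val = 4.0
pc_val = 2.0
@variable(model, x)
@variable(model, p in Parameter(p_val))
@variable(model, pc in Parameter(pc_val))
@constraint(model, cons, pc * x >= 3 * p)
@objective(model, Min, 2x)
optimize!(model)                          # x* = 3p / pc

# Forward mode: seed dp = 1 and read dx.
DiffOpt.set_forward_parameter(model, p, 1.0)
DiffOpt.forward_differentiate!(model)
DiffOpt.get_forward_variable(model, x)    # ≈ 3 / pc_val

# Reverse mode: seed dx = 1 and read the parameter sensitivities.
DiffOpt.empty_input_sensitivities!(model)
DiffOpt.set_reverse_variable(model, x, 1.0)
DiffOpt.reverse_differentiate!(model)
DiffOpt.get_reverse_parameter(model, p)   # ≈ 3 / pc_val
DiffOpt.get_reverse_parameter(model, pc)  # ≈ -3 * p_val / pc_val^2
```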