Skip to content

Commit

Permalink
Improved Raw Expression NLP API (#2672)
Browse files Browse the repository at this point in the history
  • Loading branch information
pulsipher authored Aug 24, 2021
1 parent 5a73967 commit 2e23b5e
Show file tree
Hide file tree
Showing 5 changed files with 116 additions and 9 deletions.
34 changes: 28 additions & 6 deletions docs/src/manual/nlp.md
Original file line number Diff line number Diff line change
Expand Up @@ -569,14 +569,39 @@ which can be queried using the [`NLPEvaluator`](@ref).

!!! warning
This section requires advanced knowledge of Julia's `Expr`. You should read
the [Expressions and evaluation](https://docs.julialang.org/en/v1/manual/metaprogramming/#Expressions-and-evaluation) section of the Julia documentation first.
the [Expressions and evaluation](https://docs.julialang.org/en/v1/manual/metaprogramming/#Expressions-and-evaluation)
section of the Julia documentation first.

In addition to the [`@NLobjective`](@ref) and [`@NLconstraint`](@ref) macros, it
is also possible to provide Julia `Expr` objects directly by using
In addition to the [`@NLexpression`](@ref), [`@NLobjective`](@ref) and
[`@NLconstraint`](@ref) macros, it is also possible to provide Julia `Expr`
objects directly by using [`add_NL_expression`](@ref),
[`set_NL_objective`](@ref) and [`add_NL_constraint`](@ref).

This input form may be useful if the expressions are generated programmatically.

### Add a nonlinear expression

Use [`add_NL_expression`](@ref) to add a nonlinear expression to the model.

```jldoctest; setup=:(using JuMP; model = Model())
julia> @variable(model, x)
x

julia> expr = :($(x) + sin($(x)^2))
:(x + sin(x ^ 2))

julia> expr_ref = add_NL_expression(model, expr)
"Reference to nonlinear expression #1"
```
This is equivalent to
```jldoctest; setup=:(using JuMP; model = Model(); @variable(model, x))
julia> @NLexpression(model, expr_ref, x + sin(x^2))
"Reference to nonlinear expression #1"
```

!!! note
You must interpolate the variables directly into the expression `expr`.

### Set the objective function

Use [`set_NL_objective`](@ref) to set a nonlinear objective.
Expand All @@ -592,9 +617,6 @@ This is equivalent to
julia> @NLobjective(model, Min, x + x^2)
```

!!! note
You must interpolate the variables directly into the expression `expr`.

!!! note
You must use `MOI.MIN_SENSE` or `MOI.MAX_SENSE` instead of `Min` and `Max`.

Expand Down
1 change: 1 addition & 0 deletions docs/src/reference/nlp.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ add_NL_constraint
@NLexpression
@NLexpressions
NonlinearExpression
add_NL_expression
```

## [Objectives](@id ref_nl_objectives)
Expand Down
23 changes: 23 additions & 0 deletions src/nlp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1998,6 +1998,29 @@ function register(
)
end

"""
    add_NL_expression(model::Model, expr::Expr)

Add a nonlinear expression `expr` to `model`.

This function is most useful if the expression `expr` is generated
programmatically, and you cannot use [`@NLexpression`](@ref).

## Notes

 * You must interpolate the variables directly into the expression `expr`.

## Examples

```jldoctest; setup=:(using JuMP; model = Model(); @variable(model, x))
julia> add_NL_expression(model, :(\$(x) + \$(x)^2))
"Reference to nonlinear expression #1"
```
"""
function add_NL_expression(model::Model, ex)
    # Parse the raw `Expr` into JuMP's internal nonlinear representation and
    # register it with `model`, returning a reference usable in later
    # expressions, objectives, and constraints.
    return NonlinearExpression(model, _NonlinearExprData(model, ex))
end

"""
set_NL_objective(model::Model, sense::MOI.OptimizationSense, expr::Expr)
Expand Down
47 changes: 47 additions & 0 deletions src/parse_nlp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -560,6 +560,53 @@ function _Derivatives.expr_to_nodedata(
return nothing
end

# Lower a `GenericAffExpr` into the `_Derivatives` node-tape representation:
# emit a single `+` node under `parentid`, then one child per piece — the
# constant (when nonzero) and each linear term, where a term with a unit
# coefficient is just the variable and any other term is a `*` node over
# the coefficient and the variable.
function _Derivatives.expr_to_nodedata(
    ex::GenericAffExpr,
    nd::Vector{NodeData},
    values::Vector{Float64},
    parentid,
    r::_Derivatives.UserOperatorRegistry,
)
    push!(nd, NodeData(CALL, operator_to_id[:+], parentid))
    plus_id = length(nd)
    # A zero constant would only add a redundant `+ 0.0` child; skip it.
    iszero(ex.constant) ||
        _Derivatives.expr_to_nodedata(ex.constant, nd, values, plus_id, r)
    for (var, coef) in ex.terms
        if isone(coef)
            # `1 * x` collapses to `x`: no `*` node needed.
            _Derivatives.expr_to_nodedata(var, nd, values, plus_id, r)
        else
            push!(nd, NodeData(CALL, operator_to_id[:*], plus_id))
            times_id = length(nd)
            _Derivatives.expr_to_nodedata(coef, nd, values, times_id, r)
            _Derivatives.expr_to_nodedata(var, nd, values, times_id, r)
        end
    end
    return
end

# Lower a `GenericQuadExpr` into the `_Derivatives` node-tape representation:
# emit a `+` node under `parentid`, recurse into the affine part, then add
# one `*` node per quadratic term with children `a`, `b`, and — only when it
# is not one — the coefficient.
function _Derivatives.expr_to_nodedata(
    ex::GenericQuadExpr,
    nd::Vector{NodeData},
    values::Vector{Float64},
    parentid,
    r::_Derivatives.UserOperatorRegistry,
)
    push!(nd, NodeData(CALL, operator_to_id[:+], parentid))
    plus_id = length(nd)
    _Derivatives.expr_to_nodedata(ex.aff, nd, values, plus_id, r)
    for (pair, coef) in ex.terms
        push!(nd, NodeData(CALL, operator_to_id[:*], plus_id))
        times_id = length(nd)
        _Derivatives.expr_to_nodedata(pair.a, nd, values, times_id, r)
        _Derivatives.expr_to_nodedata(pair.b, nd, values, times_id, r)
        # A unit coefficient needs no extra factor in the `*` node.
        isone(coef) ||
            _Derivatives.expr_to_nodedata(coef, nd, values, times_id, r)
    end
    return
end

# Construct a _NonlinearExprData from a Julia expression.
# VariableRef objects should be spliced into the expression.
function _NonlinearExprData(m::Model, ex::Expr)
Expand Down
20 changes: 17 additions & 3 deletions test/nlp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -825,25 +825,39 @@ end
@test jac_values ≈ [1.0, 0.0, 1.0, 3.0]
end

# Exercise the raw-`Expr` NLP API end-to-end: build expressions, objective,
# and constraints from interpolated `Expr`s, then check the expression
# graphs the `NLPEvaluator` reports back.
@testset "add_NL_expression, set_NL_objective, and add_NL_constraint" begin
    model = Model()
    @variable(model, x)
    @variable(model, y)
    # Affine and quadratic JuMP expressions to splice into raw `Expr`s,
    # covering the `GenericAffExpr`/`GenericQuadExpr` lowering paths.
    @expression(model, aff, x + 2y - 3)
    @expression(model, quad, x^2 + 2y^2 - x)
    nlexpr = JuMP.add_NL_expression(model, :($x^2 + $y^2))
    JuMP.set_NL_objective(model, MOI.MIN_SENSE, :(2 * $nlexpr))
    # One constraint per comparison form: <=, >=, ==, and interval.
    JuMP.add_NL_constraint(model, :($x + $y <= 1))
    JuMP.add_NL_constraint(model, :($x + $y >= 1))
    JuMP.add_NL_constraint(model, :($x + $y == 1))
    JuMP.add_NL_constraint(model, :(0 <= $x + $y <= 1))
    JuMP.add_NL_constraint(model, :($aff == 1))
    JuMP.add_NL_constraint(model, :($quad == 1))

    d = JuMP.NLPEvaluator(model)
    MOI.initialize(d, [:ExprGraph])
    xidx = x.index
    yidx = y.index
    # The objective references the named subexpression, so it should appear
    # inlined as `2.0 * (x^2 + y^2)`.
    @test MOI.objective_expr(d) == :(2.0 * (x[$xidx]^2.0 + x[$yidx]^2.0))
    @test MOI.constraint_expr(d, 1) == :((x[$xidx] + x[$yidx]) - 1.0 <= 0.0)
    @test MOI.constraint_expr(d, 2) == :((x[$xidx] + x[$yidx]) - 1.0 >= 0.0)
    @test MOI.constraint_expr(d, 3) == :((x[$xidx] + x[$yidx]) - 1.0 == 0.0)
    @test MOI.constraint_expr(d, 4) == :(0.0 <= x[$xidx] + x[$yidx] <= 1.0)
    # Spliced affine expression: constant first, then the linear terms.
    @test MOI.constraint_expr(d, 5) ==
          :((-3.0 + x[$xidx] + 2.0 * x[$yidx]) - 1.0 == 0.0)
    # Spliced quadratic expression: affine part, then the quadratic terms,
    # with the non-unit coefficient appended to its `*` node.
    @test MOI.constraint_expr(d, 6) == :(
        (
            +(-1.0 * x[$xidx]) +
            x[$xidx] * x[$xidx] +
            x[$yidx] * x[$yidx] * 2.0
        ) - 1.0 == 0.0
    )
end

@testset "Test views on Hessian functions" begin
Expand Down

0 comments on commit 2e23b5e

Please sign in to comment.