Add support for Tuple functions with epigraph matrix sets #3427

Closed
wants to merge 10 commits
10 changes: 6 additions & 4 deletions docs/src/tutorials/conic/ellipse_approx.jl
@@ -109,7 +109,7 @@ m, n = size(S)
[i in 1:m],
S[i, :]' * Z * S[i, :] - 2 * S[i, :]' * z + s <= 1,
)
@constraint(model, [t; vec(Z)] in MOI.RootDetConeSquare(n))
Member commented:
We use the syntax in examples, but what about documenting it in the general case, along with its limitations?

Effectively, the tuple is vectorized by concatenating its elements, and the vectorization doesn't concern itself with whether the structure of the tuple makes sense for the set.
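
For example (an illustrative sketch assuming this PR is applied; it is not part of the diff):

```julia
# Sketch: the tuple is flattened element-by-element into one vector, so the
# set only ever sees the concatenation, not the (scalar, matrix) structure.
using JuMP
model = Model()
@variable(model, t)
@variable(model, X[1:2, 1:2])
# (t, X) vectorizes to the 5-element vector [t; vec(X)], which is what
# MOI.RootDetConeSquare(2) receives; no structural check is performed.
@constraint(model, (t, X) in MOI.RootDetConeSquare(2))
```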

Member Author replied:
I guess my concern was more: is this a good idea, and should we move forward with it? It adds two ways to write each constraint (vector or tuple), which might be confusing for users.
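
As a hedged illustration of that concern (not part of the diff), the same cone membership could then be written either way:

```julia
# Sketch assuming this PR: two equivalent spellings of the same constraint.
using JuMP
model = Model()
@variable(model, x[1:3])
@variable(model, t)
@constraint(model, [t; x] in SecondOrderCone())   # existing vector syntax
@constraint(model, (t, x) in SecondOrderCone())   # tuple syntax added by this PR
```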

@constraint(model, (t, Z) in MOI.RootDetConeSquare(n))
@objective(model, Max, t)
optimize!(model)
Test.@test termination_status(model) == OPTIMAL #src
@@ -208,9 +208,11 @@ set_silent(model)
## The former constraint S[i, :]' * Z * S[i, :] - 2 * S[i, :]' * z + s <= 1
f = [1 - S[i, :]' * Z * S[i, :] + 2 * S[i, :]' * z - s for i in 1:m]
@constraint(model, f in MOI.Nonnegatives(m))
## The former constraint [t; vec(Z)] in MOI.RootDetConeSquare(n)
Z_upper = [Z[i, j] for j in 1:n for i in 1:j]
@constraint(model, 1 * vcat(t, Z_upper) .+ 0 in MOI.RootDetConeTriangle(n))
## The former constraint (t, Z) in MOI.RootDetConeSquare(n)
@constraint(
model,
(1.0 * t, LinearAlgebra.Symmetric(1.0 .* Z)) in MOI.RootDetConeTriangle(n),
)
## The former @objective(model, Max, t)
@objective(model, Max, 1 * t + 0)
optimize!(model)
7 changes: 2 additions & 5 deletions docs/src/tutorials/conic/experiment_design.jl
@@ -210,11 +210,8 @@ set_silent(dOpt)
@variable(dOpt, t)
@objective(dOpt, Max, t)
@constraint(dOpt, sum(np) <= n)
E = V * LinearAlgebra.diagm(0 => np ./ n) * V'
@constraint(
dOpt,
[t, 1, (E[i, j] for i in 1:q for j in 1:i)...] in MOI.LogDetConeTriangle(q)
Member Author commented:
Maybe this PR is really just asking for a nicer way to vec a symmetric matrix.
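
A hedged sketch of what that might look like (the helper below is purely illustrative; its name and availability in JuMP are assumptions, not part of this diff):

```julia
# Illustrative helper: extract the upper triangle of a symmetric matrix,
# column by column, which is the element order the *Triangle sets expect.
import LinearAlgebra

function vec_triangle(A::LinearAlgebra.Symmetric)
    n = LinearAlgebra.checksquare(A)
    return [A[i, j] for j in 1:n for i in 1:j]
end

# Usage sketch: vec_triangle(E) could then replace the manual comprehension
# `(E[i, j] for i in 1:q for j in 1:i)` used in this tutorial.
```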

)
E = LinearAlgebra.Symmetric(V * LinearAlgebra.diagm(0 => np ./ n) * V')
@constraint(dOpt, (t, 1.0, E) in MOI.LogDetConeTriangle(q))
Member Author commented:
What is the error thrown if `E` is not symmetric?
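
A hedged reading of the diff itself (not an authoritative answer): a plain `Matrix` does not match the `LinearAlgebra.Symmetric`-typed method for `MOI.LogDetConeTriangle` added in `src/sd.jl`, so dispatch should fall through to the generic `Tuple` fallback there, which raises "The tuple function ... is not supported for a set of type ...". For example:

```julia
# Sketch assuming this PR is applied: X is a plain Matrix, not
# LinearAlgebra.Symmetric, so the LogDetConeTriangle-specific method does not
# apply and the generic Tuple fallback is expected to error.
using JuMP
model = Model()
@variable(model, t)
@variable(model, u)
@variable(model, X[1:2, 1:2])   # Matrix{VariableRef}, not Symmetric
# @constraint(model, (t, u, X) in MOI.LogDetConeTriangle(2))   # expected to error
```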

optimize!(dOpt)
objective_value(dOpt)
#-
2 changes: 1 addition & 1 deletion docs/src/tutorials/conic/min_ellipse.jl
@@ -119,7 +119,7 @@ end
# the conic reformulation:

@variable(model, log_det_P)
@constraint(model, [log_det_P; 1; vec(P²)] in MOI.LogDetConeSquare(n))
@constraint(model, (log_det_P, 1, P²) in MOI.LogDetConeSquare(n))
@objective(model, Max, log_det_P)

# Now, solve the program:
74 changes: 64 additions & 10 deletions docs/src/tutorials/conic/tips_and_tricks.jl
@@ -95,7 +95,7 @@ set_silent(model)
@variable(model, x[1:3])
@variable(model, t)
@constraint(model, sum(x) == 1)
@constraint(model, [t; x] in SecondOrderCone())
@constraint(model, (t, x) in SecondOrderCone())
@objective(model, Min, t)
optimize!(model)
value(t), value.(x)
@@ -117,7 +117,7 @@ set_silent(model)
@variable(model, θ)
@variable(model, t)
@expression(model, residuals, θ * data .- target)
@constraint(model, [t; 0.5; residuals] in RotatedSecondOrderCone())
@constraint(model, (t, 0.5, residuals) in RotatedSecondOrderCone())
@objective(model, Min, t)
optimize!(model)
value(θ), value(t)
@@ -140,7 +140,7 @@ set_silent(model)
@variable(model, x == 1.5)
@variable(model, z)
@objective(model, Min, z)
@constraint(model, [x, 1, z] in MOI.ExponentialCone())
@constraint(model, (x, 1, z) in MOI.ExponentialCone())
optimize!(model)
value(z), exp(1.5)

@@ -153,7 +153,7 @@ set_silent(model)
@variable(model, x)
@variable(model, z == 1.5)
@objective(model, Max, x)
@constraint(model, [x, 1, z] in MOI.ExponentialCone())
@constraint(model, (x, 1, z) in MOI.ExponentialCone())
optimize!(model)
value(x), log(1.5)

@@ -170,7 +170,7 @@ set_silent(model)
@objective(model, Min, t)
@variable(model, u[1:N])
@constraint(model, sum(u) <= 1)
@constraint(model, [i = 1:N], [x[i] - t, 1, u[i]] in MOI.ExponentialCone())
@constraint(model, [i = 1:N], (x[i] - t, 1, u[i]) in MOI.ExponentialCone())
optimize!(model)
value(t), log(sum(exp.(x0)))

@@ -214,7 +214,7 @@ set_silent(model)
@objective(model, Max, sum(t))
@constraint(model, sum(x) == 1)
@constraint(model, A * x .<= b)
@constraint(model, [i = 1:n], [t[i], x[i], 1] in MOI.ExponentialCone())
@constraint(model, [i = 1:n], (t[i], x[i], 1) in MOI.ExponentialCone())
optimize!(model)
objective_value(model)

@@ -232,7 +232,7 @@ set_silent(model)
@objective(model, Max, -t)
@constraint(model, sum(x) == 1)
@constraint(model, A * x .<= b)
@constraint(model, [t; ones(n); x] in MOI.RelativeEntropyCone(2n + 1))
@constraint(model, (t, ones(n), x) in MOI.RelativeEntropyCone(2n + 1))
optimize!(model)
objective_value(model)

@@ -252,7 +252,7 @@ model = Model(SCS.Optimizer)
set_silent(model)
@variable(model, t)
@variable(model, x >= 1.5)
@constraint(model, [t, 1, x] in MOI.PowerCone(1 / 3))
@constraint(model, (t, 1, x) in MOI.PowerCone(1 / 3))
@objective(model, Min, t)
optimize!(model)
value(t), value(x)
@@ -273,7 +273,7 @@ function p_norm(x::Vector, p)
set_silent(model)
@variable(model, r[1:N])
@variable(model, t)
@constraint(model, [i = 1:N], [r[i], t, x[i]] in MOI.PowerCone(1 / p))
@constraint(model, [i = 1:N], (r[i], t, x[i]) in MOI.PowerCone(1 / p))
@constraint(model, sum(r) == t)
@objective(model, Min, t)
optimize!(model)
@@ -334,10 +334,64 @@ set_silent(model)
@variable(model, x[1:4])
@variable(model, t)
@constraint(model, sum(x) == 1)
@constraint(model, [t; x] in MOI.GeometricMeanCone(5))
@constraint(model, (t, x) in MOI.GeometricMeanCone(5))
optimize!(model)
value(t), value.(x)

# ## RootDetCone

# The [`MOI.RootDetConeSquare`](@ref) is a cone of the form:
# ```math
# K = \{ (t, X) \in \mathbb{R}^{1+d^2} : t \le \det(X)^{\frac{1}{d}} \}
# ```

model = Model(SCS.Optimizer)
set_silent(model)
@variable(model, t)
@variable(model, X[1:2, 1:2])
@objective(model, Max, t)
@constraint(model, (t, X) in MOI.RootDetConeSquare(2))
@constraint(model, X .== [2 1; 1 3])
optimize!(model)
value(t), sqrt(LinearAlgebra.det(value.(X)))

# If `X` is symmetric, then you can use [`MOI.RootDetConeTriangle`](@ref)
# instead. This can be more efficient because the solver does not need to add
# additional constraints to ensure `X` is symmetric.

@constraint(
model,
(t, LinearAlgebra.Symmetric(X)) in MOI.RootDetConeTriangle(2),
)

# ## LogDetCone

# The [`MOI.LogDetConeSquare`](@ref) is a cone of the form:
# ```math
# K = \{ (t, u, X) \in \mathbb{R}^{2+d^2} : t \le u \log(\det(X / u)) \}
# ```

model = Model(SCS.Optimizer)
set_silent(model)
@variable(model, t)
@variable(model, u)
@variable(model, X[1:2, 1:2])
@objective(model, Max, t)
@constraint(model, (t, u, X) in MOI.LogDetConeSquare(2))
@constraint(model, X .== [2 1; 1 3])
@constraint(model, u == 0.5)
optimize!(model)
value(t), 0.5 * log(LinearAlgebra.det(value.(X) ./ 0.5))

# If `X` is symmetric, then you can use [`MOI.LogDetConeTriangle`](@ref)
# instead. This can be more efficient because the solver does not need to add
# additional constraints to ensure `X` is symmetric.

@constraint(
model,
(t, u, LinearAlgebra.Symmetric(X)) in MOI.LogDetConeTriangle(2),
)

# ## Other Cones and Functions

# For other cones supported by JuMP, check out the
4 changes: 4 additions & 0 deletions src/print.jl
@@ -696,6 +696,10 @@ function function_string(
return str * "\\end{bmatrix}"
end

function function_string(mime::MIME, f::Tuple)
return string("(", join([function_string(mime, fi) for fi in f], ", "), ")")
end

function function_string(mode, constraint::AbstractConstraint)
f = reshape_vector(jump_function(constraint), shape(constraint))
return function_string(mode, f)
117 changes: 117 additions & 0 deletions src/sd.jl
@@ -768,3 +768,120 @@ function build_variable(
x = _vectorize_variables(_error, variables)
return VariablesConstrainedOnCreation(x, set, SymmetricMatrixShape(n))
end

"""
TupleShape(args::Pair{<:AbstractShape,Int}...) <: AbstractShape

A shape for representing tuple-valued functions, where each element of the tuple
has its own [`AbstractShape`](@ref), paired with the number of scalar elements
in that shape.

## Example

```jldoctest
julia> shape = TupleShape(ScalarShape() => 1, SymmetricMatrixShape(2) => 3);
```
"""
struct TupleShape{T} <: AbstractShape
shapes::T
TupleShape(args::Pair{<:AbstractShape,Int}...) = new{typeof(args)}(args)
end

function vectorize(f::Tuple, shape::TupleShape)
vectors = [vectorize(fi, si[1]) for (fi, si) in zip(f, shape.shapes)]
return reduce(vcat, vectors)
end

reshape_set(set::MOI.AbstractVectorSet, ::TupleShape) = set

function reshape_vector(v::Vector{T}, shape::TupleShape) where {T}
out = Any[]
offset = 0
for (s, dimension) in shape.shapes
push!(out, reshape_vector(v[offset.+(1:dimension)], s))
offset += dimension
end
return tuple(out...)
end

_shape_from_function(::Union{Real,AbstractJuMPScalar}) = ScalarShape() => 1
_shape_from_function(x::AbstractVector) = VectorShape() => length(x)

function build_constraint(
error_fn::Function,
f::Tuple,
set::Union{MOI.AbstractScalarSet,MOI.AbstractVectorSet},
)
return error_fn(
"The tuple function $(typeof(f)) is not supported for a set of type " *
"$(typeof(set)).",
)
end

function build_constraint(
error_fn::Function,
f::Tuple{Vararg{Union{Real,AbstractJuMPScalar,AbstractVector}}},
set::MOI.AbstractVectorSet,
)
shape = TupleShape(_shape_from_function.(f)...)
return VectorConstraint(vectorize(f, shape), set, shape)
end

function build_constraint(
error_fn::Function,
f::Tuple{<:Union{Real,AbstractJuMPScalar},<:LinearAlgebra.Symmetric},
set::MOI.RootDetConeTriangle,
)
n = LinearAlgebra.checksquare(f[2])
shape = TupleShape(
ScalarShape() => 1,
SymmetricMatrixShape(n) => div(n * (n + 1), 2),
)
return VectorConstraint(vectorize(f, shape), set, shape)
end

function build_constraint(
error_fn::Function,
f::Tuple{<:Union{Real,AbstractJuMPScalar},<:AbstractMatrix},
set::MOI.RootDetConeSquare,
)
n = LinearAlgebra.checksquare(f[2])
shape = TupleShape(ScalarShape() => 1, SquareMatrixShape(n) => n^2)
return VectorConstraint(vectorize(f, shape), set, shape)
end

function build_constraint(
error_fn::Function,
f::Tuple{
<:Union{Real,AbstractJuMPScalar},
<:Union{Real,AbstractJuMPScalar},
<:LinearAlgebra.Symmetric,
},
set::MOI.LogDetConeTriangle,
)
n = LinearAlgebra.checksquare(f[3])
shape = TupleShape(
ScalarShape() => 1,
ScalarShape() => 1,
SymmetricMatrixShape(n) => div(n * (n + 1), 2),
)
return VectorConstraint(vectorize(f, shape), set, shape)
end

function build_constraint(
error_fn::Function,
f::Tuple{
<:Union{Real,AbstractJuMPScalar},
<:Union{Real,AbstractJuMPScalar},
<:AbstractMatrix,
},
set::MOI.LogDetConeSquare,
)
n = LinearAlgebra.checksquare(f[3])
shape = TupleShape(
ScalarShape() => 1,
ScalarShape() => 1,
SquareMatrixShape(n) => n^2,
)
return VectorConstraint(vectorize(f, shape), set, shape)
end
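
As a usage sketch of the new `TupleShape` machinery (illustrative only, not part of the diff; it assumes this PR plus JuMP's existing `vectorize`/`reshape_vector` methods for `SymmetricMatrixShape`):

```julia
# Round-trip sketch: vectorize flattens the tuple according to each element's
# shape, and reshape_vector recovers the original tuple structure.
using JuMP, LinearAlgebra

shape = JuMP.TupleShape(JuMP.ScalarShape() => 1, JuMP.SymmetricMatrixShape(2) => 3)
f = (2.0, Symmetric([1.0 0.5; 0.5 3.0]))
v = JuMP.vectorize(f, shape)      # [2.0, 1.0, 0.5, 3.0]
JuMP.reshape_vector(v, shape)     # (2.0, 2×2 Symmetric matrix)
```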
10 changes: 10 additions & 0 deletions src/sets.jl
@@ -41,6 +41,16 @@ function build_constraint(
return build_constraint(_error, func, moi_set(set, length(func)))
end

function build_constraint(
error_fn::Function,
f::Tuple{Vararg{Union{Real,AbstractJuMPScalar,AbstractVector},N}},
set::AbstractVectorSet,
) where {N}
shape = TupleShape(_shape_from_function.(f)...)
args = vectorize(f, shape)
return VectorConstraint(args, moi_set(set, length(args)), shape)
end

"""
build_constraint(
_error::Function,
2 changes: 2 additions & 0 deletions src/shapes.jl
@@ -112,6 +112,8 @@ Shape of scalar constraints.
"""
struct ScalarShape <: AbstractShape end
reshape_vector(α, ::ScalarShape) = α
reshape_vector(x::Vector, ::ScalarShape) = first(x)
vectorize(x, ::ScalarShape) = x

"""
VectorShape