From 83df5c2128d0d02535e1758f5371839ece74250b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 18 Dec 2018 12:02:14 -0500 Subject: [PATCH] Update to MOI v0.8 and ModelMode --- REQUIRE | 2 +- docs/src/constraints.md | 2 +- docs/src/quickstart.md | 20 +++++----- docs/src/solvers.md | 14 +++---- docs/src/variables.md | 4 +- src/JuMP.jl | 44 ++++++++++----------- src/constraints.jl | 14 +++---- src/copy.jl | 12 +++--- src/macros.jl | 18 ++++----- src/nlp.jl | 2 +- src/objective.jl | 4 +- src/optimizer_interface.jl | 43 +++++++++++---------- src/print.jl | 12 +++--- src/variables.jl | 2 +- test/JuMPExtension.jl | 3 +- test/constraint.jl | 4 +- test/generate_and_solve.jl | 78 +++++++++++++++++++------------------- test/hygiene.jl | 2 +- test/model.jl | 2 +- test/nlp.jl | 2 +- test/nlp_solver.jl | 78 +++++++++++++++++++------------------- test/objective.jl | 18 ++++----- test/print.jl | 12 +++--- 23 files changed, 197 insertions(+), 195 deletions(-) diff --git a/REQUIRE b/REQUIRE index 7ab83d8feb6..198354b6ea6 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,5 @@ julia 0.7 -MathOptInterface 0.7 0.8 +MathOptInterface 0.8 0.9 ForwardDiff 0.5 0.11 Calculus DataStructures diff --git a/docs/src/constraints.md b/docs/src/constraints.md index 32364c4dd3f..cf793477eab 100644 --- a/docs/src/constraints.md +++ b/docs/src/constraints.md @@ -154,7 +154,7 @@ DocTestSetup = quote @objective(model, Max, -2x); JuMP.optimize!(model); mock = JuMP.backend(model).optimizer.model; - MOI.set(mock, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mock, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mock, MOI.ConstraintDual(), JuMP.optimizer_index(con), -2.0) end ``` diff --git a/docs/src/quickstart.md b/docs/src/quickstart.md index 7c4986fdf88..4901c98edf5 100644 --- a/docs/src/quickstart.md +++ b/docs/src/quickstart.md @@ -30,8 +30,8 @@ julia> model = Model(with_optimizer(GLPK.Optimizer)) A JuMP Model Feasibility problem with: Variables: 0 -Model mode: Automatic 
-CachingOptimizer state: NoOptimizer +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. ``` @@ -99,9 +99,9 @@ DocTestSetup = quote # Now we load in the solution. Using a caching optimizer removes the need to # load a solver such as GLPK for building the documentation. mock = JuMP.backend(model).optimizer.model - MOI.set(mock, MOI.TerminationStatus(), MOI.Optimal) - MOI.set(mock, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mock, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mock, MOI.TerminationStatus(), MOI.OPTIMAL) + MOI.set(mock, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mock, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mock, MOI.ResultCount(), 1) MOI.set(mock, MOI.ObjectiveValue(), 10.6) MOI.set(mock, MOI.VariablePrimal(), JuMP.optimizer_index(x), 2.0) @@ -120,9 +120,9 @@ to a setting such as a time limit. We can ask the solver why it stopped using the `JuMP.termination_status` function: ```jldoctest quickstart_example julia> JuMP.termination_status(model) -Optimal::TerminationStatusCode = 1 +OPTIMAL::TerminationStatusCode = 1 ``` -In this case, `GLPK` returned `Optimal`, this mean that it has found the optimal +In this case, `GLPK` returned `OPTIMAL`, this mean that it has found the optimal solution. ```@meta @@ -134,14 +134,14 @@ a primal-dual pair of feasible solutions with zero duality gap. We can verify the primal and dual status as follows to confirm this: ```jldoctest quickstart_example julia> JuMP.primal_status(model) -FeasiblePoint::ResultStatusCode = 1 +FEASIBLE_POINT::ResultStatusCode = 1 julia> JuMP.dual_status(model) -FeasiblePoint::ResultStatusCode = 1 +FEASIBLE_POINT::ResultStatusCode = 1 ``` Note that the primal and dual status only inform that the primal and dual solutions are feasible and it is only because we verified that the termination -status is `Optimal` that we can conclude that they form an optimal solution. 
+status is `OPTIMAL` that we can conclude that they form an optimal solution. Finally, we can query the result of the optimization. First, we can query the objective value: diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 0d815991b45..d99e0109d7f 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -32,18 +32,18 @@ that is completely transparent to JuMP. While the MOI API may seem very demanding, it allows MOI models to be a succession of lightweight MOI layers that fill the gap between JuMP requirements and the solver capabilities. -JuMP models can be created in three different modes: Automatic, Manual and -Direct. +JuMP models can be created in three different modes: `AUTOMATIC`, `MANUAL` and +`DIRECT`. ## Automatic and Manual modes -In Automatic and Manual modes, two MOI layers are automatically applied to the -optimizer: +In `AUTOMATIC` and `MANUAL` modes, two MOI layers are automatically applied to +the optimizer: * `CachingOptimizer`: maintains a cache of the model so that when the optimizer does not support an incremental change to the model, the optimizer's internal model can be discarded and restored from the cache just before optimization. - The `CachingOptimizer` has two different modes: Automatic and Manual + The `CachingOptimizer` has two different modes: `AUTOMATIC` and `MANUAL` corresponding to the two JuMP modes with the same names. * `LazyBridgeOptimizer` (this can be disabled using the `bridge_constraints` keyword argument to [`Model`](@ref) constructor): when a constraint added is @@ -81,8 +81,8 @@ TODO: how to control the caching optimizer states ## Direct mode -JuMP models can be created in Direct mode using the [`JuMP.direct_model`](@ref) -function. +JuMP models can be created in `DIRECT` mode using the +[`JuMP.direct_model`](@ref) function. 
```@docs JuMP.direct_model ``` diff --git a/docs/src/variables.md b/docs/src/variables.md index 07aa964c89b..8418cc0240d 100644 --- a/docs/src/variables.md +++ b/docs/src/variables.md @@ -30,8 +30,8 @@ julia> model = Model() A JuMP Model Feasibility problem with: Variables: 0 -Model mode: Automatic -CachingOptimizer state: NoOptimizer +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. julia> @variable(model, x[1:2]) diff --git a/src/JuMP.jl b/src/JuMP.jl index 8d34163433e..676739f5535 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -129,11 +129,11 @@ end # Model # Model has three modes: -# 1) Automatic: moi_backend field holds a CachingOptimizer in Automatic mode. -# 2) Manual: moi_backend field holds a CachingOptimizer in Manual mode. -# 3) Direct: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support add_constraint etc. +# 1) AUTOMATIC: moi_backend field holds a CachingOptimizer in AUTOMATIC mode. +# 2) MANUAL: moi_backend field holds a CachingOptimizer in MANUAL mode. +# 3) DIRECT: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support add_constraint etc. # Methods to interact with the CachingOptimizer are defined in solverinterface.jl. -@enum ModelMode Automatic Manual Direct +@enum ModelMode AUTOMATIC MANUAL DIRECT abstract type AbstractModel end # All `AbstractModel`s must define methods for these functions: @@ -152,8 +152,8 @@ mutable struct Model <: AbstractModel variable_to_fix::Dict{MOIVAR, MOIFIX} variable_to_integrality::Dict{MOIVAR, MOIINT} variable_to_zero_one::Dict{MOIVAR, MOIBIN} - # In Manual and Automatic modes, CachingOptimizer. - # In Direct mode, will hold an AbstractOptimizer. + # In MANUAL and AUTOMATIC modes, CachingOptimizer. + # In DIRECT mode, will hold an AbstractOptimizer. 
moi_backend::MOI.AbstractOptimizer # Hook into a solve call...function of the form f(m::Model; kwargs...), # where kwargs get passed along to subsequent solve calls. @@ -171,7 +171,7 @@ mutable struct Model <: AbstractModel end """ - Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, + Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.AUTOMATIC, bridge_constraints::Bool=true) Return a new JuMP model without any optimizer; the model is stored the model in @@ -182,7 +182,7 @@ optimizer are automatically bridged to equivalent supported constraints when an appropriate transformation is defined in the `MathOptInterface.Bridges` module or is defined in another module and is explicitely added. """ -function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, +function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.AUTOMATIC, solver=nothing) if solver !== nothing error("The solver= keyword is no longer available in JuMP 0.19 and " * @@ -197,7 +197,7 @@ end """ Model(optimizer_factory::OptimizerFactory; - caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, + caching_mode::MOIU.CachingOptimizerMode=MOIU.AUTOMATIC, bridge_constraints::Bool=true) Return a new JuMP model using the optimizer factory `optimizer_factory` to @@ -272,19 +272,19 @@ MathOptInterface or solver-specific functionality. """ backend(model::Model) = model.moi_backend -moi_mode(model::MOI.ModelLike) = Direct +moi_mode(model::MOI.ModelLike) = DIRECT function moi_mode(model::MOIU.CachingOptimizer) - if model.mode == MOIU.Automatic - return Automatic + if model.mode == MOIU.AUTOMATIC + return AUTOMATIC else - return Manual + return MANUAL end end """ mode(model::Model) -Return mode (Direct, Automatic, Manual) of model. +Return mode (DIRECT, AUTOMATIC, MANUAL) of model. 
""" mode(model::Model) = moi_mode(backend(model)) # The type of backend(model) is unknown so we directly redirect to another @@ -313,14 +313,14 @@ end solver_name(model::Model) If available, returns the `SolverName` property of the underlying optimizer. -Returns `"No optimizer attached"` in `Automatic` or `Manual` modes when no +Returns `"No optimizer attached"` in `AUTOMATIC` or `MANUAL` modes when no optimizer is attached. Returns "SolverName() attribute not implemented by the optimizer." if the attribute is not implemented. """ function solver_name(model::Model) - if mode(model) != Direct && - MOIU.state(backend(model)) == MOIU.NoOptimizer + if mode(model) != DIRECT && + MOIU.state(backend(model)) == MOIU.NO_OPTIMIZER return "No optimizer attached." else return try_get_solver_name(backend(model)) @@ -451,19 +451,19 @@ Base.copy(v::AbstractArray{VariableRef}, new_model::Model) = (var -> VariableRef function optimizer_index(v::VariableRef) model = owner_model(v) - if mode(model) == Direct + if mode(model) == DIRECT return index(v) else - @assert backend(model).state == MOIU.AttachedOptimizer + @assert backend(model).state == MOIU.ATTACHED_OPTIMIZER return backend(model).model_to_optimizer_map[index(v)] end end function optimizer_index(cr::ConstraintRef{Model}) - if mode(cr.model) == Direct + if mode(cr.model) == DIRECT return index(cr) else - @assert backend(cr.model).state == MOIU.AttachedOptimizer + @assert backend(cr.model).state == MOIU.ATTACHED_OPTIMIZER return backend(cr.model).model_to_optimizer_map[index(cr)] end end @@ -478,7 +478,7 @@ return false. See also [`dual`](@ref) and [`shadow_price`](@ref). 
""" -has_duals(model::Model) = dual_status(model) != MOI.NoSolution +has_duals(model::Model) = dual_status(model) != MOI.NO_SOLUTION """ dual(cr::ConstraintRef) diff --git a/src/constraints.jl b/src/constraints.jl index f0148884b7f..f356b231830 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -218,7 +218,7 @@ end function moi_add_constraint(model::MOI.ModelLike, f::MOI.AbstractFunction, s::MOI.AbstractSet) if !MOI.supports_constraint(model, typeof(f), typeof(s)) - if moi_mode(model) == Direct + if moi_mode(model) == DIRECT bridge_message = "." elseif moi_bridge_constraints(model) bridge_message = " and there are no bridges that can reformulate it into supported constraints." @@ -282,8 +282,8 @@ end The change in the objective from an infinitesimal relaxation of the constraint. This value is computed from [`dual`](@ref) and can be queried only when -`has_duals` is `true` and the objective sense is `MinSense` or `MaxSense` -(not `FeasibilitySense`). For linear constraints, the shadow prices differ at +`has_duals` is `true` and the objective sense is `MIN_SENSE` or `MAX_SENSE` +(not `FEASIBILITY_SENSE`). For linear constraints, the shadow prices differ at most in sign from the `dual` value depending on the objective sense. ## Notes @@ -308,9 +308,9 @@ function shadow_price_less_than_(dual_value, sense::MOI.OptimizationSense) # shadow price is nonnegative (because relaxing a constraint can only # improve the objective). By MOI convention, a feasible dual on a LessThan # set is nonpositive, so we flip the sign when maximizing. 
- if sense == MOI.MaxSense + if sense == MOI.MAX_SENSE return -dual_value - elseif sense == MOI.MinSense + elseif sense == MOI.MIN_SENSE return dual_value else error("The shadow price is not available because the objective sense " * @@ -322,9 +322,9 @@ end function shadow_price_greater_than_(dual_value, sense::MOI.OptimizationSense) # By MOI convention, a feasible dual on a GreaterThan set is nonnegative, # so we flip the sign when minimizing. (See comment in the method above). - if sense == MOI.MaxSense + if sense == MOI.MAX_SENSE return dual_value - elseif sense == MOI.MinSense + elseif sense == MOI.MIN_SENSE return -dual_value else error("The shadow price is not available because the objective sense " * diff --git a/src/copy.jl b/src/copy.jl index 5e76695fdc8..66214f52b75 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -68,7 +68,7 @@ the reference map. ## Note -Model copy is not supported in Direct mode, i.e. when a model is constructed +Model copy is not supported in `DIRECT` mode, i.e. when a model is constructed using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) constructor. Moreover, independently on whether an optimizer was provided at model construction, the new model will have no optimizer, i.e., an optimizer @@ -90,10 +90,10 @@ cref_new = reference_map[cref] ``` """ function copy_model(model::Model) - if mode(model) == Direct - error("Cannot copy a model in Direct mode. Use the `Model` constructor", - " instead of the `direct_model` constructor to be able to copy", - " the constructed model.") + if mode(model) == DIRECT + error("Cannot copy a model in `DIRECT` mode. Use the `Model` ", + "constructor instead of the `direct_model` constructor to be ", + "able to copy the constructed model.") end caching_mode = backend(model).mode new_model = Model(caching_mode = caching_mode) @@ -147,7 +147,7 @@ and its copy. ## Note -Model copy is not supported in Direct mode, i.e. 
when a model is constructed +Model copy is not supported in `DIRECT` mode, i.e. when a model is constructed using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) constructor. Moreover, independently on whether an optimizer was provided at model construction, the new model will have no optimizer, i.e., an optimizer diff --git a/src/macros.jl b/src/macros.jl index 549563e839c..7c410b87b2f 100644 --- a/src/macros.jl +++ b/src/macros.jl @@ -796,7 +796,7 @@ Constructs a vector of `QuadConstraint` objects. Similar to `@QuadConstraint`, e Return an expression whose value is an `MOI.OptimizationSense` corresponding to `sense`. Sense is either the symbol `:Min` or `:Max`, corresponding -respectively to `MOI.MinSense` and `MOI.MaxSense` or it is another symbol, +respectively to `MOI.MIN_SENSE` and `MOI.MAX_SENSE` or it is another symbol, which should be the name of a variable or expression whose value is an `MOI.OptimizationSense`. In the last case, the expression throws an error using the `_error` @@ -804,9 +804,9 @@ function in case the value is not an `MOI.OptimizationSense`. """ function moi_sense(_error::Function, sense) if sense == :Min - expr = MOI.MinSense + expr = MOI.MIN_SENSE elseif sense == :Max - expr = MOI.MaxSense + expr = MOI.MAX_SENSE else # Refers to a variable that holds the sense. # TODO: Better document this behavior @@ -829,8 +829,8 @@ end @objective(model::Model, sense, func) Set the objective sense to `sense` and objective function to `func`. The -objective sense can be either `Min`, `Max`, `MathOptInterface.MinSense`, -`MathOptInterface.MaxSense` or `MathOptInterface.FeasibilitySense`; see +objective sense can be either `Min`, `Max`, `MathOptInterface.MIN_SENSE`, +`MathOptInterface.MAX_SENSE` or `MathOptInterface.FEASIBILITY_SENSE`; see [`MathOptInterface.ObjectiveSense`](http://www.juliaopt.org/MathOptInterface.jl/v0.6/apireference.html#MathOptInterface.ObjectiveSense). 
In order to set the sense programatically, i.e., when `sense` is a Julia variable whose value is the sense, one of the three @@ -847,8 +847,8 @@ julia> model = Model() A JuMP Model Feasibility problem with: Variables: 0 -Model mode: Automatic -CachingOptimizer state: NoOptimizer +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. julia> @variable(model, x) @@ -867,8 +867,8 @@ julia> @objective(model, Max, 2x - 1) To set a quadratic objective and set the objective sense programatically, do as follows: ```jldoctest @objective -julia> sense = JuMP.MOI.MinSense -MinSense::OptimizationSense = 0 +julia> sense = JuMP.MOI.MIN_SENSE +MIN_SENSE::OptimizationSense = 0 julia> @objective(model, sense, x^2 - 2x + 1) x² - 2 x + 1 diff --git a/src/nlp.jl b/src/nlp.jl index d1b25a325ae..41f2e71ffaf 100644 --- a/src/nlp.jl +++ b/src/nlp.jl @@ -1153,7 +1153,7 @@ function register(m::Model, s::Symbol, dimension::Integer, f::Function, ∇f::Fu end # TODO: Add a docstring. -# Ex: set_NL_objective(model, MOI.MinSense, :($x + $y^2)) +# Ex: set_NL_objective(model, MOI.MIN_SENSE, :($x + $y^2)) function set_NL_objective(model::Model, sense::MOI.OptimizationSense, x) return set_objective(model, sense, NonlinearExprData(model, x)) end diff --git a/src/objective.jl b/src/objective.jl index 8496708f3e9..e124af3551f 100644 --- a/src/objective.jl +++ b/src/objective.jl @@ -103,8 +103,8 @@ julia> model = Model() A JuMP Model Feasibility problem with: Variables: 0 -Model mode: Automatic -CachingOptimizer state: NoOptimizer +Model mode: AUTOMATIC +CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. julia> @variable(model, x) diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 44b030d7497..e030b666f99 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -4,32 +4,32 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
function error_if_direct_mode(model::Model, func::Symbol) - if mode(model) == Direct - error("The `$func` function is not supported in Direct mode.") + if mode(model) == DIRECT + error("The `$func` function is not supported in DIRECT mode.") end end # These methods directly map to CachingOptimizer methods. # They cannot be called in Direct mode. -function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer, +function MOIU.reset_optimizer(model::Model, optimizer::MOI.AbstractOptimizer, bridge_constraints::Bool=true) - error_if_direct_mode(model, :resetoptimizer!) - MOIU.resetoptimizer!(backend(model), optimizer) + error_if_direct_mode(model, :reset_optimizer) + MOIU.reset_optimizer(backend(model), optimizer) end -function MOIU.resetoptimizer!(model::Model) - error_if_direct_mode(model, :resetoptimizer!) - MOIU.resetoptimizer!(backend(model)) +function MOIU.reset_optimizer(model::Model) + error_if_direct_mode(model, :reset_optimizer) + MOIU.reset_optimizer(backend(model)) end -function MOIU.dropoptimizer!(model::Model) - error_if_direct_mode(model, :dropoptimizer!) - MOIU.dropoptimizer!(backend(model)) +function MOIU.drop_optimizer(model::Model) + error_if_direct_mode(model, :drop_optimizer) + MOIU.drop_optimizer(backend(model)) end -function MOIU.attachoptimizer!(model::Model) - error_if_direct_mode(model, :attachoptimizer!) - MOIU.attachoptimizer!(backend(model)) +function MOIU.attach_optimizer(model::Model) + error_if_direct_mode(model, :attach_optimizer) + MOIU.attach_optimizer(backend(model)) end function set_optimizer(model::Model, optimizer_factory::OptimizerFactory; @@ -41,16 +41,17 @@ function set_optimizer(model::Model, optimizer_factory::OptimizerFactory; # If default_copy_to without names is supported, no need for a second # cache. 
if !MOIU.supports_default_copy_to(optimizer, false) - if mode(model) == Manual + if mode(model) == MANUAL # TODO figure out what to do in manual mode with the two caches - error("Bridges in Manual mode with an optimizer not supporting `default_copy_to` is not supported yet") + error("Bridges in `MANUAL` mode with an optimizer not ", + "supporting `default_copy_to` is not supported yet") end universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}()) optimizer = MOIU.CachingOptimizer(universal_fallback, optimizer) end optimizer = MOI.Bridges.fullbridgeoptimizer(optimizer, Float64) end - MOIU.resetoptimizer!(model, optimizer) + MOIU.reset_optimizer(model, optimizer) end """ @@ -92,15 +93,15 @@ function optimize!(model::Model, end if optimizer_factory !== nothing - if mode(model) == Direct - error("An optimizer factory cannot be provided at the `optimize` call in Direct mode.") + if mode(model) == DIRECT + error("An optimizer factory cannot be provided at the `optimize` call in DIRECT mode.") end - if MOIU.state(backend(model)) != MOIU.NoOptimizer + if MOIU.state(backend(model)) != MOIU.NO_OPTIMIZER error("An optimizer factory cannot both be provided in the `Model` constructor and at the `optimize` call.") end set_optimizer(model, optimizer_factory, bridge_constraints=bridge_constraints) - MOIU.attachoptimizer!(model) + MOIU.attach_optimizer(model) end # If the user or an extension has provided an optimize hook, call diff --git a/src/print.jl b/src/print.jl index d9e705c0474..f1392a72507 100644 --- a/src/print.jl +++ b/src/print.jl @@ -150,9 +150,9 @@ function Base.show(io::IO, model::Model) plural(n) = (n==1 ? 
"" : "s") println(io, "A JuMP Model") sense = objective_sense(model) - if sense == MOI.MaxSense + if sense == MOI.MAX_SENSE print(io, "Maximization") - elseif sense == MOI.MinSense + elseif sense == MOI.MIN_SENSE print(io, "Minimization") else print(io, "Feasibility") @@ -176,7 +176,7 @@ function Base.show(io::IO, model::Model) end model_mode = mode(model) println(io, "Model mode: ", model_mode) - if model_mode == Manual || model_mode == Automatic + if model_mode == MANUAL || model_mode == AUTOMATIC println(io, "CachingOptimizer state: ", MOIU.state(backend(model))) end @@ -202,14 +202,14 @@ function model_string(print_mode, model::Model) eol = ijl ? "\\\\\n" : "\n" sense = objective_sense(model) str = "" - if sense == MOI.MaxSense + if sense == MOI.MAX_SENSE str *= ijl ? "\\max" : "Max" - elseif sense == MOI.MinSense + elseif sense == MOI.MIN_SENSE str *= ijl ? "\\min" : "Min" else str *= ijl ? "\\text{feasibility}" : "Feasibility" end - if sense != MOI.FeasibilitySense + if sense != MOI.FEASIBILITY_SENSE if ijl str *= "\\quad" end diff --git a/src/variables.jl b/src/variables.jl index c6e3da11026..f2806e648c4 100644 --- a/src/variables.jl +++ b/src/variables.jl @@ -589,7 +589,7 @@ return false. See also [`value`](@ref). 
""" -has_values(model::Model) = primal_status(model) != MOI.NoSolution +has_values(model::Model) = primal_status(model) != MOI.NO_SOLUTION @Base.deprecate setvalue(v::VariableRef, val::Number) set_start_value(v, val) diff --git a/test/JuMPExtension.jl b/test/JuMPExtension.jl index 7647e679360..a7ecd1d2e57 100644 --- a/test/JuMPExtension.jl +++ b/test/JuMPExtension.jl @@ -28,7 +28,8 @@ mutable struct MyModel <: JuMP.AbstractModel Dict{Int, String}(), # Variables 0, Dict{ConstraintIndex, JuMP.AbstractConstraint}(), Dict{ConstraintIndex, String}(), # Constraints - MOI.FeasibilitySense, zero(JuMP.GenericAffExpr{Float64, MyVariableRef}), + MOI.FEASIBILITY_SENSE, + zero(JuMP.GenericAffExpr{Float64, MyVariableRef}), Dict{Symbol, Any}()) end end diff --git a/test/constraint.jl b/test/constraint.jl index e67d98fa27d..e4e14ff4e31 100644 --- a/test/constraint.jl +++ b/test/constraint.jl @@ -339,8 +339,8 @@ function test_shadow_price(model_string, constraint_dual, constraint_shadow) eval_objective_value=false, eval_variable_constraint_dual=false)) mock_optimizer = JuMP.backend(model).optimizer.model - MOI.set(mock_optimizer, MOI.TerminationStatus(), MOI.Optimal) - MOI.set(mock_optimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mock_optimizer, MOI.TerminationStatus(), MOI.OPTIMAL) + MOI.set(mock_optimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) JuMP.optimize!(model) @testset "shadow price of $constraint_name" for constraint_name in keys(constraint_dual) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index 23d42c703b5..196a9e8021a 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -44,11 +44,11 @@ eval_objective_value=false)) mockoptimizer = JuMP.backend(m).optimizer.model - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, 
MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(y), 0.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c), -1.0) @@ -58,15 +58,15 @@ #@test JuMP.isattached(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) == 1.0 @test JuMP.value(y) == 0.0 @test JuMP.value(x + y) == 1.0 @test JuMP.objective_value(m) == -1.0 - @test JuMP.dual_status(m) == MOI.FeasiblePoint + @test JuMP.dual_status(m) == MOI.FEASIBLE_POINT @test JuMP.dual(c) == -1 @test JuMP.dual(JuMP.UpperBoundRef(x)) == 0.0 @test JuMP.dual(JuMP.LowerBoundRef(y)) == 1.0 @@ -82,11 +82,11 @@ @objective(m, Min, -x) c = @constraint(m, x + y <= 1) - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(y), 0.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c), -1.0) @@ -98,15 +98,15 @@ #@test JuMP.isattached(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == 
MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) == 1.0 @test JuMP.value(y) == 0.0 @test JuMP.value(x + y) == 1.0 @test JuMP.objective_value(m) == -1.0 - @test JuMP.dual_status(m) == MOI.FeasiblePoint + @test JuMP.dual_status(m) == MOI.FEASIBLE_POINT @test JuMP.dual(c) == -1 @test JuMP.dual(JuMP.UpperBoundRef(x)) == 0.0 @test JuMP.dual(JuMP.LowerBoundRef(y)) == 1.0 @@ -119,7 +119,7 @@ m = Model(with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), eval_objective_value=false), - caching_mode = MOIU.Automatic) + caching_mode = MOIU.AUTOMATIC) @variable(m, x == 1.0, Int) @variable(m, y, Bin) @objective(m, Max, x) @@ -140,24 +140,24 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["xfix", "xint", "ybin"]) - MOIU.attachoptimizer!(m) + MOIU.attach_optimizer(m) mockoptimizer = JuMP.backend(m).optimizer.model - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ObjectiveValue(), 1.0) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(y), 0.0) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.NoSolution) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.NO_SOLUTION) JuMP.optimize!(m) #@test JuMP.isattached(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) == 1.0 @test JuMP.value(y) == 0.0 @@ -193,11 +193,11 @@ eval_objective_value=false)) mockoptimizer = 
JuMP.backend(m).optimizer.model - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(y), 0.0) MOI.set(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizer_index(c1), -1.0) @@ -207,14 +207,14 @@ #@test JuMP.isattached(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) == 1.0 @test JuMP.value(y) == 0.0 @test JuMP.objective_value(m) == -1.0 - @test JuMP.dual_status(m) == MOI.FeasiblePoint + @test JuMP.dual_status(m) == MOI.FEASIBLE_POINT @test JuMP.dual(c1) == -1.0 @test JuMP.dual(c2) == 2.0 @test JuMP.dual(c3) == 3.0 @@ -250,13 +250,13 @@ mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), eval_objective_value=false, eval_variable_constraint_dual=false) - MOIU.resetoptimizer!(m, mockoptimizer) - MOIU.attachoptimizer!(m) + MOIU.reset_optimizer(m, mockoptimizer) + MOIU.attach_optimizer(m) - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) 
MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(y), 0.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(z), 0.0) @@ -268,8 +268,8 @@ #@test JuMP.isattached(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) == 1.0 @test JuMP.value(y) == 0.0 @@ -311,13 +311,13 @@ mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), eval_objective_value=false, eval_variable_constraint_dual=false) - MOIU.resetoptimizer!(m, mockoptimizer) - MOIU.attachoptimizer!(m) + MOIU.reset_optimizer(m, mockoptimizer) + MOIU.attach_optimizer(m) - MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Optimal) + MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.OPTIMAL) MOI.set(mockoptimizer, MOI.ResultCount(), 1) - MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set(mockoptimizer, MOI.PrimalStatus(), MOI.FEASIBLE_POINT) + MOI.set(mockoptimizer, MOI.DualStatus(), MOI.FEASIBLE_POINT) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x[1,1]), 1.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x[1,2]), 2.0) MOI.set(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizer_index(x[2,2]), 4.0) @@ -330,8 +330,8 @@ JuMP.optimize!(m) - @test JuMP.termination_status(m) == MOI.Optimal - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.OPTIMAL + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.has_values(m) @test JuMP.value.(x) == [1.0 2.0; 2.0 4.0] diff --git a/test/hygiene.jl b/test/hygiene.jl index b524a8a7ead..a79c473dfea 100644 --- a/test/hygiene.jl +++ b/test/hygiene.jl @@ -12,7 +12,7 @@ using Test import JuMP model = 
JuMP.Model() -sense = JuMP.MathOptInterface.MinSense +sense = JuMP.MathOptInterface.MIN_SENSE JuMP.@variable(model, x >= 0) r = 3:5 JuMP.@variable(model, y[i=r] <= i) diff --git a/test/model.jl b/test/model.jl index 07f5fc3105c..e0948b1c85f 100644 --- a/test/model.jl +++ b/test/model.jl @@ -158,7 +158,7 @@ function dummy_optimizer_hook(::JuMP.AbstractModel) end @testset "Model copy" begin for copy_model in (true, true) @testset "Using $(copy_model ? "JuMP.copy_model" : "Base.copy")" begin - for caching_mode in (MOIU.Automatic, MOIU.Manual) + for caching_mode in (MOIU.AUTOMATIC, MOIU.MANUAL) @testset "In $caching_mode mode" begin model = Model(caching_mode = caching_mode) model.optimize_hook = dummy_optimizer_hook diff --git a/test/nlp.jl b/test/nlp.jl index a3211a26b86..4980465c1cf 100644 --- a/test/nlp.jl +++ b/test/nlp.jl @@ -463,7 +463,7 @@ model = Model() @variable(model, x) @variable(model, y) - JuMP.set_NL_objective(model, MOI.MinSense, :($x^2 + $y^2)) + JuMP.set_NL_objective(model, MOI.MIN_SENSE, :($x^2 + $y^2)) JuMP.add_NL_constraint(model, :($x + $y <= 1)) JuMP.add_NL_constraint(model, :($x + $y >= 1)) JuMP.add_NL_constraint(model, :($x + $y == 1)) diff --git a/test/nlp_solver.jl b/test/nlp_solver.jl index 8b880568fd8..e6e538095ea 100644 --- a/test/nlp_solver.jl +++ b/test/nlp_solver.jl @@ -47,8 +47,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value.(x) ≈ [1.000000, 4.742999, 3.821150, 1.379408] atol=1e-3 end @@ -65,7 +65,7 @@ const MOI = MathOptInterface m = Model(with_optimizer(Ipopt.Optimizer, print_level=0)) initval = [1,5,5,1] @variable(m, 1 <= x[i=1:4] <= 5, start=initval[i]) - JuMP.set_NL_objective(m, MOI.MinSense, + JuMP.set_NL_objective(m, MOI.MIN_SENSE, :($(x[1]) * $(x[4]) * ($(x[1]) + 
$(x[2]) + $(x[3])) + $(x[3]))) JuMP.add_NL_constraint(m, @@ -78,8 +78,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value.(x) ≈ [1.000000, 4.742999, 3.821150, 1.379408] atol=1e-3 end @@ -126,8 +126,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ 5326.851310161077 atol=1e-5 end @@ -144,9 +144,9 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - # Ipopt returns AlmostLocallySolved and NearlyFeasiblePoint on this instance. - # @test JuMP.termination_status(m) == MOI.LocallySolved - # @test JuMP.primal_status(m) == MOI.FeasiblePoint + # Ipopt returns ALMOST_LOCALLY_SOLVED and NEARLY_FEASIBLE_POINT on this instance. 
+ # @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + # @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -45.77846971 atol=1e-5 end @@ -167,8 +167,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -47.76109026 atol=1e-5 end @@ -188,8 +188,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -47.76109026 atol=1e-5 end @@ -223,8 +223,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -1768.80696 atol=1e-3 end @@ -269,8 +269,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT # This test occasionally fails, for unknown reasons. 
@test JuMP.objective_value(m) ≈ 97.588409 atol=1e-3 @@ -337,8 +337,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value.(x[1:4]) ≈ [8.0, 49.0, 3.0, 1.0] atol=1e-4 @test JuMP.objective_value(m) ≈ 664.82045 atol=1e-5 @@ -354,39 +354,39 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ u atol=1e-6 @NLobjective(m, Min, x) JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ l atol=1e-6 end @testset "Two-sided constraints (no macros)" begin m = Model(with_optimizer(Ipopt.Optimizer, print_level=0)) @variable(m, x) - JuMP.set_NL_objective(m, MOI.MaxSense, x) + JuMP.set_NL_objective(m, MOI.MAX_SENSE, x) l = -1 u = 1 JuMP.add_NL_constraint(m, :($l <= $x <= $u)) JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ u atol=1e-6 - JuMP.set_NL_objective(m, MOI.MinSense, x) + JuMP.set_NL_objective(m, MOI.MIN_SENSE, x) JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test 
JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ l atol=1e-6 end @@ -403,8 +403,8 @@ const MOI = MathOptInterface function test_result() @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.value(x) ≈ 0.9774436 atol=1e-6 @@ -414,7 +414,7 @@ const MOI = MathOptInterface @test JuMP.value(r[4]) ≈ 0.0 atol=1e-6 @test JuMP.value(r[5]) ≈ 0.0 atol=1e-6 @test JuMP.value(r[6]) ≈ 6.0 atol=1e-6 - @test JuMP.dual_status(m) == MOI.FeasiblePoint + @test JuMP.dual_status(m) == MOI.FEASIBLE_POINT # Reduced costs @test JuMP.dual(JuMP.LowerBoundRef(x)) ≈ 0.0 atol=1e-6 @test JuMP.dual(JuMP.UpperBoundRef(y)) ≈ 0.0 atol=1e-6 @@ -455,8 +455,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -1-4/sqrt(3) atol=1e-6 @test JuMP.value(x) + JuMP.value(y) ≈ -1/3 atol=1e-3 end @@ -470,8 +470,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ -1-4/sqrt(3) atol=1e-6 @test JuMP.value(x) + JuMP.value(y) ≈ -1/3 atol=1e-3 end @@ -484,8 +484,8 @@ const MOI = MathOptInterface JuMP.optimize!(m) @test JuMP.has_values(m) - @test JuMP.termination_status(m) == MOI.LocallySolved - @test JuMP.primal_status(m) == MOI.FeasiblePoint + @test JuMP.termination_status(m) == MOI.LOCALLY_SOLVED + @test 
JuMP.primal_status(m) == MOI.FEASIBLE_POINT @test JuMP.objective_value(m) ≈ sqrt(1/2) atol=1e-6 @test JuMP.value.(x) ≈ [sqrt(1/2), 0] atol=1e-6 end diff --git a/test/objective.jl b/test/objective.jl index 5960ab1a342..6ed6b7318e9 100644 --- a/test/objective.jl +++ b/test/objective.jl @@ -13,8 +13,8 @@ function objectives_test(ModelType::Type{<:JuMP.AbstractModel}, VariableRefType: @testset "objective_sense set and get" begin model = ModelType() - JuMP.set_objective_sense(model, MOI.FeasibilitySense) - @test JuMP.objective_sense(model) == MOI.FeasibilitySense + JuMP.set_objective_sense(model, MOI.FEASIBILITY_SENSE) + @test JuMP.objective_sense(model) == MOI.FEASIBILITY_SENSE end @testset "SingleVariable objectives" begin @@ -22,13 +22,13 @@ function objectives_test(ModelType::Type{<:JuMP.AbstractModel}, VariableRefType: @variable(m, x) @objective(m, Min, x) - @test JuMP.objective_sense(m) == MOI.MinSense + @test JuMP.objective_sense(m) == MOI.MIN_SENSE @test JuMP.objective_function_type(m) == VariableRefType @test JuMP.objective_function(m) == x @test JuMP.objective_function(m, VariableRefType) == x @objective(m, Max, x) - @test JuMP.objective_sense(m) == MOI.MaxSense + @test JuMP.objective_sense(m) == MOI.MAX_SENSE @test JuMP.objective_function_type(m) == VariableRefType @test JuMP.objective_function(m) == x @test JuMP.objective_function(m, VariableRefType) == x @@ -39,13 +39,13 @@ function objectives_test(ModelType::Type{<:JuMP.AbstractModel}, VariableRefType: @variable(m, x) @objective(m, Min, 2x) - @test JuMP.objective_sense(m) == MOI.MinSense + @test JuMP.objective_sense(m) == MOI.MIN_SENSE @test JuMP.objective_function_type(m) == AffExprType @test JuMP.isequal_canonical(JuMP.objective_function(m), 2x) @test JuMP.isequal_canonical(JuMP.objective_function(m, AffExprType), 2x) @objective(m, Max, x + 3x + 1) - @test JuMP.objective_sense(m) == MOI.MaxSense + @test JuMP.objective_sense(m) == MOI.MAX_SENSE @test JuMP.objective_function_type(m) == AffExprType @test 
JuMP.isequal_canonical(JuMP.objective_function(m), 4x + 1) @test JuMP.isequal_canonical(JuMP.objective_function(m, AffExprType), 4x + 1) @@ -56,7 +56,7 @@ function objectives_test(ModelType::Type{<:JuMP.AbstractModel}, VariableRefType: @variable(m, x) @objective(m, Min, x^2 + 2x) - @test JuMP.objective_sense(m) == MOI.MinSense + @test JuMP.objective_sense(m) == MOI.MIN_SENSE @test JuMP.objective_function_type(m) == QuadExprType @test JuMP.isequal_canonical(JuMP.objective_function(m), x^2 + 2x) @test JuMP.isequal_canonical(JuMP.objective_function(m, QuadExprType), x^2 + 2x) @@ -74,9 +74,9 @@ function objectives_test(ModelType::Type{<:JuMP.AbstractModel}, VariableRefType: m = ModelType() @variable(m, x) - sense = MOI.MinSense + sense = MOI.MIN_SENSE @objective(m, sense, 2x) - @test JuMP.objective_sense(m) == MOI.MinSense + @test JuMP.objective_sense(m) == MOI.MIN_SENSE @test JuMP.isequal_canonical(JuMP.objective_function(m, AffExprType), 2x) sense = :Min diff --git a/test/print.jl b/test/print.jl index 841b4540ca9..fbd2123cdc6 100644 --- a/test/print.jl +++ b/test/print.jl @@ -413,8 +413,8 @@ function printing_test(ModelType::Type{<:JuMP.AbstractModel}) `MathOptInterface.ScalarAffineFunction{Float64}`-in-`MathOptInterface.LessThan{Float64}`: 1 constraint `MathOptInterface.ScalarQuadraticFunction{Float64}`-in-`MathOptInterface.LessThan{Float64}`: 1 constraint `MathOptInterface.VectorAffineFunction{Float64}`-in-`MathOptInterface.SecondOrderCone`: 1 constraint - Model mode: Automatic - CachingOptimizer state: NoOptimizer + Model mode: AUTOMATIC + CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. 
Names registered in the model: a, a1, b, b1, c, c1, fi, u, x, y, z""", repl=:show) @@ -457,8 +457,8 @@ function printing_test(ModelType::Type{<:JuMP.AbstractModel}) `MathOptInterface.SingleVariable`-in-`MathOptInterface.ZeroOne`: 1 constraint `MathOptInterface.SingleVariable`-in-`MathOptInterface.Integer`: 1 constraint `MathOptInterface.ScalarQuadraticFunction{Float64}`-in-`MathOptInterface.LessThan{Float64}`: 1 constraint - Model mode: Automatic - CachingOptimizer state: NoOptimizer + Model mode: AUTOMATIC + CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. Names registered in the model: x, y""", repl=:show) @@ -471,8 +471,8 @@ function printing_test(ModelType::Type{<:JuMP.AbstractModel}) Feasibility problem with: Variable: 1 `MathOptInterface.ScalarAffineFunction{Float64}`-in-`MathOptInterface.LessThan{Float64}`: 1 constraint - Model mode: Automatic - CachingOptimizer state: NoOptimizer + Model mode: AUTOMATIC + CachingOptimizer state: NO_OPTIMIZER Solver name: No optimizer attached. Names registered in the model: x""", repl=:show) end