diff --git a/docs/Project.toml b/docs/Project.toml
index 060076341..ad6cf088b 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -42,6 +42,9 @@ SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe"
 Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
 Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
 Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209"
+Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898"
+Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
 
 [compat]
 AmplNLWriter = "1"
@@ -86,3 +89,7 @@ SymbolicAnalysis = "0.3"
 Symbolics = "6"
 Tracker = ">= 0.2"
 Zygote = ">= 0.5"
+BlackBoxOptim = "0.6"
+Metaheuristics = "3"
+Evolutionary = "0.11"
+
diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md
index ca5b2385b..85f17cf93 100644
--- a/docs/src/optimization_packages/blackboxoptim.md
+++ b/docs/src/optimization_packages/blackboxoptim.md
@@ -67,3 +67,21 @@ prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0,
 sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000,
     maxtime = 1000.0)
 ```
+
+## Multi-objective optimization
+Multi-objective optimization is handled by the `BBO_borg_moea()` optimizer. The objective function should return a tuple (or vector) of objective values, and the call to `solve` should specify the number of objectives and a fitness scheme (typically Pareto fitness). Otherwise, usage is the same as in the single-objective case:
+
+```@example MOO-BBO
+using OptimizationBBO, Optimization, BlackBoxOptim
+using SciMLBase: MultiObjectiveOptimizationFunction
+u0 = [0.25, 0.25]
+opt = OptimizationBBO.BBO_borg_moea()
+function multi_obj_func(x, p)
+    f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function
+    f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function
+    return (f1, f2)
+end
+mof = MultiObjectiveOptimizationFunction(multi_obj_func)
+prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0])
+sol = solve(prob, opt, num_dimensions = 2, fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))
+```
diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md
index 9fa582c74..86cf27b71 100644
--- a/docs/src/optimization_packages/evolutionary.md
+++ b/docs/src/optimization_packages/evolutionary.md
@@ -41,3 +41,20 @@ f = OptimizationFunction(rosenbrock)
 prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
 sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100))
 ```
+
+## Multi-objective optimization
+A multi-objective problem combining the Rosenbrock and Ackley functions can be optimized with `Evolutionary.NSGA2()` as follows:
+
+```@example MOO-Evolutionary
+using Optimization, OptimizationEvolutionary, Evolutionary
+function func(x, p = nothing)::Vector{Float64}
+    f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function
+    f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function
+    return [f1, f2]
+end
+initial_guess = [1.0, 1.0]
+obj_func = MultiObjectiveOptimizationFunction(func)
+algorithm = OptimizationEvolutionary.NSGA2()
+problem = OptimizationProblem(obj_func, initial_guess)
+result = solve(problem, algorithm)
+```
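The `Evolutionary.NSGA2()` example above sets up the problem without box constraints, unlike the single-objective CMAES example on the same page. A minimal sketch of a bounded variant, assuming `lb`/`ub` are relayed to NSGA2 the same way they are for the single-objective Evolutionary solvers (the bound values here are illustrative, not from the PR):

```julia
using Optimization, OptimizationEvolutionary, Evolutionary

# Same two-objective (Rosenbrock + Ackley) function as in the documentation example above.
function func(x, p = nothing)::Vector{Float64}
    f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
    f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) -
         exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0
    return [f1, f2]
end

obj_func = MultiObjectiveOptimizationFunction(func)

# Bounded variant: restrict both decision variables to [0, 2] (illustrative bounds).
problem = OptimizationProblem(obj_func, [1.0, 1.0]; lb = [0.0, 0.0], ub = [2.0, 2.0])
result = solve(problem, OptimizationEvolutionary.NSGA2())
```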
diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md
index ae1694bcc..f95ad505a 100644
--- a/docs/src/optimization_packages/metaheuristics.md
+++ b/docs/src/optimization_packages/metaheuristics.md
@@ -70,3 +70,46 @@ sol = solve(prob, ECA(), use_initial = true, maxiters = 100000, maxtime = 1000.0
 ### With Constraint Equations
 
 While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently does not relay these constraints.
+
+
+## Multi-objective optimization
+The ZDT1 test function can be optimized with the multi-objective algorithms in `Metaheuristics.jl` as follows:
+
+```@example MOO-Metaheuristics
+using Optimization, OptimizationMetaheuristics, Metaheuristics, Statistics
+function zdt1(x)
+    f1 = x[1]
+    g = 1 + 9 * mean(x[2:end])
+    h = 1 - sqrt(f1 / g)
+    f2 = g * h
+    # In this example, we have no constraints
+    gx = [0.0] # Inequality constraints (not used)
+    hx = [0.0] # Equality constraints (not used)
+    return [f1, f2], gx, hx
+end
+multi_obj_fun = MultiObjectiveOptimizationFunction((x, p) -> zdt1(x))
+
+# Define the problem bounds
+lower_bounds = [0.0, 0.0, 0.0]
+upper_bounds = [1.0, 1.0, 1.0]
+
+# Define the initial guess
+initial_guess = [0.5, 0.5, 0.5]
+
+# Create the optimization problem
+prob = OptimizationProblem(multi_obj_fun, initial_guess; lb = lower_bounds, ub = upper_bounds)
+
+nobjectives = 2
+npartitions = 100
+
+# Reference points (Das and Dennis's method)
+weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions)
+
+# Choose the algorithm and solve the problem
+sol1 = solve(prob, Metaheuristics.NSGA2(); maxiters = 100, use_initial = true)
+sol2 = solve(prob, Metaheuristics.NSGA3(); maxiters = 100, use_initial = true)
+sol3 = solve(prob, Metaheuristics.SPEA2(); maxiters = 100, use_initial = true)
+sol4 = solve(prob, Metaheuristics.CCMO(NSGA2(N = 100, p_m = 0.001)))
+sol5 = solve(prob, Metaheuristics.MOEAD_DE(weights, options = Options(debug = false, iterations = 250)); maxiters = 100, use_initial = true)
+sol6 = solve(prob, Metaheuristics.SMS_EMOA(); maxiters = 100, use_initial = true)
+```
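For judging the fronts returned by the ZDT1 example above: at the ideal value g = 1 (i.e. x[2:end] all zero), ZDT1's exact Pareto front is f2 = 1 - sqrt(f1) for f1 in [0, 1]. A small, solver-independent sketch that tabulates points on that analytic front for comparison with the approximations returned by the algorithms above:

```julia
# Exact Pareto front of ZDT1: with g = 1, f2 = 1 - sqrt(f1), f1 ∈ [0, 1].
f1_front = range(0.0, 1.0; length = 11)
f2_front = 1 .- sqrt.(f1_front)

# Print (f1, f2) pairs on the true front; solver output can be eyeballed against these.
for (f1, f2) in zip(f1_front, f2_front)
    println("f1 = ", round(f1, digits = 2), ", f2 = ", round(f2, digits = 3))
end
```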
diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl
index 1b0c6e48f..9277fbcba 100644
--- a/lib/OptimizationBBO/src/OptimizationBBO.jl
+++ b/lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -49,6 +49,8 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO;
         abstol::Union{Number, Nothing} = nothing,
         reltol::Union{Number, Nothing} = nothing,
         verbose::Bool = false,
+        num_dimensions::Union{Number, Nothing} = nothing,
+        fitness_scheme::Union{String, Nothing} = nothing,
         kwargs...)
     if !isnothing(reltol)
         @warn "common reltol is currently not used by $(opt)"
@@ -80,6 +82,18 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO;
         mapped_args = (; mapped_args..., TraceMode = :silent)
     end
 
+    if isa(prob.f, MultiObjectiveOptimizationFunction)
+        if isnothing(num_dimensions) && isnothing(fitness_scheme)
+            mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing = true))
+        elseif isnothing(num_dimensions)
+            mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = fitness_scheme)
+        elseif isnothing(fitness_scheme)
+            mapped_args = (; mapped_args..., NumDimensions = num_dimensions, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing = true))
+        else
+            mapped_args = (; mapped_args..., NumDimensions = num_dimensions, FitnessScheme = fitness_scheme)
+        end
+    end
+
     return mapped_args
 end
 
@@ -177,13 +189,25 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
         end
     end
 
-    opt_args = __map_optimizer_args(cache, cache.opt;
-        callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
-                   cache.data === Optimization.DEFAULT_DATA ?
-                   nothing : _cb,
-        cache.solver_args...,
-        maxiters = maxiters,
-        maxtime = maxtime)
+    if isa(cache.f, MultiObjectiveOptimizationFunction)
+        opt_args = __map_optimizer_args(cache, cache.opt;
+            callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
+                       cache.data === Optimization.DEFAULT_DATA ?
+                       nothing : _cb,
+            cache.solver_args...,
+            maxiters = maxiters,
+            maxtime = maxtime,
+            num_dimensions = isnothing(cache.num_dimensions) ? nothing : cache.num_dimensions,
+            fitness_scheme = isnothing(cache.fitness_scheme) ? nothing : cache.fitness_scheme)
+    else
+        opt_args = __map_optimizer_args(cache, cache.opt;
+            callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
+                       cache.data === Optimization.DEFAULT_DATA ?
+                       nothing : _cb,
+            cache.solver_args...,
+            maxiters = maxiters,
+            maxtime = maxtime)
+    end
 
     opt_setup = BlackBoxOptim.bbsetup(_loss; opt_args...)
 
diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl
index e5215fee8..08117d6b6 100644
--- a/lib/OptimizationBBO/test/runtests.jl
+++ b/lib/OptimizationBBO/test/runtests.jl
@@ -68,8 +68,8 @@ using Test
 
         mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1)
         prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub)
-        sol_1 = solve(prob_1, opt, NumDimensions = 2,
-            FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
+        sol_1 = solve(prob_1, opt, num_dimensions = 2,
+            fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))
         @test sol_1 ≠ nothing
         println("Solution for Sphere and Rastrigin: ", sol_1)
 
@@ -88,8 +88,8 @@ using Test
 
         mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2)
         prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub)
-        sol_2 = solve(prob_2, opt, NumDimensions = 2,
-            FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
+        sol_2 = solve(prob_2, opt, num_dimensions = 2,
+            fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))
         @test sol_2 ≠ nothing
         println("Solution for Rosenbrock and Ackley: ", sol_2)
 
@@ -108,8 +108,8 @@ using Test
 
        mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3)
         prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub)
-        sol_3 = solve(prob_3, opt, NumDimensions = 2,
-            FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
+        sol_3 = solve(prob_3, opt, num_dimensions = 2,
+            fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))
         @test sol_3 ≠ nothing
         println("Solution for ZDT1: ", sol_3)
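The test changes above exercise the new lowercase `num_dimensions`/`fitness_scheme` keywords with two objectives. A sketch of the same keyword path for a three-objective problem, assuming `BBO_borg_moea()` accepts more than two objectives and that `ParetoFitnessScheme{3}` is parameterized by objective count the same way `ParetoFitnessScheme{2}` is; the three-objective function below is purely illustrative and not part of the PR:

```julia
using Optimization, OptimizationBBO, BlackBoxOptim
using SciMLBase: MultiObjectiveOptimizationFunction

# Illustrative three-objective function: sphere, shifted sphere, and a product term.
function three_obj_func(x, p)
    f1 = sum(abs2, x)            # sphere centered at the origin
    f2 = sum(abs2, x .- 1.0)     # sphere centered at (1, 1)
    f3 = abs(x[1] * x[2])        # magnitude of the coordinate product
    return (f1, f2, f3)
end

mof = MultiObjectiveOptimizationFunction(three_obj_func)
prob = Optimization.OptimizationProblem(mof, [0.5, 0.5]; lb = [-1.0, -1.0], ub = [2.0, 2.0])

# Both keywords supplied explicitly, mirroring the updated tests but with three objectives.
sol = solve(prob, OptimizationBBO.BBO_borg_moea(), num_dimensions = 3,
    fitness_scheme = ParetoFitnessScheme{3}(is_minimizing = true))
```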