From 1d467aff827d45852e4c28618b03304660ec890e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Legat?=
Date: Tue, 23 Jan 2024 13:56:15 +0100
Subject: [PATCH] Remove trailing commas

---
 .../examples/sensitivity-analysis-ridge.jl    | 58 +++++++++++++------
 docs/src/examples/sensitivity-analysis-svm.jl |  2 +-
 2 files changed, 42 insertions(+), 18 deletions(-)

diff --git a/docs/src/examples/sensitivity-analysis-ridge.jl b/docs/src/examples/sensitivity-analysis-ridge.jl
index 0309fb59..dd523753 100644
--- a/docs/src/examples/sensitivity-analysis-ridge.jl
+++ b/docs/src/examples/sensitivity-analysis-ridge.jl
@@ -33,9 +33,9 @@
 using JuMP
 import DiffOpt
 import Random
-import Ipopt
+import SCS
 import Plots
-using LinearAlgebra: dot
+using LinearAlgebra: dot, norm
 
 # ## Define and solve the problem
 
@@ -59,14 +59,14 @@ Y = w * X .+ b + 0.8 * randn(N);
 function fit_ridge(X, Y, alpha = 0.1)
     N = length(Y)
     ## Initialize a JuMP Model with Ipopt solver
-    model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
+    model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer))
     set_silent(model)
     @variable(model, w) # angular coefficient
     @variable(model, b) # linear coefficient
     ## expression defining approximation error
     @expression(model, e[i = 1:N], Y[i] - w * X[i] - b)
     ## objective minimizing squared error and ridge penalty
-    @objective(model, Min, 1 / N * dot(e, e) + alpha * (w^2),)
+    @objective(model, Min, 1 / N * dot(e, e) + alpha * (w^2))
     optimize!(model)
     return model, w, b # return model & variables
 end
@@ -115,21 +115,45 @@ b̂ = value(b)
 
 # Sensitivity with respect to x and y
 
-∇y = zero(X)
-∇x = zero(X)
-for i in 1:N
-    MOI.set(
-        model,
-        DiffOpt.ForwardObjectiveFunction(),
-        2w^2 * X[i] + 2b * w - 2 * w * Y[i],
-    )
-    DiffOpt.forward_differentiate!(model)
-    ∇x[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
-    MOI.set(model, DiffOpt.ForwardObjectiveFunction(), (2Y[i] - 2b - 2w * X[i]))
-    DiffOpt.forward_differentiate!(model)
-    ∇y[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
+function sensitivities(model_constructor)
+    MOI.set(model, DiffOpt.ModelConstructor(), model_constructor)
+    ∇y = zero(X)
+    ∇x = zero(X)
+    for i in 1:N
+        MOI.set(
+            model,
+            DiffOpt.ForwardObjectiveFunction(),
+            2w^2 * X[i] + 2b * w - 2 * w * Y[i],
+        )
+        DiffOpt.forward_differentiate!(model)
+        ∇x[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
+        MOI.set(
+            model,
+            DiffOpt.ForwardObjectiveFunction(),
+            (2Y[i] - 2b - 2w * X[i]),
+        )
+        DiffOpt.forward_differentiate!(model)
+        ∇y[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
+    end
+    return ∇x, ∇y
 end
 
+# The sensitivities can be obtained either with the conic DiffOpt model
+
+∇x, ∇y = sensitivities(DiffOpt.ConicProgram.Model)
+
+# Or with the quadratic DiffOpt model
+
+∇x_quad, ∇y_quad = sensitivities(DiffOpt.QuadraticProgram.Model)
+
+# We can see that the tangents `∇x` obtained in both cases are close
+
+norm(∇x - ∇x_quad)
+
+# The same is true for the tangents obtained for `∇y`
+
+norm(∇y - ∇y_quad)
+
 # Visualize point sensitivities with respect to regression points.
 
 p = Plots.scatter(
diff --git a/docs/src/examples/sensitivity-analysis-svm.jl b/docs/src/examples/sensitivity-analysis-svm.jl
index f948cb43..d5af2810 100644
--- a/docs/src/examples/sensitivity-analysis-svm.jl
+++ b/docs/src/examples/sensitivity-analysis-svm.jl
@@ -64,7 +64,7 @@ MOI.set(model, MOI.Silent(), true)
 
 # Define the objective and solve
 
-@objective(model, Min, λ * LinearAlgebra.dot(w, w) + sum(ξ),)
+@objective(model, Min, λ * LinearAlgebra.dot(w, w) + sum(ξ))
 optimize!(model)
 