Skip to content

Commit

Permalink
Remove trailing commas
Browse files Browse the repository at this point in the history
  • Loading branch information
blegat committed Jan 23, 2024
1 parent 209040f commit 1d467af
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 18 deletions.
58 changes: 41 additions & 17 deletions docs/src/examples/sensitivity-analysis-ridge.jl
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@
using JuMP
import DiffOpt
import Random
import Ipopt
import SCS
import Plots
using LinearAlgebra: dot
using LinearAlgebra: dot, norm

# ## Define and solve the problem

Expand All @@ -59,14 +59,14 @@ Y = w * X .+ b + 0.8 * randn(N);
## Fit a ridge regression `Y ≈ w * X .+ b` by solving the convex program
## `min (1/N) * ||Y - w*X .- b||² + alpha * w²` with a differentiable optimizer.
## Returns the solved JuMP model together with the `w` and `b` variables so the
## caller can query values and sensitivities.
function fit_ridge(X, Y, alpha = 0.1)
    N = length(Y)
    ## Initialize a JuMP Model wrapping the SCS solver in DiffOpt's
    ## differentiable-optimizer layer (required for forward_differentiate!)
    model = Model(() -> DiffOpt.diff_optimizer(SCS.Optimizer))
    set_silent(model)
    @variable(model, w) # angular coefficient
    @variable(model, b) # linear coefficient
    ## expression defining approximation error for each data point
    @expression(model, e[i = 1:N], Y[i] - w * X[i] - b)
    ## objective minimizing mean squared error plus the ridge penalty on `w`
    @objective(model, Min, 1 / N * dot(e, e) + alpha * (w^2))
    optimize!(model)
    return model, w, b # return model & variables
end
Expand Down Expand Up @@ -115,21 +115,45 @@ b̂ = value(b)

# Sensitivity with respect to x and y

∇y = zero(X)
∇x = zero(X)
for i in 1:N
MOI.set(
model,
DiffOpt.ForwardObjectiveFunction(),
2w^2 * X[i] + 2b * w - 2 * w * Y[i],
)
DiffOpt.forward_differentiate!(model)
∇x[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
MOI.set(model, DiffOpt.ForwardObjectiveFunction(), (2Y[i] - 2b - 2w * X[i]))
DiffOpt.forward_differentiate!(model)
∇y[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
## Compute, for every data point `i`, the forward-mode derivative of the optimal
## `w` with respect to `X[i]` (first output) and `Y[i]` (second output), using the
## DiffOpt backend selected by `model_constructor`. Reads the file-level
## `model`, `X`, `Y`, `N`, `w`, and `b`.
function sensitivities(model_constructor)
    ## Select which DiffOpt differentiation model to use (conic or quadratic)
    MOI.set(model, DiffOpt.ModelConstructor(), model_constructor)
    dw_dx = zero(X)
    dw_dy = zero(X)
    for i in 1:N
        ## Seed the forward pass with the objective perturbation w.r.t. `X[i]`
        perturbation_x = 2w^2 * X[i] + 2b * w - 2 * w * Y[i]
        MOI.set(model, DiffOpt.ForwardObjectiveFunction(), perturbation_x)
        DiffOpt.forward_differentiate!(model)
        dw_dx[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
        ## Seed the forward pass with the objective perturbation w.r.t. `Y[i]`
        perturbation_y = 2Y[i] - 2b - 2w * X[i]
        MOI.set(model, DiffOpt.ForwardObjectiveFunction(), perturbation_y)
        DiffOpt.forward_differentiate!(model)
        dw_dy[i] = MOI.get(model, DiffOpt.ForwardVariablePrimal(), w)
    end
    return dw_dx, dw_dy
end

# The sensitivities can either be obtained with the conic DiffOpt model

∇x, ∇y = sensitivities(DiffOpt.ConicProgram.Model)

# Or with the quadratic DiffOpt model

∇x_quad, ∇y_quad = sensitivities(DiffOpt.QuadraticProgram.Model)

# We can see that the tangents `∇x` obtained in both cases are close

norm(∇x - ∇x_quad)

# The same is true for the tangents obtained for `∇y`

norm(∇y - ∇y_quad)

# Visualize point sensitivities with respect to regression points.

p = Plots.scatter(
Expand Down
2 changes: 1 addition & 1 deletion docs/src/examples/sensitivity-analysis-svm.jl
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ MOI.set(model, MOI.Silent(), true)

# Define the objective and solve

## Objective: λ-weighted squared norm of `w` plus the sum of the slack variables ξ
@objective(model, Min, λ * LinearAlgebra.dot(w, w) + sum(ξ))

optimize!(model)

Expand Down

0 comments on commit 1d467af

Please sign in to comment.