fixed examples
ThummeTo committed Nov 8, 2023
1 parent 5a4a001 commit e71b70a
Showing 7 changed files with 23 additions and 21 deletions.
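The five example notebooks all receive the identical one-line fix: the FMU call inside each Flux Chain now passes dx_refs=:all, so the FMU layer explicitly returns the full vector of state derivatives instead of relying on an implicit default. A minimal sketch of the resulting pattern, assuming an ME-type FMU bound to simpleFMU and numStates = fmiGetNumberOfStates(simpleFMU) as in the notebooks:

using Flux     # Chain, Dense
using FMIFlux  # makes a loaded FMU callable like a layer

# before this commit: net = Chain(x -> simpleFMU(x=x), ...)
# after: request all state derivatives explicitly via dx_refs=:all
net = Chain(x -> simpleFMU(x=x, dx_refs=:all),
            Dense(numStates, 16, tanh),
            Dense(16, 16, tanh),
            Dense(16, numStates))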
2 changes: 1 addition & 1 deletion examples/src/advanced_hybrid_ME.ipynb
@@ -554,7 +554,7 @@
"numAdditionalVRs = length(additionalVRs)\n",
"\n",
"net = Chain(\n",
" x -> simpleFMU(x=x, y_refs=additionalVRs),\n",
" x -> simpleFMU(x=x, dx_refs=:all, y_refs=additionalVRs),\n",
" preProc!,\n",
" Dense(numStates+numAdditionalVRs, 16, tanh),\n",
" postProc!,\n",
2 changes: 1 addition & 1 deletion examples/src/juliacon_2023.ipynb
@@ -451,7 +451,7 @@
" gates = ScaleSum([1.0, 1.0, 0.0, 0.0], [[1,3], [2,4]]) # gates with sum\n",
"\n",
" # setup the NeuralFMU topology\n",
" model = Chain(x -> f(; x=x), # take `x`, put it into the FMU, retrieve `dx`\n",
" model = Chain(x -> f(; x=x, dx_refs=:all), # take `x`, put it into the FMU, retrieve `dx`\n",
" dx -> cache(dx), # cache `dx`\n",
" dx -> dx[4:6], # forward only dx[4, 5, 6]\n",
" preProcess, # pre-process `dx`\n",
2 changes: 1 addition & 1 deletion examples/src/mdpi_2022.ipynb
@@ -457,7 +457,7 @@
"gates = ScaleSum([1.0, 0.0]) # signal from FMU (#1 = 1.0), signal from ANN (#2 = 0.0)\n",
"\n",
"# setup the NeuralFMU topology\n",
"net = Chain(x -> fmu(; x=x), # take `x`, put it into the FMU, retrieve `dx`\n",
"net = Chain(x -> fmu(; x=x, dx_refs=:all), # take `x`, put it into the FMU, retrieve `dx`\n",
" dx -> cache(dx), # cache `dx`\n",
" dx -> dx[4:6], # forward only dx[4, 5, 6]\n",
" preProcess, # pre-process `dx`\n",
2 changes: 1 addition & 1 deletion examples/src/modelica_conference_2021.ipynb
@@ -777,7 +777,7 @@
"end\n",
"\n",
"net = Chain(Dense(numStates, numStates, identity),\n",
" x -> simpleFMU(x=x),\n",
" x -> simpleFMU(x=x, dx_refs=:all),\n",
" Dense(numStates, 8, identity),\n",
" Dense(8, 8, tanh),\n",
" Dense(8, numStates))"
2 changes: 1 addition & 1 deletion examples/src/simple_hybrid_ME.ipynb
@@ -413,7 +413,7 @@
"# NeuralFMU setup\n",
"numStates = fmiGetNumberOfStates(simpleFMU)\n",
"\n",
"net = Chain(x -> simpleFMU(x=x),\n",
"net = Chain(x -> simpleFMU(x=x, dx_refs=:all),\n",
" Dense(numStates, 16, tanh),\n",
" Dense(16, 16, tanh),\n",
" Dense(16, numStates))"
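For context (not part of this diff): the notebooks then wrap such a net into a neural FMU and simulate it. A rough sketch in the style of the FMIFlux examples — the ME_NeuralFMU constructor signature and the solver import below are assumptions, not taken from this commit:

using DifferentialEquations: Tsit5  # assumed solver import

# assumed constructor signature; tspan and saveat names are hypothetical
neuralFMU = ME_NeuralFMU(simpleFMU, net, (t_start, t_stop), Tsit5(); saveat=tData)
solution = neuralFMU(x0)  # solve the hybrid ODE from initial state x0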
3 changes: 2 additions & 1 deletion test/runtests.jl
@@ -7,6 +7,7 @@ using FMIFlux
using Test
using FMIZoo
using FMIFlux.FMIImport
+ using FMIFlux.FMIImport.FMICore

import FMIFlux.FMISensitivity: FiniteDiff, ForwardDiff, ReverseDiff

@@ -50,7 +51,7 @@ function runtests(exportingTool)
@testset "Solution Gradients" begin
include("solution_gradients.jl")
end

+ for _GRADIENT ∈ (:ReverseDiff, :ForwardDiff) # , :FiniteDiff)

global GRADIENT = _GRADIENT
31 changes: 16 additions & 15 deletions test/solution_gradients.jl
@@ -28,6 +28,7 @@ t_step = 0.05
t_stop = 2.0
tData = t_start:t_step:t_stop
posData = ones(Float64, length(tData))
+ x0_bb = [1.0, 0.0]

numStates = 2
solver = Tsit5()
@@ -48,7 +49,7 @@ net_bb = Chain(#Dense([1.0 0.0; 0.0 1.0], [0.0, 0.0], identity),
p_net_bb, re_bb = Flux.destructure(net_bb)

ff = ODEFunction{true}(fx_bb)
- prob_bb = ODEProblem{true}(ff, x0, (t_start, t_stop), p_net_bb)
+ prob_bb = ODEProblem{true}(ff, x0_bb, (t_start, t_stop), p_net_bb)

function condition(out, x, t, integrator)
out[1] = x[1]-RADIUS
@@ -116,10 +117,10 @@ end

function mysolve(p; sensealg=nothing)
global solution, events # write
- global prob, x0, posData, solver # read-only
+ global prob, x0_bb, posData, solver # read-only
events = 0

- solution = prob(x0; p=p, solver=solver, saveat=tData)
+ solution = prob(x0_bb; p=p, solver=solver, saveat=tData)

return collect(u[1] for u in solution.states.u)
end
@@ -155,7 +156,7 @@ using FMIFlux.FMISensitivity.SciMLSensitivity
sensealg = ReverseDiffAdjoint()

c = nothing
- c, x0 = FMIFlux.prepareSolveFMU(prob.fmu, c, fmi2TypeModelExchange, nothing, nothing, nothing, nothing, nothing, prob.parameters, prob.tspan[1], prob.tspan[end], nothing; x0=prob.x0, handleEvents=FMIFlux.handleEvents, cleanup=true)
+ c, _ = FMIFlux.prepareSolveFMU(prob.fmu, c, fmi2TypeModelExchange, nothing, nothing, nothing, nothing, nothing, prob.parameters, prob.tspan[1], prob.tspan[end], nothing; x0=prob.x0, handleEvents=FMIFlux.handleEvents, cleanup=true)

### START CHECK CONDITIONS

@@ -169,14 +170,14 @@ function condition_nfmu_check(x)
FMIFlux.condition!(prob, FMIFlux.getComponent(prob), buffer, x, t_start, nothing, [UInt32(1)])
return buffer
end
- jac_fwd1 = ForwardDiff.jacobian(condition_bb_check, x0)
- jac_fwd2 = ForwardDiff.jacobian(condition_nfmu_check, x0)
+ jac_fwd1 = ForwardDiff.jacobian(condition_bb_check, x0_bb)
+ jac_fwd2 = ForwardDiff.jacobian(condition_nfmu_check, x0_bb)

- jac_rwd1 = ReverseDiff.jacobian(condition_bb_check, x0)
- jac_rwd2 = ReverseDiff.jacobian(condition_nfmu_check, x0)
+ jac_rwd1 = ReverseDiff.jacobian(condition_bb_check, x0_bb)
+ jac_rwd2 = ReverseDiff.jacobian(condition_nfmu_check, x0_bb)

- jac_fin1 = FiniteDiff.finite_difference_jacobian(condition_bb_check, x0)
- jac_fin2 = FiniteDiff.finite_difference_jacobian(condition_nfmu_check, x0)
+ jac_fin1 = FiniteDiff.finite_difference_jacobian(condition_bb_check, x0_bb)
+ jac_fin2 = FiniteDiff.finite_difference_jacobian(condition_nfmu_check, x0_bb)

atol = 1e-8
@test isapprox(jac_fin1, jac_fwd1; atol=atol)
@@ -206,17 +207,17 @@ atol = 1e-8
# t_first_event_time = 0.451523640985728
# x_first_event_right = [2.2250738585072014e-308, 3.1006128426489954]

- # jac_con1 = ForwardDiff.jacobian(affect_bb_check, x0)
- # jac_con2 = ForwardDiff.jacobian(affect_nfmu_check, x0)
+ # jac_con1 = ForwardDiff.jacobian(affect_bb_check, x0_bb)
+ # jac_con2 = ForwardDiff.jacobian(affect_nfmu_check, x0_bb)

- # jac_con1 = ReverseDiff.jacobian(affect_bb_check, x0)
- # jac_con2 = ReverseDiff.jacobian(affect_nfmu_check, x0)
+ # jac_con1 = ReverseDiff.jacobian(affect_bb_check, x0_bb)
+ # jac_con2 = ReverseDiff.jacobian(affect_nfmu_check, x0_bb)

###

# Solution (plain)
losssum(p_net; sensealg=sensealg)
- #@test length(solution.events) == NUMEVENTS
+ @test length(solution.events) == NUMEVENTS

losssum_bb(p_net_bb; sensealg=sensealg)
@test events == NUMEVENTS
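The thread through the test changes above: the black-box reference model now owns a dedicated initial state x0_bb, and prepareSolveFMU no longer rebinds the global x0 (c, _ = ... discards the returned state), so the NeuralFMU's prob.x0 cannot be shadowed; the previously commented-out event-count assertion is re-enabled. Distilled to its essentials, using only names from the diff:

x0_bb = [1.0, 0.0]  # bouncing ball: position 1.0, velocity 0.0
prob_bb = ODEProblem{true}(ff, x0_bb, (t_start, t_stop), p_net_bb)  # black-box reference problem
solution = prob(x0_bb; p=p_net, solver=solver, saveat=tData)        # NeuralFMU solve
@test length(solution.events) == NUMEVENTS  # both models must produce the same event count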