diff --git a/Project.toml b/Project.toml
index 3f35539b..5355cb08 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,6 @@
 name = "FMIFlux"
 uuid = "fabad875-0d53-4e47-9446-963b74cae21f"
-version = "0.10.2"
+version = "0.10.3"
 
 [deps]
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
diff --git a/src/layers.jl b/src/layers.jl
index abd3f54d..0ddb8c1b 100644
--- a/src/layers.jl
+++ b/src/layers.jl
@@ -11,18 +11,22 @@ import FMIImport: fmi2Real, fmi2ValueReferenceFormat
 """
 ToDo.
 """
-struct FMUParameterRegistrator
+struct FMUParameterRegistrator{T}
     fmu::FMU2
     p_refs::AbstractArray{<:fmi2ValueReference}
-    p::AbstractArray{<:Real}
+    p::AbstractArray{T}
 
-    function FMUParameterRegistrator(fmu::FMU2, p_refs::fmi2ValueReferenceFormat, p::AbstractArray{<:Real})
+    function FMUParameterRegistrator{T}(fmu::FMU2, p_refs::fmi2ValueReferenceFormat, p::AbstractArray{T}) where {T}
         @assert length(p_refs) == length(p) "`p_refs` and `p` need to be the same length!"
         p_refs = prepareValueReference(fmu, p_refs)
         fmu.optim_p_refs = p_refs
         fmu.optim_p = p
         return new(fmu, p_refs, p)
     end
+
+    function FMUParameterRegistrator(fmu::FMU2, p_refs::fmi2ValueReferenceFormat, p::AbstractArray{T}) where {T}
+        return FMUParameterRegistrator{T}(fmu, p_refs, p)
+    end
 end
 
 export FMUParameterRegistrator
diff --git a/src/neural.jl b/src/neural.jl
index a9a5e808..59a69623 100644
--- a/src/neural.jl
+++ b/src/neural.jl
@@ -426,11 +426,11 @@ function affectFMU!(nfmu::ME_NeuralFMU, c::FMU2Component, integrator, idx)
 
         # ToDo: Problem-related parameterization of optimize-call
         #result = optimize(x_seek -> f_optim(x_seek, nfmu, right_x_fmu), left_x, LBFGS(); autodiff = :forward)
-        #result = Optim.optimize(x_seek -> f_optim(x_seek, nfmu, right_x_fmu, idx, sign(indicators[idx])), left_x, NelderMead())
+        #result = Optim.optimize(x_seek -> f_optim(x_seek, nfmu, right_x_fmu, idx, sign(indicators[idx])), left_x, Optim.NelderMead())
 
         # if there is an ANN above the FMU, propagate FMU state through top ANN:
         if nfmu.modifiedState == true
-            result = Optim.optimize(x_seek -> f_optim(x_seek, nfmu, c, right_x_fmu), left_x, NelderMead())
+            result = Optim.optimize(x_seek -> f_optim(x_seek, nfmu, c, right_x_fmu), left_x, Optim.NelderMead())
             right_x = Optim.minimizer(result)
         else # if there is no ANN above, then:
             right_x = right_x_fmu
diff --git a/src/scheduler.jl b/src/scheduler.jl
index c76bd7a7..3acf0d3a 100644
--- a/src/scheduler.jl
+++ b/src/scheduler.jl
@@ -188,7 +188,7 @@ function initialize!(scheduler::BatchScheduler; runkwargs...)
         scheduler.runkwargs = runkwargs
     end
 
-    #scheduler.elementIndex = apply!(scheduler)
+    scheduler.elementIndex = apply!(scheduler)
 
     if scheduler.plotStep > 0
         plot(scheduler, lastIndex)
@@ -201,11 +201,11 @@ function update!(scheduler::BatchScheduler)
 
     scheduler.step += 1
 
-    if scheduler.step % scheduler.applyStep == 0
+    if scheduler.applyStep > 0 && scheduler.step % scheduler.applyStep == 0
         scheduler.elementIndex = apply!(scheduler)
     end
 
-    if scheduler.step % scheduler.plotStep == 0
+    if scheduler.plotStep > 0 && scheduler.step % scheduler.plotStep == 0
         plot(scheduler, lastIndex)
     end
 end
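As a usage note (not part of the diff): a minimal sketch of how the new type-parameterized `FMUParameterRegistrator` might be used. The FMU path and value references below are hypothetical placeholders, and loading the FMU via FMIImport's `fmi2Load` is an assumption about the surrounding setup, not something shown in this change:

```julia
using FMIImport: fmi2Load, fmi2ValueReference
using FMIFlux: FMUParameterRegistrator

# Hypothetical FMU and parameter value references; replace with your model's.
fmu = fmi2Load("path/to/model.fmu")
p_refs = fmi2ValueReference[16777216, 16777217]
p = [0.5, 2.0]  # eltype Float64

# The new outer constructor infers T from eltype(p), so this builds a
# FMUParameterRegistrator{Float64}. The inner constructor also registers
# `p`/`p_refs` on the FMU as fmu.optim_p / fmu.optim_p_refs (see diff above).
reg = FMUParameterRegistrator(fmu, p_refs, p)
```

The scheduler changes stand on their own: `initialize!` now calls `apply!` immediately (previously commented out), and the added guards in `update!` let `applyStep`/`plotStep` values of `0` act as "disabled" instead of raising a `DivideError` from `step % 0`.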