From d6c6d3fe3d45280c0338194a23a31ddb9b026a54 Mon Sep 17 00:00:00 2001
From: ThummeTo <83663542+ThummeTo@users.noreply.github.com>
Date: Tue, 17 Sep 2024 17:28:28 +0200
Subject: [PATCH] Jupyter hyperopt fix (#153)

* Update juliacon_2023_distributedhyperopt.jl

* added comment

* description change
---
 examples/jupyter-src/juliacon_2023.ipynb                  | 4 +++-
 examples/jupyter-src/juliacon_2023_distributedhyperopt.jl | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/examples/jupyter-src/juliacon_2023.ipynb b/examples/jupyter-src/juliacon_2023.ipynb
index 5269cb0c..68861ca7 100644
--- a/examples/jupyter-src/juliacon_2023.ipynb
+++ b/examples/jupyter-src/juliacon_2023.ipynb
@@ -782,6 +782,8 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
+    "If you want to do hyperparameter optimization, uncomment/remove all code from this point on. The following is for demonstration purposes only.\n",
+    "\n",
     "Let's check if the train function is working for a given set of hyperparameters."
    ]
   },
@@ -793,7 +795,7 @@
    "source": [
     "# check if the train function is working for a set of given (random) hyperparameters\n",
     "# ([ ETA, BETA1, BETA2, BATCHDUR, LASTWEIGHT, SCHEDULER, LOSS], RESOURCE, INDEX)\n",
-    "train!([0.0001, 0.9, 0.999, 4.0, 0.7, :Random, :MSE], 8.0, 1) "
+    "train!([0.0001, 0.9, 0.999, 4.0, 0.7, :Random, :MSE], 8.0, 1)"
    ]
   },
  {
diff --git a/examples/jupyter-src/juliacon_2023_distributedhyperopt.jl b/examples/jupyter-src/juliacon_2023_distributedhyperopt.jl
index 4b56bc1b..ae0fdf3d 100644
--- a/examples/jupyter-src/juliacon_2023_distributedhyperopt.jl
+++ b/examples/jupyter-src/juliacon_2023_distributedhyperopt.jl
@@ -14,7 +14,7 @@ using DistributedHyperOpt # add via `add "https://github.com/ThummeTo/Distribu
 
 nprocs()
 workers = addprocs(5)
-@everywhere include(joinpath(@__DIR__, "workshop_module.jl"))
+@everywhere include(joinpath(@__DIR__, "juliacon_2023.jl"))
 
 # creating paths for log files (logs), parameter sets (params) and hyperparameter plots (plots)
 for dir ∈ ("logs", "params", "plots")
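
The core of the fix in juliacon_2023_distributedhyperopt.jl is that every worker process must have the training code in scope before the hyperparameter search starts; the old line included a file (workshop_module.jl) that no longer matches the example. A minimal sketch of the corrected setup, assuming the script and the exported notebook code live in the same directory, as in this repository:

    using Distributed

    nprocs()                  # number of Julia processes currently available
    workers = addprocs(5)     # spawn five worker processes for parallel training runs

    # every worker needs the training code (e.g. the `train!` function) in scope,
    # so the included file must be the one that actually defines it
    @everywhere include(joinpath(@__DIR__, "juliacon_2023.jl"))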
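The notebook's demo cell calls train! once with a fixed hyperparameter set; the comment there documents the argument order as ([ETA, BETA1, BETA2, BATCHDUR, LASTWEIGHT, SCHEDULER, LOSS], RESOURCE, INDEX). A minimal sketch of how such a smoke test could be extended to a few candidate sets before launching the full distributed optimization; the values in the second candidate are hypothetical, and train! and the RESOURCE value 8.0 are taken from the notebook's demo call:

    # try a few candidate hyperparameter sets locally before going distributed
    candidates = [
        [0.0001, 0.9, 0.999, 4.0, 0.7, :Random, :MSE],
        [0.001,  0.9, 0.999, 2.0, 0.5, :Random, :MSE],  # hypothetical second candidate
    ]
    for (i, hyper) in enumerate(candidates)
        # RESOURCE = 8.0 as in the notebook's demo call; i is the run index
        train!(hyper, 8.0, i)
    end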