diff --git a/tuning/autotune_scvi.ipynb b/tuning/autotune_scvi.ipynb
index db3ca6a..985cc1a 100644
--- a/tuning/autotune_scvi.ipynb
+++ b/tuning/autotune_scvi.ipynb
@@ -389,7 +389,7 @@
    "source": [
     "search_space = {\n",
     "    \"model_params\": {\"n_hidden\": tune.choice([64, 128, 256]), \"n_layers\": tune.choice([1, 2, 3])},\n",
-    "    \"train_params\": {\"max_epochs\": 100},\n",
+    "    \"train_params\": {\"max_epochs\": 100, \"plan_kwargs\": {\"lr\": tune.loguniform(1e-4, 1e-2)}},\n",
     "}"
    ]
   },
@@ -400,9 +400,9 @@
    "source": [
     "There are a couple more arguments we should be aware of before fitting the tuner:\n",
     "\n",
-    "- `num_samples`: The total number of hyperparameter sets to sample from `search_space`. This is the total number of models that will be trained.\n",
+    "- `num_samples`: The total number of hyperparameter sets to sample from `search_space`. This is the total number of models that will be trained.\n",
     "\n",
-    "    For example, if we set `num_samples=2`, we might sample two models with the following hyperparameter configurations:\n",
+    "    For example, if we set `num_samples=2`, based on the above search space, we might sample two models with the following hyperparameter configurations:\n",
     "\n",
     "    ```python\n",
     "    model1 = {\n",
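For reference, a minimal standalone sketch of what the updated search space means in practice. It mimics Ray Tune's samplers with the Python standard library purely for illustration (in the notebook itself, `search_space` is handed to the tuner, which draws the configs); `sample_config` is a hypothetical helper, not part of scvi-tools or Ray.

```python
# Illustration only: mimic Ray Tune's sampling of the notebook's search space
# using the standard library. The real tuner does this internally.
import math
import random


def sample_config():
    """Draw one hypothetical trial config from the notebook's search space."""
    return {
        "model_params": {
            "n_hidden": random.choice([64, 128, 256]),  # tune.choice([64, 128, 256])
            "n_layers": random.choice([1, 2, 3]),       # tune.choice([1, 2, 3])
        },
        "train_params": {
            "max_epochs": 100,  # fixed, not tuned
            "plan_kwargs": {
                # tune.loguniform(1e-4, 1e-2): sample uniformly in log space
                "lr": math.exp(random.uniform(math.log(1e-4), math.log(1e-2))),
            },
        },
    }


# num_samples=2 corresponds to two independent draws, i.e. two models trained
model1, model2 = sample_config(), sample_config()
```

Each call to `sample_config()` corresponds to one trial, which is why `num_samples` equals the total number of models trained; the `lr` draw also shows why `tune.loguniform` (added in this hunk) spreads samples evenly across orders of magnitude rather than linearly between 1e-4 and 1e-2.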