diff --git a/nbs/common.base_model.ipynb b/nbs/common.base_model.ipynb
index 444074b98..ce3ac6601 100644
--- a/nbs/common.base_model.ipynb
+++ b/nbs/common.base_model.ipynb
@@ -34,7 +34,6 @@
     "import random\n",
     "import warnings\n",
     "from contextlib import contextmanager\n",
-    "from copy import deepcopy\n",
     "from dataclasses import dataclass\n",
     "\n",
     "import fsspec\n",
@@ -121,10 +120,6 @@
     "        random_seed,\n",
     "        loss,\n",
     "        valid_loss,\n",
-    "        optimizer,\n",
-    "        optimizer_kwargs,\n",
-    "        lr_scheduler,\n",
-    "        lr_scheduler_kwargs,\n",
     "        futr_exog_list,\n",
     "        hist_exog_list,\n",
     "        stat_exog_list,\n",
@@ -150,18 +145,6 @@
     "        self.train_trajectories = []\n",
     "        self.valid_trajectories = []\n",
     "\n",
-    "        # Optimization\n",
-    "        if optimizer is not None and not issubclass(optimizer, torch.optim.Optimizer):\n",
-    "            raise TypeError(\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
-    "        self.optimizer = optimizer\n",
-    "        self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}\n",
-    "\n",
-    "        # lr scheduler\n",
-    "        if lr_scheduler is not None and not issubclass(lr_scheduler, torch.optim.lr_scheduler.LRScheduler):\n",
-    "            raise TypeError(\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
-    "        self.lr_scheduler = lr_scheduler\n",
-    "        self.lr_scheduler_kwargs = lr_scheduler_kwargs if lr_scheduler_kwargs is not None else {}\n",
-    "\n",
     "        # customized by set_configure_optimizers()\n",
     "        self.config_optimizers = None\n",
     "\n",
@@ -412,41 +395,19 @@
     "\n",
     "    def configure_optimizers(self):\n",
     "        if self.config_optimizers is not None:\n",
+    "            # return the customized optimizer settings if specified\n",
     "            return self.config_optimizers\n",
-    "                \n",
-    "        if self.optimizer:\n",
-    "            optimizer_signature = inspect.signature(self.optimizer)\n",
-    "            optimizer_kwargs = deepcopy(self.optimizer_kwargs)\n",
-    "            if 'lr' in optimizer_signature.parameters:\n",
-    "                if 'lr' in optimizer_kwargs:\n",
-    "                    warnings.warn(\"ignoring learning rate passed in optimizer_kwargs, using the model's learning rate\")\n",
-    "                optimizer_kwargs['lr'] = self.learning_rate\n",
-    "            optimizer = self.optimizer(params=self.parameters(), **optimizer_kwargs)\n",
-    "        else:\n",
-    "            if self.optimizer_kwargs:\n",
-    "                warnings.warn(\n",
-    "                    \"ignoring optimizer_kwargs as the optimizer is not specified\"\n",
-    "                )\n",
-    "            optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)\n",
     "        \n",
-    "        lr_scheduler = {'frequency': 1, 'interval': 'step'}\n",
-    "        if self.lr_scheduler:\n",
-    "            lr_scheduler_signature = inspect.signature(self.lr_scheduler)\n",
-    "            lr_scheduler_kwargs = deepcopy(self.lr_scheduler_kwargs)\n",
-    "            if 'optimizer' in lr_scheduler_signature.parameters:\n",
-    "                if 'optimizer' in lr_scheduler_kwargs:\n",
-    "                    warnings.warn(\"ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer\")\n",
-    "                    del lr_scheduler_kwargs['optimizer']\n",
-    "            lr_scheduler['scheduler'] = self.lr_scheduler(optimizer=optimizer, **lr_scheduler_kwargs)\n",
-    "        else:\n",
-    "            if self.lr_scheduler_kwargs:\n",
-    "                warnings.warn(\n",
-    "                    \"ignoring lr_scheduler_kwargs as the lr_scheduler is not specified\"\n",
-    "                )            \n",
-    "            lr_scheduler['scheduler'] = torch.optim.lr_scheduler.StepLR(\n",
+    "        # default choice\n",
+    "        optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)\n",
+    "        scheduler = {\n",
+    "            \"scheduler\": torch.optim.lr_scheduler.StepLR(\n",
     "                optimizer=optimizer, step_size=self.lr_decay_steps, gamma=0.5\n",
-    "            )\n",
-    "        return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler}\n",
+    "            ),\n",
+    "            \"frequency\": 1,\n",
+    "            \"interval\": \"step\",\n",
+    "        }\n",
+    "        return {\"optimizer\": optimizer, \"lr_scheduler\": scheduler}\n",
     "\n",
     "    def set_configure_optimizers(\n",
     "            self, \n",
@@ -528,6 +489,22 @@
     "            model.load_state_dict(content[\"state_dict\"], strict=True)\n",
     "        return model"
    ]
   }
  ],
  "metadata": {
diff --git a/nbs/common.base_multivariate.ipynb b/nbs/common.base_multivariate.ipynb
index f1321600d..43e31a0ed 100644
--- a/nbs/common.base_multivariate.ipynb
+++ b/nbs/common.base_multivariate.ipynb
@@ -105,20 +105,12 @@
     "                 drop_last_loader=False,\n",
     "                 random_seed=1, \n",
     "                 alias=None,\n",
-    "                 optimizer=None,\n",
-    "                 optimizer_kwargs=None,\n",
-    "                 lr_scheduler=None,\n",
-    "                 lr_scheduler_kwargs=None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super().__init__(\n",
     "            random_seed=random_seed,\n",
     "            loss=loss,\n",
-    "            valid_loss=valid_loss,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,            \n",
+    "            valid_loss=valid_loss,        \n",
     "            futr_exog_list=futr_exog_list,\n",
     "            hist_exog_list=hist_exog_list,\n",
     "            stat_exog_list=stat_exog_list,\n",
diff --git a/nbs/common.base_recurrent.ipynb b/nbs/common.base_recurrent.ipynb
index 7b0ed5585..38ac09dba 100644
--- a/nbs/common.base_recurrent.ipynb
+++ b/nbs/common.base_recurrent.ipynb
@@ -111,20 +111,12 @@
     "                 drop_last_loader=False,\n",
     "                 random_seed=1, \n",
     "                 alias=None,\n",
-    "                 optimizer=None,\n",
-    "                 optimizer_kwargs=None,\n",
-    "                 lr_scheduler=None,\n",
-    "                 lr_scheduler_kwargs=None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super().__init__(\n",
     "            random_seed=random_seed,\n",
     "            loss=loss,\n",
     "            valid_loss=valid_loss,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            futr_exog_list=futr_exog_list,\n",
     "            hist_exog_list=hist_exog_list,\n",
     "            stat_exog_list=stat_exog_list,\n",
diff --git a/nbs/common.base_windows.ipynb b/nbs/common.base_windows.ipynb
index 80f12e5f5..ced5a7913 100644
--- a/nbs/common.base_windows.ipynb
+++ b/nbs/common.base_windows.ipynb
@@ -115,20 +115,12 @@
     "                 drop_last_loader=False,\n",
     "                 random_seed=1,\n",
     "                 alias=None,\n",
-    "                 optimizer=None,\n",
-    "                 optimizer_kwargs=None,\n",
-    "                 lr_scheduler=None,\n",
-    "                 lr_scheduler_kwargs=None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super().__init__(\n",
     "            random_seed=random_seed,\n",
     "            loss=loss,\n",
     "            valid_loss=valid_loss,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            futr_exog_list=futr_exog_list,\n",
     "            hist_exog_list=hist_exog_list,\n",
     "            stat_exog_list=stat_exog_list,\n",
diff --git a/nbs/core.ipynb b/nbs/core.ipynb
index e916ef356..3a83b52a8 100644
--- a/nbs/core.ipynb
+++ b/nbs/core.ipynb
@@ -3172,15 +3172,22 @@
     "    mean = default_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
     "\n",
     "    # using a customized optimizer\n",
-    "    params.update({\n",
-    "        \"optimizer\": torch.optim.Adadelta,\n",
-    "        \"optimizer_kwargs\": {\"rho\": 0.45}, \n",
-    "    })\n",
+    "    optimizer = torch.optim.Adadelta(params=models2[0].parameters(), rho=0.75)\n",
+    "    scheduler=torch.optim.lr_scheduler.StepLR(\n",
+    "        optimizer=optimizer, step_size=10e7, gamma=0.5\n",
+    "    )\n",
+    "\n",
     "    models2 = [nf_model(**params)]\n",
+    "    models2[0].set_configure_optimizers(\n",
+    "        optimizer=optimizer,\n",
+    "        scheduler=scheduler,\n",
+    "    )\n",
+    "\n",
     "    nf2 = NeuralForecast(models=models2, freq='M')\n",
     "    nf2.fit(AirPassengersPanel_train)\n",
     "    customized_optimizer_predict = nf2.predict()\n",
     "    mean2 = customized_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
+    "\n",
     "    assert mean2 != mean"
    ]
   },
@@ -3194,100 +3201,18 @@
     "#| hide\n",
     "# test that if the user-defined optimizer is not a subclass of torch.optim.optimizer, failed with exception\n",
     "# tests cover different types of base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
-    "test_fail(lambda: NHITS(h=12, input_size=24, max_steps=10, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
-    "test_fail(lambda: RNN(h=12, input_size=24, max_steps=10, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n",
-    "test_fail(lambda: StemGNN(h=12, input_size=24, max_steps=10, n_series=2, optimizer=torch.nn.Module), contains=\"optimizer is not a valid subclass of torch.optim.Optimizer\")\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d908240f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#| hide\n",
-    "# test that if we pass \"lr\" parameter, we expect warning and it ignores the passed in 'lr' parameter\n",
-    "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
     "\n",
-    "for nf_model in [NHITS, RNN, StemGNN]:\n",
-    "    params = {\n",
-    "        \"h\": 12, \n",
-    "        \"input_size\": 24, \n",
-    "        \"max_steps\": 1, \n",
-    "        \"optimizer\": torch.optim.Adadelta, \n",
-    "        \"optimizer_kwargs\": {\"lr\": 0.8, \"rho\": 0.45}\n",
-    "    }\n",
+    "for model_name in [NHITS, RNN, StemGNN]:\n",
+    "    params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 10}\n",
     "    if nf_model.__name__ == \"StemGNN\":\n",
     "        params.update({\"n_series\": 2})\n",
-    "    models = [nf_model(**params)]\n",
-    "    nf = NeuralForecast(models=models, freq='M')\n",
-    "    with warnings.catch_warnings(record=True) as issued_warnings:\n",
-    "        warnings.simplefilter('always', UserWarning)\n",
-    "        nf.fit(AirPassengersPanel_train)\n",
-    "        assert any(\"ignoring learning rate passed in optimizer_kwargs, using the model's learning rate\" in str(w.message) for w in issued_warnings)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c97858b5-e6a0-4353-a48f-5a5460eb2314",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#| hide\n",
-    "# test that if we pass \"optimizer_kwargs\" but not \"optimizer\", we expect a warning\n",
-    "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
     "\n",
-    "for nf_model in [NHITS, RNN, StemGNN]:\n",
-    "    params = {\n",
-    "        \"h\": 12, \n",
-    "        \"input_size\": 24, \n",
-    "        \"max_steps\": 1,\n",
-    "        \"optimizer_kwargs\": {\"lr\": 0.8, \"rho\": 0.45}\n",
-    "    }\n",
-    "    if nf_model.__name__ == \"StemGNN\":\n",
-    "        params.update({\"n_series\": 2})\n",
-    "    models = [nf_model(**params)]\n",
-    "    nf = NeuralForecast(models=models, freq='M')\n",
-    "    with warnings.catch_warnings(record=True) as issued_warnings:\n",
-    "        warnings.simplefilter('always', UserWarning)\n",
-    "        nf.fit(AirPassengersPanel_train)\n",
-    "        assert any(\"ignoring optimizer_kwargs as the optimizer is not specified\" in str(w.message) for w in issued_warnings)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "24142322",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#| hide\n",
-    "# test customized lr_scheduler behavior such that the user defined lr_scheduler result should differ from default\n",
-    "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
-    "\n",
-    "for nf_model in [NHITS, RNN, StemGNN]:\n",
-    "    params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 1}\n",
-    "    if nf_model.__name__ == \"StemGNN\":\n",
-    "        params.update({\"n_series\": 2})\n",
-    "    models = [nf_model(**params)]\n",
-    "    nf = NeuralForecast(models=models, freq='M')\n",
-    "    nf.fit(AirPassengersPanel_train)\n",
-    "    default_optimizer_predict = nf.predict()\n",
-    "    mean = default_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
-    "\n",
-    "    # using a customized lr_scheduler, default is StepLR\n",
-    "    params.update({\n",
-    "        \"lr_scheduler\": torch.optim.lr_scheduler.ConstantLR,\n",
-    "        \"lr_scheduler_kwargs\": {\"factor\": 0.78}, \n",
-    "    })\n",
-    "    models2 = [nf_model(**params)]\n",
-    "    nf2 = NeuralForecast(models=models2, freq='M')\n",
-    "    nf2.fit(AirPassengersPanel_train)\n",
-    "    customized_optimizer_predict = nf2.predict()\n",
-    "    mean2 = customized_optimizer_predict.loc[:, nf_model.__name__].mean()\n",
-    "    assert mean2 != mean"
+    "    model = model_name(**params)    \n",
+    "    optimizer = torch.nn.Module()\n",
+    "    scheduler = torch.optim.lr_scheduler.StepLR(\n",
+    "        optimizer=torch.optim.Adam(model.parameters()), step_size=10e7, gamma=0.5\n",
+    "    )        \n",
+    "    test_fail(lambda: model.set_configure_optimizers(optimizer=optimizer, scheduler=scheduler), contains=\"optimizer is not a valid instance of torch.optim.Optimizer\")\n"
    ]
   },
   {
@@ -3298,68 +3223,16 @@
    "outputs": [],
    "source": [
     "#| hide\n",
-    "# test that if the user-defined lr_scheduler is not a subclass of torch.optim.lr_scheduler, failed with exception\n",
+    "# test that if the user-defined scheduler is not a subclass of torch.optim.lr_scheduler, failed with exception\n",
     "# tests cover different types of base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
-    "test_fail(lambda: NHITS(h=12, input_size=24, max_steps=10, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
-    "test_fail(lambda: RNN(h=12, input_size=24, max_steps=10, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n",
-    "test_fail(lambda: StemGNN(h=12, input_size=24, max_steps=10, n_series=2, lr_scheduler=torch.nn.Module), contains=\"lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler\")\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b1d8bebb",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#| hide\n",
-    "# test that if we pass in \"optimizer\" parameter, we expect warning and it ignores them\n",
-    "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
-    "\n",
-    "for nf_model in [NHITS, RNN, StemGNN]:\n",
-    "    params = {\n",
-    "        \"h\": 12, \n",
-    "        \"input_size\": 24, \n",
-    "        \"max_steps\": 1, \n",
-    "        \"lr_scheduler\": torch.optim.lr_scheduler.ConstantLR, \n",
-    "        \"lr_scheduler_kwargs\": {\"optimizer\": torch.optim.Adadelta, \"factor\": 0.22}\n",
-    "    }\n",
-    "    if nf_model.__name__ == \"StemGNN\":\n",
-    "        params.update({\"n_series\": 2})\n",
-    "    models = [nf_model(**params)]\n",
-    "    nf = NeuralForecast(models=models, freq='M')\n",
-    "    with warnings.catch_warnings(record=True) as issued_warnings:\n",
-    "        warnings.simplefilter('always', UserWarning)\n",
-    "        nf.fit(AirPassengersPanel_train)\n",
-    "        assert any(\"ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer\" in str(w.message) for w in issued_warnings)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "06febece",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "#| hide\n",
-    "# test that if we pass in \"lr_scheduler_kwargs\" but not \"lr_scheduler\", we expect a warning\n",
-    "# tests consider models implemented using different base classes such as BaseWindows, BaseRecurrent, BaseMultivariate\n",
     "\n",
-    "for nf_model in [NHITS, RNN, StemGNN]:\n",
-    "    params = {\n",
-    "        \"h\": 12, \n",
-    "        \"input_size\": 24, \n",
-    "        \"max_steps\": 1,\n",
-    "        \"lr_scheduler_kwargs\": {\"optimizer\": torch.optim.Adadelta, \"factor\": 0.22}\n",
-    "    }\n",
+    "for model_name in [NHITS, RNN, StemGNN]:\n",
+    "    params = {\"h\": 12, \"input_size\": 24, \"max_steps\": 10}\n",
     "    if nf_model.__name__ == \"StemGNN\":\n",
     "        params.update({\"n_series\": 2})\n",
-    "    models = [nf_model(**params)]\n",
-    "    nf = NeuralForecast(models=models, freq='M')\n",
-    "    with warnings.catch_warnings(record=True) as issued_warnings:\n",
-    "        warnings.simplefilter('always', UserWarning)\n",
-    "        nf.fit(AirPassengersPanel_train)\n",
-    "        assert any(\"ignoring lr_scheduler_kwargs as the lr_scheduler is not specified\" in str(w.message) for w in issued_warnings)\n"
+    "    model = model_name(**params)\n",
+    "    optimizer = torch.optim.Adam(model.parameters())\n",
+    "    test_fail(lambda: model.set_configure_optimizers(optimizer=optimizer, scheduler=torch.nn.Module), contains=\"scheduler is not a valid instance of torch.optim.lr_scheduler.LRScheduler\")"
    ]
   },
   {
@@ -3493,7 +3366,6 @@
     "    models[0].set_configure_optimizers(\n",
     "        optimizer=optimizer,\n",
     "        scheduler=scheduler,\n",
-    "\n",
     "    )\n",
     "    nf2 = NeuralForecast(models=models, freq='M')\n",
     "    nf2.fit(AirPassengersPanel_train)\n",
diff --git a/nbs/models.autoformer.ipynb b/nbs/models.autoformer.ipynb
index 9c6567f2e..6db6deb68 100644
--- a/nbs/models.autoformer.ipynb
+++ b/nbs/models.autoformer.ipynb
@@ -458,10 +458,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -508,10 +504,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super(Autoformer, self).__init__(h=h,\n",
@@ -537,10 +529,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.bitcn.ipynb b/nbs/models.bitcn.ipynb
index cd78bb194..8e9571de6 100644
--- a/nbs/models.bitcn.ipynb
+++ b/nbs/models.bitcn.ipynb
@@ -178,10 +178,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "\n",
@@ -221,10 +217,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super(BiTCN, self).__init__(\n",
@@ -251,10 +243,6 @@
     "            random_seed=random_seed,\n",
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.deepar.ipynb b/nbs/models.deepar.ipynb
index c25e27bf9..1f93be176 100644
--- a/nbs/models.deepar.ipynb
+++ b/nbs/models.deepar.ipynb
@@ -183,10 +183,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "\n",
@@ -231,10 +227,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -274,10 +266,6 @@
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
     "                                    random_seed=random_seed,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.deepnpts.ipynb b/nbs/models.deepnpts.ipynb
index 4f5e7ee9f..da83951b5 100644
--- a/nbs/models.deepnpts.ipynb
+++ b/nbs/models.deepnpts.ipynb
@@ -121,10 +121,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "\n",
@@ -166,10 +162,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -206,10 +198,6 @@
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
     "                                    random_seed=random_seed,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.dilated_rnn.ipynb b/nbs/models.dilated_rnn.ipynb
index 4b3bd374f..7c556be3d 100644
--- a/nbs/models.dilated_rnn.ipynb
+++ b/nbs/models.dilated_rnn.ipynb
@@ -390,10 +390,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br> \n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "    \"\"\"\n",
@@ -430,10 +426,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(DilatedRNN, self).__init__(\n",
@@ -456,10 +448,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.dlinear.ipynb b/nbs/models.dlinear.ipynb
index ea1a38a43..57edcc945 100644
--- a/nbs/models.dlinear.ipynb
+++ b/nbs/models.dlinear.ipynb
@@ -162,10 +162,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -203,10 +199,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs=None,\n",
     "                 **trainer_kwargs):\n",
     "        super(DLinear, self).__init__(h=h,\n",
@@ -232,10 +224,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "                                                                \n",
diff --git a/nbs/models.fedformer.ipynb b/nbs/models.fedformer.ipynb
index 2268c058d..47a13e205 100644
--- a/nbs/models.fedformer.ipynb
+++ b/nbs/models.fedformer.ipynb
@@ -451,10 +451,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -500,10 +496,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer=None,\n",
-    "                 optimizer_kwargs=None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(FEDformer, self).__init__(h=h,\n",
@@ -528,10 +520,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,                                    \n",
     "                                       **trainer_kwargs)\n",
     "        # Architecture\n",
diff --git a/nbs/models.gru.ipynb b/nbs/models.gru.ipynb
index 7f0608a5f..b3210e198 100644
--- a/nbs/models.gru.ipynb
+++ b/nbs/models.gru.ipynb
@@ -134,10 +134,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "    \"\"\"\n",
@@ -175,10 +171,6 @@
     "                 random_seed=1,\n",
     "                 num_workers_loader=0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(GRU, self).__init__(\n",
@@ -201,10 +193,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.informer.ipynb b/nbs/models.informer.ipynb
index c8e30137c..1666abc67 100644
--- a/nbs/models.informer.ipynb
+++ b/nbs/models.informer.ipynb
@@ -306,10 +306,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -356,10 +352,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(Informer, self).__init__(h=h,\n",
@@ -385,10 +377,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.itransformer.ipynb b/nbs/models.itransformer.ipynb
index 5e134cfa0..f55a1927b 100644
--- a/nbs/models.itransformer.ipynb
+++ b/nbs/models.itransformer.ipynb
@@ -228,10 +228,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "    \n",
@@ -273,10 +269,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,  \n",
     "                 dataloader_kwargs = None,          \n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -299,10 +291,6 @@
     "                                           random_seed=random_seed,\n",
     "                                           num_workers_loader=num_workers_loader,\n",
     "                                           drop_last_loader=drop_last_loader,\n",
-    "                                           optimizer=optimizer,\n",
-    "                                           optimizer_kwargs=optimizer_kwargs,\n",
-    "                                           lr_scheduler=lr_scheduler,\n",
-    "                                           lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                           dataloader_kwargs=dataloader_kwargs,\n",
     "                                           **trainer_kwargs)\n",
     "               \n",
diff --git a/nbs/models.kan.ipynb b/nbs/models.kan.ipynb
index ac7cc5e2b..93aa02fa3 100644
--- a/nbs/models.kan.ipynb
+++ b/nbs/models.kan.ipynb
@@ -362,8 +362,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>  \n",
     "\n",
@@ -411,8 +409,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -440,8 +436,6 @@
     "                                  num_workers_loader=num_workers_loader,\n",
     "                                  drop_last_loader=drop_last_loader,\n",
     "                                  random_seed=random_seed,\n",
-    "                                  optimizer=optimizer,\n",
-    "                                  optimizer_kwargs=optimizer_kwargs,\n",
     "                                  dataloader_kwargs = dataloader_kwargs,\n",
     "                                  **trainer_kwargs)\n",
     "        \n",
diff --git a/nbs/models.lstm.ipynb b/nbs/models.lstm.ipynb
index 3eb469306..464a539bb 100644
--- a/nbs/models.lstm.ipynb
+++ b/nbs/models.lstm.ipynb
@@ -121,10 +121,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "    \"\"\"\n",
@@ -161,10 +157,6 @@
     "                 random_seed = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(LSTM, self).__init__(\n",
@@ -187,10 +179,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.mlp.ipynb b/nbs/models.mlp.ipynb
index 46c09406f..075dd28e1 100644
--- a/nbs/models.mlp.ipynb
+++ b/nbs/models.mlp.ipynb
@@ -114,10 +114,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "    \"\"\"\n",
@@ -153,10 +149,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -184,10 +176,6 @@
     "                                  num_workers_loader=num_workers_loader,\n",
     "                                  drop_last_loader=drop_last_loader,\n",
     "                                  random_seed=random_seed,\n",
-    "                                  optimizer=optimizer,\n",
-    "                                  optimizer_kwargs=optimizer_kwargs,\n",
-    "                                  lr_scheduler=lr_scheduler,\n",
-    "                                  lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                  dataloader_kwargs=dataloader_kwargs,\n",
     "                                  **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.mlpmultivariate.ipynb b/nbs/models.mlpmultivariate.ipynb
index 71abdfb04..b6fb8e302 100644
--- a/nbs/models.mlpmultivariate.ipynb
+++ b/nbs/models.mlpmultivariate.ipynb
@@ -108,10 +108,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>    \n",
     "    \"\"\"\n",
@@ -143,10 +139,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -170,10 +162,6 @@
     "                                  num_workers_loader=num_workers_loader,\n",
     "                                  drop_last_loader=drop_last_loader,\n",
     "                                  random_seed=random_seed,\n",
-    "                                  optimizer=optimizer,\n",
-    "                                  optimizer_kwargs=optimizer_kwargs,\n",
-    "                                  lr_scheduler=lr_scheduler,\n",
-    "                                  lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                  dataloader_kwargs=dataloader_kwargs,\n",
     "                                  **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.nbeats.ipynb b/nbs/models.nbeats.ipynb
index 9504770d5..5d28efdd3 100644
--- a/nbs/models.nbeats.ipynb
+++ b/nbs/models.nbeats.ipynb
@@ -270,10 +270,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
     "    `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -315,10 +311,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -348,10 +340,6 @@
     "                                     num_workers_loader=num_workers_loader,\n",
     "                                     drop_last_loader=drop_last_loader,\n",
     "                                     random_seed=random_seed,\n",
-    "                                     optimizer=optimizer,\n",
-    "                                     optimizer_kwargs=optimizer_kwargs,\n",
-    "                                     lr_scheduler=lr_scheduler,\n",
-    "                                     lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                     dataloader_kwargs=dataloader_kwargs,\n",
     "                                     **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.nbeatsx.ipynb b/nbs/models.nbeatsx.ipynb
index 9952c3cf9..5db08fec5 100644
--- a/nbs/models.nbeatsx.ipynb
+++ b/nbs/models.nbeatsx.ipynb
@@ -414,10 +414,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -465,10 +461,6 @@
     "        random_seed: int = 1,\n",
     "        num_workers_loader: int = 0,\n",
     "        drop_last_loader: bool = False,\n",
-    "        optimizer = None,\n",
-    "        optimizer_kwargs = None,\n",
-    "        lr_scheduler = None,\n",
-    "        lr_scheduler_kwargs = None,\n",
     "        dataloader_kwargs = None,\n",
     "        **trainer_kwargs,\n",
     "    ):\n",
@@ -502,10 +494,6 @@
     "                                      num_workers_loader=num_workers_loader,\n",
     "                                      drop_last_loader=drop_last_loader,\n",
     "                                      random_seed=random_seed,\n",
-    "                                      optimizer=optimizer,\n",
-    "                                      optimizer_kwargs=optimizer_kwargs,\n",
-    "                                      lr_scheduler=lr_scheduler,\n",
-    "                                      lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                      dataloader_kwargs=dataloader_kwargs,\n",
     "                                      **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.nhits.ipynb b/nbs/models.nhits.ipynb
index e844f4660..9b214ce62 100644
--- a/nbs/models.nhits.ipynb
+++ b/nbs/models.nhits.ipynb
@@ -303,10 +303,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -354,10 +350,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -385,10 +377,6 @@
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
     "                                    random_seed=random_seed,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.nlinear.ipynb b/nbs/models.nlinear.ipynb
index b55d42204..1b922b883 100644
--- a/nbs/models.nlinear.ipynb
+++ b/nbs/models.nlinear.ipynb
@@ -102,10 +102,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -142,10 +138,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(NLinear, self).__init__(h=h,\n",
@@ -171,10 +163,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.patchtst.ipynb b/nbs/models.patchtst.ipynb
index 31064cc24..1088bc6d4 100644
--- a/nbs/models.patchtst.ipynb
+++ b/nbs/models.patchtst.ipynb
@@ -662,10 +662,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -719,10 +715,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(PatchTST, self).__init__(h=h,\n",
@@ -748,10 +740,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs) \n",
     "\n",
diff --git a/nbs/models.rmok.ipynb b/nbs/models.rmok.ipynb
index 017477c13..6245d0eb7 100644
--- a/nbs/models.rmok.ipynb
+++ b/nbs/models.rmok.ipynb
@@ -359,10 +359,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -401,10 +397,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,            \n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -427,10 +419,6 @@
     "                                   random_seed=random_seed,\n",
     "                                   num_workers_loader=num_workers_loader,\n",
     "                                   drop_last_loader=drop_last_loader,\n",
-    "                                   optimizer=optimizer,\n",
-    "                                   optimizer_kwargs=optimizer_kwargs,\n",
-    "                                   lr_scheduler=lr_scheduler,\n",
-    "                                   lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                   dataloader_kwargs=dataloader_kwargs,\n",
     "                                   **trainer_kwargs)\n",
     "        \n",
diff --git a/nbs/models.rnn.ipynb b/nbs/models.rnn.ipynb
index f5e1a67b9..bd856c014 100644
--- a/nbs/models.rnn.ipynb
+++ b/nbs/models.rnn.ipynb
@@ -125,10 +125,6 @@
     "    `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.<br>\n",
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
     "\n",
@@ -168,10 +164,6 @@
     "                 random_seed=1,\n",
     "                 num_workers_loader=0,\n",
     "                 drop_last_loader=False,\n",
-    "                 optimizer=None,\n",
-    "                 optimizer_kwargs=None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,  \n",
     "                 dataloader_kwargs = None,               \n",
     "                 **trainer_kwargs):\n",
     "        super(RNN, self).__init__(\n",
@@ -194,10 +186,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.softs.ipynb b/nbs/models.softs.ipynb
index 978f3c2c2..05d30886f 100644
--- a/nbs/models.softs.ipynb
+++ b/nbs/models.softs.ipynb
@@ -200,10 +200,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "    \n",
@@ -243,10 +239,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None, \n",
     "                 dataloader_kwargs = None,           \n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -269,10 +261,6 @@
     "                                    random_seed=random_seed,\n",
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "        \n",
diff --git a/nbs/models.stemgnn.ipynb b/nbs/models.stemgnn.ipynb
index b2222fc1c..54aad7471 100644
--- a/nbs/models.stemgnn.ipynb
+++ b/nbs/models.stemgnn.ipynb
@@ -204,10 +204,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "    \"\"\"\n",
@@ -241,10 +237,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -268,10 +260,6 @@
     "                                      num_workers_loader=num_workers_loader,\n",
     "                                      drop_last_loader=drop_last_loader,\n",
     "                                      random_seed=random_seed,\n",
-    "                                      optimizer=optimizer,\n",
-    "                                      optimizer_kwargs=optimizer_kwargs,\n",
-    "                                      lr_scheduler=lr_scheduler,\n",
-    "                                      lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                      dataloader_kwargs=dataloader_kwargs,\n",
     "                                      **trainer_kwargs)\n",
     "        # Quick fix for now, fix the model later.\n",
diff --git a/nbs/models.tcn.ipynb b/nbs/models.tcn.ipynb
index dee324513..25b6085de 100644
--- a/nbs/models.tcn.ipynb
+++ b/nbs/models.tcn.ipynb
@@ -126,10 +126,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "    \"\"\"\n",
@@ -166,10 +162,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader = 0,\n",
     "                 drop_last_loader = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None, \n",
     "                 dataloader_kwargs = None,                \n",
     "                 **trainer_kwargs):\n",
     "        super(TCN, self).__init__(\n",
@@ -192,10 +184,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs = dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )\n",
diff --git a/nbs/models.tft.ipynb b/nbs/models.tft.ipynb
index bae287acf..6ded2b3bb 100644
--- a/nbs/models.tft.ipynb
+++ b/nbs/models.tft.ipynb
@@ -696,10 +696,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -744,10 +740,6 @@
     "        num_workers_loader=0,\n",
     "        drop_last_loader=False,\n",
     "        random_seed: int = 1,\n",
-    "        optimizer=None,\n",
-    "        optimizer_kwargs=None,\n",
-    "        lr_scheduler=None,\n",
-    "        lr_scheduler_kwargs=None,\n",
     "        dataloader_kwargs = None,\n",
     "        **trainer_kwargs,\n",
     "    ):\n",
@@ -776,10 +768,6 @@
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
     "            random_seed=random_seed,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs,\n",
     "        )\n",
diff --git a/nbs/models.tide.ipynb b/nbs/models.tide.ipynb
index 6a16d2b2b..f635beec0 100644
--- a/nbs/models.tide.ipynb
+++ b/nbs/models.tide.ipynb
@@ -167,10 +167,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -216,10 +212,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -248,10 +240,6 @@
     "            random_seed=random_seed,\n",
     "            num_workers_loader=num_workers_loader,\n",
     "            drop_last_loader=drop_last_loader,\n",
-    "            optimizer=optimizer,\n",
-    "            optimizer_kwargs=optimizer_kwargs,\n",
-    "            lr_scheduler=lr_scheduler,\n",
-    "            lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "            dataloader_kwargs=dataloader_kwargs,\n",
     "            **trainer_kwargs\n",
     "        )   \n",
diff --git a/nbs/models.timellm.ipynb b/nbs/models.timellm.ipynb
index 67f4a03d1..a05c33156 100755
--- a/nbs/models.timellm.ipynb
+++ b/nbs/models.timellm.ipynb
@@ -291,10 +291,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>    \n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -348,10 +344,6 @@
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
     "                 random_seed: int = 1,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(TimeLLM, self).__init__(h=h,\n",
@@ -376,10 +368,6 @@
     "                                      num_workers_loader=num_workers_loader,\n",
     "                                      drop_last_loader=drop_last_loader,\n",
     "                                      random_seed=random_seed,\n",
-    "                                      optimizer=optimizer,\n",
-    "                                      optimizer_kwargs=optimizer_kwargs,\n",
-    "                                      lr_scheduler=lr_scheduler,\n",
-    "                                      lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                      dataloader_kwargs=dataloader_kwargs,\n",
     "                                      **trainer_kwargs)\n",
     "        \n",
diff --git a/nbs/models.timemixer.ipynb b/nbs/models.timemixer.ipynb
index 9bfdd9cc5..207d44b29 100644
--- a/nbs/models.timemixer.ipynb
+++ b/nbs/models.timemixer.ipynb
@@ -360,10 +360,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -410,10 +406,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,    \n",
     "                 dataloader_kwargs = None,        \n",
     "                 **trainer_kwargs):\n",
     "        \n",
@@ -436,10 +428,6 @@
     "                                    random_seed=random_seed,\n",
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "        \n",
diff --git a/nbs/models.timesnet.ipynb b/nbs/models.timesnet.ipynb
index 37e5d46e4..98eefe2f6 100644
--- a/nbs/models.timesnet.ipynb
+++ b/nbs/models.timesnet.ipynb
@@ -263,12 +263,6 @@
     "        Workers to be used by `TimeSeriesDataLoader`.\n",
     "    drop_last_loader : bool (default=False)\n",
     "        If True `TimeSeriesDataLoader` drops last non-full batch.\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)\n",
-    "        User specified optimizer instead of the default choice (Adam).\n",
-    "    `optimizer_kwargs`: dict, optional (defualt=None)\n",
-    "        List of parameters used by the user specified `optimizer`.\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>        \n",
     "    `dataloader_kwargs`: dict, optional (default=None)\n",
     "        Keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.\n",
     "    **trainer_kwargs\n",
@@ -314,10 +308,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,       \n",
     "                 dataloader_kwargs = None,          \n",
     "                 **trainer_kwargs):\n",
     "        super(TimesNet, self).__init__(h=h,\n",
@@ -343,11 +333,7 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,  \n",
-    "                                       dataloader_kwargs=dataloader_kwargs,                                    \n",
+    "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "\n",
     "        # Architecture\n",
diff --git a/nbs/models.tsmixer.ipynb b/nbs/models.tsmixer.ipynb
index 94a9e4125..c255c233c 100644
--- a/nbs/models.tsmixer.ipynb
+++ b/nbs/models.tsmixer.ipynb
@@ -250,10 +250,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -291,10 +287,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -318,10 +310,6 @@
     "                                    random_seed=random_seed,\n",
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "\n",
diff --git a/nbs/models.tsmixerx.ipynb b/nbs/models.tsmixerx.ipynb
index cb0ba72b6..d1f220823 100644
--- a/nbs/models.tsmixerx.ipynb
+++ b/nbs/models.tsmixerx.ipynb
@@ -274,10 +274,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>    \n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -315,10 +311,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "\n",
@@ -342,10 +334,6 @@
     "                                    random_seed=random_seed,\n",
     "                                    num_workers_loader=num_workers_loader,\n",
     "                                    drop_last_loader=drop_last_loader,\n",
-    "                                    optimizer=optimizer,\n",
-    "                                    optimizer_kwargs=optimizer_kwargs,\n",
-    "                                    lr_scheduler=lr_scheduler,\n",
-    "                                    lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                    dataloader_kwargs=dataloader_kwargs,\n",
     "                                    **trainer_kwargs)\n",
     "        # Reversible InstanceNormalization layer\n",
diff --git a/nbs/models.vanillatransformer.ipynb b/nbs/models.vanillatransformer.ipynb
index b76cc9ba2..56cb5e33b 100644
--- a/nbs/models.vanillatransformer.ipynb
+++ b/nbs/models.vanillatransformer.ipynb
@@ -198,10 +198,6 @@
     "    `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>\n",
     "    `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>\n",
     "    `alias`: str, optional,  Custom name of the model.<br>\n",
-    "    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>\n",
-    "    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
-    "    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
-    "    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
     "    `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>\n",
     "    `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
     "\n",
@@ -245,10 +241,6 @@
     "                 random_seed: int = 1,\n",
     "                 num_workers_loader: int = 0,\n",
     "                 drop_last_loader: bool = False,\n",
-    "                 optimizer = None,\n",
-    "                 optimizer_kwargs = None,\n",
-    "                 lr_scheduler = None,\n",
-    "                 lr_scheduler_kwargs = None,\n",
     "                 dataloader_kwargs = None,\n",
     "                 **trainer_kwargs):\n",
     "        super(VanillaTransformer, self).__init__(h=h,\n",
@@ -273,10 +265,6 @@
     "                                       num_workers_loader=num_workers_loader,\n",
     "                                       drop_last_loader=drop_last_loader,\n",
     "                                       random_seed=random_seed,\n",
-    "                                       optimizer=optimizer,\n",
-    "                                       optimizer_kwargs=optimizer_kwargs,\n",
-    "                                       lr_scheduler=lr_scheduler,\n",
-    "                                       lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
     "                                       dataloader_kwargs=dataloader_kwargs,\n",
     "                                       **trainer_kwargs)\n",
     "\n",
diff --git a/neuralforecast/common/_base_model.py b/neuralforecast/common/_base_model.py
index 59f7d4a14..e6e7db0b6 100644
--- a/neuralforecast/common/_base_model.py
+++ b/neuralforecast/common/_base_model.py
@@ -8,7 +8,6 @@
 import random
 import warnings
 from contextlib import contextmanager
-from copy import deepcopy
 from dataclasses import dataclass
 
 import fsspec
@@ -72,10 +71,6 @@ def __init__(
         random_seed,
         loss,
         valid_loss,
-        optimizer,
-        optimizer_kwargs,
-        lr_scheduler,
-        lr_scheduler_kwargs,
         futr_exog_list,
         hist_exog_list,
         stat_exog_list,
@@ -101,26 +96,6 @@ def __init__(
         self.train_trajectories = []
         self.valid_trajectories = []
 
-        # Optimization
-        if optimizer is not None and not issubclass(optimizer, torch.optim.Optimizer):
-            raise TypeError(
-                "optimizer is not a valid subclass of torch.optim.Optimizer"
-            )
-        self.optimizer = optimizer
-        self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}
-
-        # lr scheduler
-        if lr_scheduler is not None and not issubclass(
-            lr_scheduler, torch.optim.lr_scheduler.LRScheduler
-        ):
-            raise TypeError(
-                "lr_scheduler is not a valid subclass of torch.optim.lr_scheduler.LRScheduler"
-            )
-        self.lr_scheduler = lr_scheduler
-        self.lr_scheduler_kwargs = (
-            lr_scheduler_kwargs if lr_scheduler_kwargs is not None else {}
-        )
-
         # customized by set_configure_optimizers()
         self.config_optimizers = None
 
@@ -389,47 +364,19 @@ def on_fit_start(self):
 
     def configure_optimizers(self):
         if self.config_optimizers is not None:
+            # return the customized optimizer settings if specified
             return self.config_optimizers
 
-        if self.optimizer:
-            optimizer_signature = inspect.signature(self.optimizer)
-            optimizer_kwargs = deepcopy(self.optimizer_kwargs)
-            if "lr" in optimizer_signature.parameters:
-                if "lr" in optimizer_kwargs:
-                    warnings.warn(
-                        "ignoring learning rate passed in optimizer_kwargs, using the model's learning rate"
-                    )
-                optimizer_kwargs["lr"] = self.learning_rate
-            optimizer = self.optimizer(params=self.parameters(), **optimizer_kwargs)
-        else:
-            if self.optimizer_kwargs:
-                warnings.warn(
-                    "ignoring optimizer_kwargs as the optimizer is not specified"
-                )
-            optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)
-
-        lr_scheduler = {"frequency": 1, "interval": "step"}
-        if self.lr_scheduler:
-            lr_scheduler_signature = inspect.signature(self.lr_scheduler)
-            lr_scheduler_kwargs = deepcopy(self.lr_scheduler_kwargs)
-            if "optimizer" in lr_scheduler_signature.parameters:
-                if "optimizer" in lr_scheduler_kwargs:
-                    warnings.warn(
-                        "ignoring optimizer passed in lr_scheduler_kwargs, using the model's optimizer"
-                    )
-                    del lr_scheduler_kwargs["optimizer"]
-            lr_scheduler["scheduler"] = self.lr_scheduler(
-                optimizer=optimizer, **lr_scheduler_kwargs
-            )
-        else:
-            if self.lr_scheduler_kwargs:
-                warnings.warn(
-                    "ignoring lr_scheduler_kwargs as the lr_scheduler is not specified"
-                )
-            lr_scheduler["scheduler"] = torch.optim.lr_scheduler.StepLR(
+        # default choice
+        optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate)
+        scheduler = {
+            "scheduler": torch.optim.lr_scheduler.StepLR(
                 optimizer=optimizer, step_size=self.lr_decay_steps, gamma=0.5
-            )
-        return {"optimizer": optimizer, "lr_scheduler": lr_scheduler}
+            ),
+            "frequency": 1,
+            "interval": "step",
+        }
+        return {"optimizer": optimizer, "lr_scheduler": scheduler}
 
     def set_configure_optimizers(
         self,
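With this rewrite, `configure_optimizers` either returns the configuration registered through `set_configure_optimizers()` or falls back to the hard-coded Adam + StepLR pair above. A minimal sketch of a custom configuration with the dict shape Lightning expects; the full `set_configure_optimizers` signature is not visible in this hunk, so treating it as a setter for `config_optimizers` is an assumption:

```python
import torch

# Hypothetical helper: builds a Lightning-style optimizer configuration with
# the same structure as the new default above, but swapping in AdamW and a
# cosine schedule instead of Adam + StepLR.
def custom_optimizer_config(model, lr=1e-3, t_max=1000):
    optimizer = torch.optim.AdamW(model.parameters(), lr=lr)
    scheduler = {
        "scheduler": torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=t_max),
        "frequency": 1,
        "interval": "step",
    }
    return {"optimizer": optimizer, "lr_scheduler": scheduler}
```

Under that assumption, the returned dict lands in `self.config_optimizers`, and the early-return branch at the top of `configure_optimizers` hands it to Lightning unchanged.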
diff --git a/neuralforecast/common/_base_multivariate.py b/neuralforecast/common/_base_multivariate.py
index 0fdc3b94d..5acdf75eb 100644
--- a/neuralforecast/common/_base_multivariate.py
+++ b/neuralforecast/common/_base_multivariate.py
@@ -50,10 +50,6 @@ def __init__(
         drop_last_loader=False,
         random_seed=1,
         alias=None,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -61,10 +57,6 @@ def __init__(
             random_seed=random_seed,
             loss=loss,
             valid_loss=valid_loss,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             futr_exog_list=futr_exog_list,
             hist_exog_list=hist_exog_list,
             stat_exog_list=stat_exog_list,
diff --git a/neuralforecast/common/_base_recurrent.py b/neuralforecast/common/_base_recurrent.py
index 0479996c1..604eaddb8 100644
--- a/neuralforecast/common/_base_recurrent.py
+++ b/neuralforecast/common/_base_recurrent.py
@@ -50,10 +50,6 @@ def __init__(
         drop_last_loader=False,
         random_seed=1,
         alias=None,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -61,10 +57,6 @@ def __init__(
             random_seed=random_seed,
             loss=loss,
             valid_loss=valid_loss,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             futr_exog_list=futr_exog_list,
             hist_exog_list=hist_exog_list,
             stat_exog_list=stat_exog_list,
diff --git a/neuralforecast/common/_base_windows.py b/neuralforecast/common/_base_windows.py
index dd4a4c869..f83936fcb 100644
--- a/neuralforecast/common/_base_windows.py
+++ b/neuralforecast/common/_base_windows.py
@@ -53,10 +53,6 @@ def __init__(
         drop_last_loader=False,
         random_seed=1,
         alias=None,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -64,10 +60,6 @@ def __init__(
             random_seed=random_seed,
             loss=loss,
             valid_loss=valid_loss,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             futr_exog_list=futr_exog_list,
             hist_exog_list=hist_exog_list,
             stat_exog_list=stat_exog_list,
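The model files below repeat the same removal in every public constructor. A minimal sketch of a post-change call, assuming the library's usual public API (`Autoformer` is the next file in the diff; `input_size`, `max_steps`, and the `pin_memory` flag are outside this hunk):

```python
from neuralforecast.models import Autoformer

# After this change there are no optimizer / optimizer_kwargs / lr_scheduler /
# lr_scheduler_kwargs arguments: optimization defaults to Adam + StepLR unless
# customized through set_configure_optimizers().
model = Autoformer(
    h=12,
    input_size=24,
    max_steps=100,                           # forwarded to the Lightning Trainer
    dataloader_kwargs={"pin_memory": True},  # forwarded to the dataloader
)
```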
diff --git a/neuralforecast/models/autoformer.py b/neuralforecast/models/autoformer.py
index 815e57bc2..ffa081907 100644
--- a/neuralforecast/models/autoformer.py
+++ b/neuralforecast/models/autoformer.py
@@ -442,10 +442,6 @@ class Autoformer(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
     `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -494,10 +490,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -525,10 +517,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/bitcn.py b/neuralforecast/models/bitcn.py
index 53a775838..ed48fa5e0 100644
--- a/neuralforecast/models/bitcn.py
+++ b/neuralforecast/models/bitcn.py
@@ -116,10 +116,6 @@ class BiTCN(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, keyword arguments passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
     `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -161,10 +157,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -192,10 +184,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/deepar.py b/neuralforecast/models/deepar.py
index 3d2a2fd94..06e0860c2 100644
--- a/neuralforecast/models/deepar.py
+++ b/neuralforecast/models/deepar.py
@@ -87,10 +87,6 @@ class DeepAR(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -139,10 +135,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -188,10 +180,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/deepnpts.py b/neuralforecast/models/deepnpts.py
index f958e71be..8ba95a2f8 100644
--- a/neuralforecast/models/deepnpts.py
+++ b/neuralforecast/models/deepnpts.py
@@ -49,10 +49,6 @@ class DeepNPTS(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -96,10 +92,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -142,10 +134,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/dilated_rnn.py b/neuralforecast/models/dilated_rnn.py
index d56cc5f08..a56d3ed0c 100644
--- a/neuralforecast/models/dilated_rnn.py
+++ b/neuralforecast/models/dilated_rnn.py
@@ -317,10 +317,6 @@ class DilatedRNN(BaseRecurrent):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -359,10 +355,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -386,10 +378,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/dlinear.py b/neuralforecast/models/dlinear.py
index 17965c869..c0ba3773c 100644
--- a/neuralforecast/models/dlinear.py
+++ b/neuralforecast/models/dlinear.py
@@ -75,10 +75,6 @@ class DLinear(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -118,10 +114,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -149,10 +141,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/fedformer.py b/neuralforecast/models/fedformer.py
index 89e2fe3ef..2073fde45 100644
--- a/neuralforecast/models/fedformer.py
+++ b/neuralforecast/models/fedformer.py
@@ -440,10 +440,6 @@ class FEDformer(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -491,10 +487,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -521,10 +513,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/gru.py b/neuralforecast/models/gru.py
index 9a6d92325..da24a52e7 100644
--- a/neuralforecast/models/gru.py
+++ b/neuralforecast/models/gru.py
@@ -52,10 +52,6 @@ class GRU(BaseRecurrent):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -95,10 +91,6 @@ def __init__(
         random_seed=1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -122,10 +114,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/informer.py b/neuralforecast/models/informer.py
index 8b115cebd..82ad48f55 100644
--- a/neuralforecast/models/informer.py
+++ b/neuralforecast/models/informer.py
@@ -226,10 +226,6 @@ class Informer(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -278,10 +274,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -309,10 +301,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/itransformer.py b/neuralforecast/models/itransformer.py
index 9e577a71d..b651ca730 100644
--- a/neuralforecast/models/itransformer.py
+++ b/neuralforecast/models/itransformer.py
@@ -134,10 +134,6 @@ class iTransformer(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -180,10 +176,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -208,10 +200,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/kan.py b/neuralforecast/models/kan.py
index 29d7b1d00..74ea0b099 100644
--- a/neuralforecast/models/kan.py
+++ b/neuralforecast/models/kan.py
@@ -284,8 +284,6 @@ class KAN(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -334,8 +332,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -365,8 +361,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
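
Note: KAN is the one model in this patch that only ever exposed `optimizer` and `optimizer_kwargs` (it never took the scheduler arguments), which is why its hunks remove two lines instead of four. After the change it is customized through the same `config_optimizers` path as every other model. A short sketch under the same assumptions as above, with illustrative constructor arguments:

```python
import torch
from neuralforecast.models import KAN

model = KAN(h=12, input_size=24)  # illustrative arguments
# A dict with only an "optimizer" key is also a valid Lightning
# configure_optimizers() return value, so no scheduler is required.
model.config_optimizers = {
    "optimizer": torch.optim.SGD(model.parameters(), lr=1e-2, momentum=0.9),
}
```
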
diff --git a/neuralforecast/models/lstm.py b/neuralforecast/models/lstm.py
index e89db3628..2f1e832e1 100644
--- a/neuralforecast/models/lstm.py
+++ b/neuralforecast/models/lstm.py
@@ -50,10 +50,6 @@ class LSTM(BaseRecurrent):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -92,10 +88,6 @@ def __init__(
         random_seed=1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -119,10 +111,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/mlp.py b/neuralforecast/models/mlp.py
index 0794ac7c3..40cc8ce31 100644
--- a/neuralforecast/models/mlp.py
+++ b/neuralforecast/models/mlp.py
@@ -49,10 +49,6 @@ class MLP(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -90,10 +86,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -123,10 +115,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/mlpmultivariate.py b/neuralforecast/models/mlpmultivariate.py
index 7554bb44d..b25e6d2e7 100644
--- a/neuralforecast/models/mlpmultivariate.py
+++ b/neuralforecast/models/mlpmultivariate.py
@@ -43,10 +43,6 @@ class MLPMultivariate(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -80,10 +76,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -109,10 +101,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/nbeats.py b/neuralforecast/models/nbeats.py
index 02280fb79..0957abffc 100644
--- a/neuralforecast/models/nbeats.py
+++ b/neuralforecast/models/nbeats.py
@@ -228,10 +228,6 @@ class NBEATS(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -275,10 +271,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -310,10 +302,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/nbeatsx.py b/neuralforecast/models/nbeatsx.py
index 811392a66..4fb461db2 100644
--- a/neuralforecast/models/nbeatsx.py
+++ b/neuralforecast/models/nbeatsx.py
@@ -315,10 +315,6 @@ class NBEATSx(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -366,10 +362,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -404,10 +396,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/nhits.py b/neuralforecast/models/nhits.py
index ce5caeaaa..1d1bb9dd1 100644
--- a/neuralforecast/models/nhits.py
+++ b/neuralforecast/models/nhits.py
@@ -226,10 +226,6 @@ class NHITS(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -279,10 +275,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -312,10 +304,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/nlinear.py b/neuralforecast/models/nlinear.py
index 4909ddbd3..3480fc48c 100644
--- a/neuralforecast/models/nlinear.py
+++ b/neuralforecast/models/nlinear.py
@@ -39,10 +39,6 @@ class NLinear(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -81,10 +77,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -112,10 +104,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/patchtst.py b/neuralforecast/models/patchtst.py
index 0b2029fd4..3d92a532d 100644
--- a/neuralforecast/models/patchtst.py
+++ b/neuralforecast/models/patchtst.py
@@ -836,10 +836,6 @@ class PatchTST(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -895,10 +891,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -926,10 +918,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/rmok.py b/neuralforecast/models/rmok.py
index 35db80aca..4061f36c8 100644
--- a/neuralforecast/models/rmok.py
+++ b/neuralforecast/models/rmok.py
@@ -284,10 +284,6 @@ class RMoK(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -327,10 +323,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -355,10 +347,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/rnn.py b/neuralforecast/models/rnn.py
index f5d60f42a..f950c5d99 100644
--- a/neuralforecast/models/rnn.py
+++ b/neuralforecast/models/rnn.py
@@ -50,10 +50,6 @@ class RNN(BaseRecurrent):
     `random_seed`: int=1, random_seed for pytorch initializer and numpy generators.<br>
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `alias`: str, optional,  Custom name of the model.<br>
 
@@ -95,10 +91,6 @@ def __init__(
         random_seed=1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -122,10 +114,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/softs.py b/neuralforecast/models/softs.py
index cb425200a..6112c3d80 100644
--- a/neuralforecast/models/softs.py
+++ b/neuralforecast/models/softs.py
@@ -109,10 +109,6 @@ class SOFTS(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -153,10 +149,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -181,10 +173,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/stemgnn.py b/neuralforecast/models/stemgnn.py
index 85a014e65..4fa2ccf40 100644
--- a/neuralforecast/models/stemgnn.py
+++ b/neuralforecast/models/stemgnn.py
@@ -169,10 +169,6 @@ class StemGNN(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -208,10 +204,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -237,10 +229,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/tcn.py b/neuralforecast/models/tcn.py
index fd900512c..fdbd1cdd1 100644
--- a/neuralforecast/models/tcn.py
+++ b/neuralforecast/models/tcn.py
@@ -47,10 +47,6 @@ class TCN(BaseRecurrent):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
     """
@@ -89,10 +85,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader=0,
         drop_last_loader=False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -116,10 +108,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/tft.py b/neuralforecast/models/tft.py
index f96d5646b..faadec9d5 100644
--- a/neuralforecast/models/tft.py
+++ b/neuralforecast/models/tft.py
@@ -457,10 +457,6 @@ class TFT(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -505,10 +501,6 @@ def __init__(
         num_workers_loader=0,
         drop_last_loader=False,
         random_seed: int = 1,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -537,10 +529,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/tide.py b/neuralforecast/models/tide.py
index ec98c2b13..257972570 100644
--- a/neuralforecast/models/tide.py
+++ b/neuralforecast/models/tide.py
@@ -81,10 +81,6 @@ class TiDE(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -132,10 +128,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -165,10 +157,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/timellm.py b/neuralforecast/models/timellm.py
index aa9276f72..93bd52c84 100644
--- a/neuralforecast/models/timellm.py
+++ b/neuralforecast/models/timellm.py
@@ -214,10 +214,6 @@ class TimeLLM(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -272,10 +268,6 @@ def __init__(
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
         random_seed: int = 1,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -302,10 +294,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/timemixer.py b/neuralforecast/models/timemixer.py
index 5585539bd..57e081ea5 100644
--- a/neuralforecast/models/timemixer.py
+++ b/neuralforecast/models/timemixer.py
@@ -285,10 +285,6 @@ class TimeMixer(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional,  Custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     `**trainer_kwargs`: int,  keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -336,10 +332,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -364,10 +356,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
diff --git a/neuralforecast/models/timesnet.py b/neuralforecast/models/timesnet.py
index aab548382..87ed9ca56 100644
--- a/neuralforecast/models/timesnet.py
+++ b/neuralforecast/models/timesnet.py
@@ -182,12 +182,6 @@ class TimesNet(BaseWindows):
         Workers to be used by `TimeSeriesDataLoader`.
     drop_last_loader : bool (default=False)
         If True `TimeSeriesDataLoader` drops last non-full batch.
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional (default=None)
-        User specified optimizer instead of the default choice (Adam).
-    `optimizer_kwargs`: dict, optional (defualt=None)
-        List of parameters used by the user specified `optimizer`.
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional (default=None)
         List of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>
     **trainer_kwargs
@@ -235,10 +229,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -266,10 +256,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/tsmixer.py b/neuralforecast/models/tsmixer.py
index 0d68e1e4c..23a3e4b99 100644
--- a/neuralforecast/models/tsmixer.py
+++ b/neuralforecast/models/tsmixer.py
@@ -160,10 +160,6 @@ class TSMixer(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional, custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user-specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, dictionary of parameters used by the user-specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user-specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, dictionary of parameters used by the user-specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, dictionary of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
     `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -203,10 +199,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -232,10 +224,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
diff --git a/neuralforecast/models/tsmixerx.py b/neuralforecast/models/tsmixerx.py
index 24897d442..b8fed092f 100644
--- a/neuralforecast/models/tsmixerx.py
+++ b/neuralforecast/models/tsmixerx.py
@@ -188,10 +188,6 @@ class TSMixerx(BaseMultivariate):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional, custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user-specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, dictionary of parameters used by the user-specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user-specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, dictionary of parameters used by the user-specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, dictionary of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
     `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -231,10 +227,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs
     ):
@@ -260,10 +252,6 @@ def __init__(
             random_seed=random_seed,
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs
         )
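
The docstrings above keep `**trainer_kwargs` as the pass-through to PyTorch Lightning's Trainer, so training-loop knobs stay configurable even with the optimizer arguments gone. A hypothetical example (argument values are illustrative; `n_series` is the series count these multivariate models require):

# Illustrative sketch only -- not part of this diff.
from neuralforecast.models import TSMixerx

model = TSMixerx(
    h=12,
    input_size=24,
    n_series=7,             # number of series in the multivariate panel
    max_steps=500,
    gradient_clip_val=1.0,  # forwarded to the underlying pl.Trainer
)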
diff --git a/neuralforecast/models/vanillatransformer.py b/neuralforecast/models/vanillatransformer.py
index 69fcc9c4d..c41eec20b 100644
--- a/neuralforecast/models/vanillatransformer.py
+++ b/neuralforecast/models/vanillatransformer.py
@@ -117,10 +117,6 @@ class VanillaTransformer(BaseWindows):
     `num_workers_loader`: int=os.cpu_count(), workers to be used by `TimeSeriesDataLoader`.<br>
     `drop_last_loader`: bool=False, if True `TimeSeriesDataLoader` drops last non-full batch.<br>
     `alias`: str, optional, custom name of the model.<br>
-    `optimizer`: Subclass of 'torch.optim.Optimizer', optional, user-specified optimizer instead of the default choice (Adam).<br>
-    `optimizer_kwargs`: dict, optional, dictionary of parameters used by the user-specified `optimizer`.<br>
-    `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user-specified lr_scheduler instead of the default choice (StepLR).<br>
-    `lr_scheduler_kwargs`: dict, optional, dictionary of parameters used by the user-specified `lr_scheduler`.<br>
     `dataloader_kwargs`: dict, optional, dictionary of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`.<br>
     `**trainer_kwargs`: keyword trainer arguments inherited from [PyTorch Lightning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>
 
@@ -166,10 +162,6 @@ def __init__(
         random_seed: int = 1,
         num_workers_loader: int = 0,
         drop_last_loader: bool = False,
-        optimizer=None,
-        optimizer_kwargs=None,
-        lr_scheduler=None,
-        lr_scheduler_kwargs=None,
         dataloader_kwargs=None,
         **trainer_kwargs,
     ):
@@ -196,10 +188,6 @@ def __init__(
             num_workers_loader=num_workers_loader,
             drop_last_loader=drop_last_loader,
             random_seed=random_seed,
-            optimizer=optimizer,
-            optimizer_kwargs=optimizer_kwargs,
-            lr_scheduler=lr_scheduler,
-            lr_scheduler_kwargs=lr_scheduler_kwargs,
             dataloader_kwargs=dataloader_kwargs,
             **trainer_kwargs,
         )
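
Because the same four keywords disappear from every constructor touched here, a quick signature check verifies the cleanup once the diff is applied. An illustrative test sketch, not part of the diff:

# Illustrative sketch only -- not part of this diff.
import inspect

from neuralforecast.models import TimesNet, TSMixer, TSMixerx, VanillaTransformer

REMOVED = {"optimizer", "optimizer_kwargs", "lr_scheduler", "lr_scheduler_kwargs"}

for cls in (TimesNet, TSMixer, TSMixerx, VanillaTransformer):
    params = set(inspect.signature(cls.__init__).parameters)
    leftovers = REMOVED & params
    assert not leftovers, f"{cls.__name__} still exposes {sorted(leftovers)}"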