diff --git a/flaml/automl.py b/flaml/automl.py index ae74ec9ad3..6b43bf125b 100644 --- a/flaml/automl.py +++ b/flaml/automl.py @@ -402,7 +402,7 @@ def _validate_data(self, X_train_all, y_train_all, dataframe, label, self._X_train_all, self._y_train_all = \ self._transformer.fit_transform(X, y, self._state.task) self._label_transformer = self._transformer.label_transformer - + self._sample_weight_full = self._state.fit_kwargs.get('sample_weight') if X_val is not None and y_val is not None: if not (isinstance(X_val, np.ndarray) or issparse(X_val) or @@ -446,7 +446,8 @@ def _prepare_data(self, self._X_train_all, self._y_train_all if issparse(X_train_all): X_train_all = X_train_all.tocsr() - if self._state.task != 'regression': + if self._state.task != 'regression' and self._state.fit_kwargs.get( + 'sample_weight') is None: # logger.info(f"label {pd.unique(y_train_all)}") label_set, counts = np.unique(y_train_all, return_counts=True) # augment rare classes @@ -1093,8 +1094,9 @@ def _search(self): self._state.best_loss)) else: logger.info(f"no enough budget for learner {estimator}") - self.estimator_list.remove(estimator) - self._estimator_index -= 1 + if self._estimator_index is not None: + self.estimator_list.remove(estimator) + self._estimator_index -= 1 if self._retrain_full and best_config_sig and not better and ( self._search_states[self._best_estimator].sample_size == self._state.data_size) and (est_retrain_time <= @@ -1151,7 +1153,11 @@ def _search(self): stacker = Stacker(estimators, best_m, n_jobs=self._state.n_jobs, passthrough=True) - stacker.fit(self._X_train_all, self._y_train_all) + if self._sample_weight_full is not None: + self._state.fit_kwargs[ + 'sample_weight'] = self._sample_weight_full + stacker.fit(self._X_train_all, self._y_train_all, + **self._state.fit_kwargs) logger.info(f'ensemble: {stacker}') self._trained_estimator = stacker self._trained_estimator.model = stacker diff --git a/flaml/searcher/flow2.py b/flaml/searcher/flow2.py index a4f244b96a..912e0961c9 100644 --- a/flaml/searcher/flow2.py +++ b/flaml/searcher/flow2.py @@ -121,8 +121,8 @@ def _init_search(self): self._unordered_cat_hp = {} self._cat_hp_cost = {} for key, domain in self.space.items(): - assert not isinstance(domain, dict), \ - key+"'s domain is grid search which is not supported in FLOW2." + assert not (isinstance(domain, dict) and 'grid_search' in domain + ), key+"'s domain is grid search which is not supported in FLOW2." if callable(getattr(domain, 'get_sampler', None)): self._tunable_keys.append(key) sampler = domain.get_sampler() diff --git a/flaml/tune/README.md b/flaml/tune/README.md index c1a5b21c55..f7e4773a77 100644 --- a/flaml/tune/README.md +++ b/flaml/tune/README.md @@ -6,6 +6,7 @@ The API is compatible with ray tune. Example: ```python +# require: pip install flaml[blendsearch] from flaml import tune import time @@ -42,6 +43,7 @@ print(analysis.best_config) # the best config Or, using ray tune's API: ```python +# require: pip install flaml[blendsearch] ray[tune] from ray import tune as raytune from flaml import CFO, BlendSearch import time @@ -146,6 +148,7 @@ based on optimism in face of uncertainty. Example: ```python +# require: pip install flaml[blendsearch] from flaml import BlendSearch tune.run(... 
search_alg = BlendSearch(points_to_evaluate=[init_config]), diff --git a/flaml/version.py b/flaml/version.py index d31c31eaeb..788da1fb31 100644 --- a/flaml/version.py +++ b/flaml/version.py @@ -1 +1 @@ -__version__ = "0.2.3" +__version__ = "0.2.4" diff --git a/notebook/finetune_transformer_demo.ipynb b/notebook/finetune_transformer_demo.ipynb index 2e88890c5d..a7254516e5 100644 --- a/notebook/finetune_transformer_demo.ipynb +++ b/notebook/finetune_transformer_demo.ipynb @@ -6,11 +6,16 @@ "source": [ "This notebook uses the Huggingface transformers library to finetune a transformer model.\n", "\n", - "**Requirements.** This notebook has additional requirements:\n", - "\n", - "```bash\n", - "pip install -r transformers_requirements.txt\n", - "```" + "**Requirements.** This notebook has additional requirements:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install torch transformers datasets ipywidgets" ] }, { @@ -699,7 +704,7 @@ "source": [ "### Step 3. Launch with `flaml.tune.run`\n", "\n", - "We are now ready to laungh the tuning using `flaml.tune.run`:" + "We are now ready to launch the tuning using `flaml.tune.run`:" ], "cell_type": "markdown", "metadata": {} @@ -766,9 +771,13 @@ ], "metadata": { "kernelspec": { - "display_name": "flaml", - "language": "python", - "name": "flaml" + "name": "python3", + "display_name": "Python 3.7.7 64-bit ('flaml': conda)", + "metadata": { + "interpreter": { + "hash": "bfcd9a6a9254a5e160761a1fd7a9e444f011592c6770d9f4180dde058a9df5dd" + } + } }, "language_info": { "codemirror_mode": { @@ -780,7 +789,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.7.7-final" } }, "nbformat": 4, diff --git a/notebook/flaml_azureml.ipynb b/notebook/flaml_azureml.ipynb index cd176c3868..57754adb41 100644 --- a/notebook/flaml_azureml.ipynb +++ b/notebook/flaml_azureml.ipynb @@ -35,7 +35,16 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install flaml[notebook,azureml]" + ] + }, + { + "cell_type": "code", + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -62,7 +71,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 3, "metadata": { "slideshow": { "slide_type": "subslide" @@ -72,9 +81,10 @@ "outputs": [ { "output_type": "stream", - "name": "stdout", + "name": "stderr", "text": [ - "load dataset from ./openml_ds1169.pkl\n", + "DEBUG - Data pickle file already exists and is up to date.\n", + "download dataset from openml\n", "Dataset name: airlines\n", "X_train.shape: (404537, 7), y_train.shape: (404537,);\n", "X_test.shape: (134846, 7), y_test.shape: (134846,)\n" @@ -100,7 +110,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 4, "metadata": { "slideshow": { "slide_type": "slide" @@ -115,7 +125,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 5, "metadata": { "slideshow": { "slide_type": "slide" @@ -135,7 +145,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 6, "metadata": { "slideshow": { "slide_type": "slide" @@ -147,64 +157,122 @@ "output_type": "stream", "name": "stderr", "text": [ - "[flaml.automl: 02-05 13:30:29] {820} INFO - Evaluation method: holdout\n", - "[flaml.automl: 02-05 13:30:29] {545} INFO - Using StratifiedKFold\n", - "[flaml.automl: 02-05 13:30:29] {841} INFO - Minimizing error metric: 1-accuracy\n", 
- "[flaml.automl: 02-05 13:30:29] {861} INFO - List of ML learners in AutoML Run: ['lgbm', 'rf', 'xgboost']\n", - "[flaml.automl: 02-05 13:30:29] {920} INFO - iteration 0 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:32] {1074} INFO - at 0.9s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", - "[flaml.automl: 02-05 13:30:32] {920} INFO - iteration 1 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:32] {1074} INFO - at 3.3s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", - "[flaml.automl: 02-05 13:30:32] {920} INFO - iteration 2 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:35] {1074} INFO - at 3.8s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 02-05 13:30:35] {920} INFO - iteration 3 current learner xgboost\n", - "[flaml.automl: 02-05 13:30:35] {1074} INFO - at 6.6s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 02-05 13:30:35] {920} INFO - iteration 4 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:36] {1074} INFO - at 7.3s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 02-05 13:30:36] {920} INFO - iteration 5 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:39] {1074} INFO - at 8.6s,\tbest lgbm's error=0.3558,\tbest lgbm's error=0.3558\n", - "[flaml.automl: 02-05 13:30:39] {920} INFO - iteration 6 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:42] {1074} INFO - at 11.4s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", - "[flaml.automl: 02-05 13:30:42] {920} INFO - iteration 7 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:43] {1074} INFO - at 14.1s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", - "[flaml.automl: 02-05 13:30:43] {920} INFO - iteration 8 current learner lgbm\n", - "[flaml.automl: 02-05 13:30:46] {1074} INFO - at 15.1s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:30:46] {920} INFO - iteration 9 current learner xgboost\n", - "[flaml.automl: 02-05 13:30:46] {1074} INFO - at 17.6s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:30:46] {920} INFO - iteration 10 current learner xgboost\n", - "[flaml.automl: 02-05 13:30:47] {1074} INFO - at 18.5s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:30:47] {920} INFO - iteration 11 current learner rf\n", - "[flaml.automl: 02-05 13:30:52] {1074} INFO - at 22.8s,\tbest rf's error=0.3861,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:30:52] {920} INFO - iteration 12 current learner rf\n", - "[flaml.automl: 02-05 13:30:55] {1074} INFO - at 26.7s,\tbest rf's error=0.3861,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:30:55] {920} INFO - iteration 13 current learner rf\n", - "[flaml.automl: 02-05 13:31:01] {1074} INFO - at 32.0s,\tbest rf's error=0.3861,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:01] {920} INFO - iteration 14 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:02] {1074} INFO - at 32.9s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:02] {920} INFO - iteration 15 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:03] {1074} INFO - at 34.2s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:03] {920} INFO - iteration 16 current learner rf\n", - "[flaml.automl: 02-05 13:31:09] {1074} INFO - at 40.2s,\tbest rf's error=0.3861,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:09] {920} INFO - 
iteration 17 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:10] {1074} INFO - at 41.4s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:10] {920} INFO - iteration 18 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:12] {1074} INFO - at 42.8s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 02-05 13:31:12] {920} INFO - iteration 19 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:15] {1074} INFO - at 44.4s,\tbest lgbm's error=0.3412,\tbest lgbm's error=0.3412\n", - "[flaml.automl: 02-05 13:31:15] {920} INFO - iteration 20 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:19] {1074} INFO - at 48.1s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", - "[flaml.automl: 02-05 13:31:19] {920} INFO - iteration 21 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:19] {1074} INFO - at 50.5s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", - "[flaml.automl: 02-05 13:31:19] {920} INFO - iteration 22 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:25] {1074} INFO - at 54.7s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", - "[flaml.automl: 02-05 13:31:25] {920} INFO - iteration 23 current learner rf\n", - "[flaml.automl: 02-05 13:31:29] {1074} INFO - at 60.2s,\tbest rf's error=0.3861,\tbest lgbm's error=0.3311\n", - "[flaml.automl: 02-05 13:31:29] {1114} INFO - selected model: LGBMClassifier(colsample_bytree=0.9997863921359742,\n", + "[flaml.automl: 02-17 13:46:26] {840} INFO - Evaluation method: holdout\n", + "INFO - Evaluation method: holdout\n", + "[flaml.automl: 02-17 13:46:26] {565} INFO - Using StratifiedKFold\n", + "INFO - Using StratifiedKFold\n", + "[flaml.automl: 02-17 13:46:26] {861} INFO - Minimizing error metric: 1-accuracy\n", + "INFO - Minimizing error metric: 1-accuracy\n", + "[flaml.automl: 02-17 13:46:26] {881} INFO - List of ML learners in AutoML Run: ['lgbm', 'rf', 'xgboost']\n", + "INFO - List of ML learners in AutoML Run: ['lgbm', 'rf', 'xgboost']\n", + "[flaml.automl: 02-17 13:46:26] {940} INFO - iteration 0 current learner lgbm\n", + "INFO - iteration 0 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:29] {1094} INFO - at 0.9s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", + "INFO - at 0.9s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", + "[flaml.automl: 02-17 13:46:29] {940} INFO - iteration 1 current learner lgbm\n", + "INFO - iteration 1 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:30] {1094} INFO - at 3.6s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", + "INFO - at 3.6s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", + "[flaml.automl: 02-17 13:46:30] {940} INFO - iteration 2 current learner lgbm\n", + "INFO - iteration 2 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:33] {1094} INFO - at 4.5s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", + "INFO - at 4.5s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", + "[flaml.automl: 02-17 13:46:33] {940} INFO - iteration 3 current learner xgboost\n", + "INFO - iteration 3 current learner xgboost\n", + "[flaml.automl: 02-17 13:46:37] {1094} INFO - at 8.8s,\tbest xgboost's error=0.3750,\tbest xgboost's error=0.3750\n", + "INFO - at 8.8s,\tbest xgboost's error=0.3750,\tbest xgboost's error=0.3750\n", + "[flaml.automl: 02-17 13:46:37] {940} INFO - iteration 4 current learner rf\n", + "INFO - iteration 4 current learner rf\n", + "[flaml.automl: 02-17 13:46:42] {1094} INFO - at 16.1s,\tbest rf's error=0.3877,\tbest xgboost's 
error=0.3750\n", + "INFO - at 16.1s,\tbest rf's error=0.3877,\tbest xgboost's error=0.3750\n", + "[flaml.automl: 02-17 13:46:42] {940} INFO - iteration 5 current learner lgbm\n", + "INFO - iteration 5 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:43] {1094} INFO - at 16.8s,\tbest lgbm's error=0.3751,\tbest xgboost's error=0.3750\n", + "INFO - at 16.8s,\tbest lgbm's error=0.3751,\tbest xgboost's error=0.3750\n", + "[flaml.automl: 02-17 13:46:43] {940} INFO - iteration 6 current learner lgbm\n", + "INFO - iteration 6 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:46] {1094} INFO - at 18.2s,\tbest lgbm's error=0.3558,\tbest lgbm's error=0.3558\n", + "INFO - at 18.2s,\tbest lgbm's error=0.3558,\tbest lgbm's error=0.3558\n", + "[flaml.automl: 02-17 13:46:46] {940} INFO - iteration 7 current learner lgbm\n", + "INFO - iteration 7 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:49] {1094} INFO - at 21.0s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", + "INFO - at 21.0s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", + "[flaml.automl: 02-17 13:46:49] {940} INFO - iteration 8 current learner lgbm\n", + "INFO - iteration 8 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:50] {1094} INFO - at 23.8s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", + "INFO - at 23.8s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", + "[flaml.automl: 02-17 13:46:50] {940} INFO - iteration 9 current learner lgbm\n", + "INFO - iteration 9 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:53] {1094} INFO - at 25.3s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "INFO - at 25.3s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:46:53] {940} INFO - iteration 10 current learner lgbm\n", + "INFO - iteration 10 current learner lgbm\n", + "[flaml.automl: 02-17 13:46:54] {1094} INFO - at 28.2s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "INFO - at 28.2s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:46:54] {940} INFO - iteration 11 current learner rf\n", + "INFO - iteration 11 current learner rf\n", + "[flaml.automl: 02-17 13:46:59] {1094} INFO - at 32.9s,\tbest rf's error=0.3847,\tbest lgbm's error=0.3470\n", + "INFO - at 32.9s,\tbest rf's error=0.3847,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:46:59] {940} INFO - iteration 12 current learner xgboost\n", + "INFO - iteration 12 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:00] {1094} INFO - at 33.6s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3470\n", + "INFO - at 33.6s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:47:00] {940} INFO - iteration 13 current learner lgbm\n", + "INFO - iteration 13 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:01] {1094} INFO - at 35.3s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "INFO - at 35.3s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:47:01] {940} INFO - iteration 14 current learner xgboost\n", + "INFO - iteration 14 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:02] {1094} INFO - at 36.1s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3470\n", + "INFO - at 36.1s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:47:02] {940} INFO - iteration 15 current learner lgbm\n", + "INFO - iteration 15 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:04] {1094} INFO - at 37.5s,\tbest 
lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "INFO - at 37.5s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:47:04] {940} INFO - iteration 16 current learner xgboost\n", + "INFO - iteration 16 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:04] {1094} INFO - at 38.1s,\tbest xgboost's error=0.3736,\tbest lgbm's error=0.3470\n", + "INFO - at 38.1s,\tbest xgboost's error=0.3736,\tbest lgbm's error=0.3470\n", + "[flaml.automl: 02-17 13:47:04] {940} INFO - iteration 17 current learner lgbm\n", + "INFO - iteration 17 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:08] {1094} INFO - at 39.7s,\tbest lgbm's error=0.3412,\tbest lgbm's error=0.3412\n", + "INFO - at 39.7s,\tbest lgbm's error=0.3412,\tbest lgbm's error=0.3412\n", + "[flaml.automl: 02-17 13:47:08] {940} INFO - iteration 18 current learner lgbm\n", + "INFO - iteration 18 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:12] {1094} INFO - at 43.6s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", + "INFO - at 43.6s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", + "[flaml.automl: 02-17 13:47:12] {940} INFO - iteration 19 current learner lgbm\n", + "INFO - iteration 19 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:12] {1094} INFO - at 46.0s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", + "INFO - at 46.0s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", + "[flaml.automl: 02-17 13:47:12] {940} INFO - iteration 20 current learner lgbm\n", + "INFO - iteration 20 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:19] {1094} INFO - at 50.8s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", + "INFO - at 50.8s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", + "[flaml.automl: 02-17 13:47:19] {940} INFO - iteration 21 current learner lgbm\n", + "INFO - iteration 21 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:22] {1094} INFO - at 56.2s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", + "INFO - at 56.2s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", + "[flaml.automl: 02-17 13:47:22] {940} INFO - iteration 22 current learner rf\n", + "INFO - iteration 22 current learner rf\n", + "[flaml.automl: 02-17 13:47:22] {1096} INFO - no enough budget for learner rf\n", + "INFO - no enough budget for learner rf\n", + "[flaml.automl: 02-17 13:47:22] {940} INFO - iteration 23 current learner xgboost\n", + "INFO - iteration 23 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:22] {1096} INFO - no enough budget for learner xgboost\n", + "INFO - no enough budget for learner xgboost\n", + "[flaml.automl: 02-17 13:47:22] {1135} INFO - selected model: LGBMClassifier(colsample_bytree=0.9997863921359742,\n", + " learning_rate=0.1564464373197609, max_bin=511,\n", + " min_child_weight=7.427173668000723, n_estimators=18,\n", + " num_leaves=1846, objective='binary',\n", + " reg_alpha=6.349231150788211e-09, reg_lambda=0.8927146483558472)\n", + "INFO - selected model: LGBMClassifier(colsample_bytree=0.9997863921359742,\n", " learning_rate=0.1564464373197609, max_bin=511,\n", " min_child_weight=7.427173668000723, n_estimators=18,\n", " num_leaves=1846, objective='binary',\n", " reg_alpha=6.349231150788211e-09, reg_lambda=0.8927146483558472)\n", - "[flaml.automl: 02-05 13:31:29] {875} INFO - fit succeeded\n" + "[flaml.automl: 02-17 13:47:22] {895} INFO - fit succeeded\n", + "INFO - fit succeeded\n" ] } ], @@ -228,7 +296,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 7, 
"metadata": { "slideshow": { "slide_type": "slide" @@ -240,7 +308,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}\nBest accuracy on validation data: 0.6689\nTraining duration of best run: 4.18 s\n" + "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}\nBest accuracy on validation data: 0.6689\nTraining duration of best run: 4.858 s\n" ] } ], @@ -254,7 +322,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 8, "metadata": { "slideshow": { "slide_type": "slide" @@ -273,7 +341,7 @@ ] }, "metadata": {}, - "execution_count": 22 + "execution_count": 8 } ], "source": [ @@ -282,7 +350,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 9, "metadata": { "slideshow": { "slide_type": "slide" @@ -298,7 +366,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 10, "metadata": { "slideshow": { "slide_type": "slide" @@ -324,7 +392,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 11, "metadata": { "slideshow": { "slide_type": "slide" @@ -336,7 +404,10 @@ "output_type": "stream", "name": "stdout", "text": [ - "accuracy = 0.6681918633107397\nroc_auc = 0.7208412179342409\nlog_loss = 0.6064652793713222\nf1 = 0.5838518559855651\n" + "accuracy = 0.6681918633107397\n", + "roc_auc = 0.7208412179342409\n", + "log_loss = 0.6064652793713222\n", + "f1 = 0.5838518559855651\n" ] } ], @@ -362,7 +433,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 12, "metadata": { "slideshow": { "slide_type": "subslide" @@ -374,7 +445,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 
'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 'reg_lambda': 0.028657570201141073}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 'reg_lambda': 0.028657570201141073}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 
'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}}\n" + "{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}}\n{'Current Learner': 'xgboost', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}, 'Best Learner': 'xgboost', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'colsample_bylevel': 1.0, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 
'reg_lambda': 0.028657570201141073}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 'reg_lambda': 0.028657570201141073}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}}\n" ] } ], @@ -389,7 +460,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 13, "metadata": { "slideshow": { "slide_type": "slide" @@ -400,8 +471,8 @@ "output_type": "display_data", "data": { "text/plain": "
", - "image/svg+xml": "\r\n\r\n\r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n", - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3deZxU5Zn3/8+XZmt2sEHZFFDADRUlLol7oqAxLtGo8cky5jcaZ7KYmJjRzGSbjM8k4ck60ThqjFlcogYVE5a4a4wbBKRBQBGVtVmEZmmapbuv3x91Gou2uimgq6ur6vt+vfpFn/vcp+o6InXVuVdFBGZmZk11yHcAZmbWPjlBmJlZRk4QZmaWkROEmZll5ARhZmYZOUGYmVlGThBme0HSKZIW5jsOs1xygrCCI+ltSR/JZwwR8VxEjM7V60saL+lZSZskrZH0jKTzc/V+Zpk4QZhlIKksj+99CfAA8DtgCLA/8G3gY3vxWpLkf+e2V/w/jhUNSR0k3SDpTUnvSrpfUr+08w9IqpK0Ifl2fkTaubsk/UrSFEk1wBnJk8rXJc1JrvmjpK5J/dMlLUu7vtm6yflvSFopaYWkf5YUkg7JcA8CfgJ8PyLuiIgNEdEQEc9ExFVJne9K+kPaNcOS1+uYHD8t6SZJzwNbgG9KmtHkfb4qaXLyexdJ/0/SEkmrJN0qqXwf/zqsCDhBWDH5MnAhcBowCFgP3Jx2fiowEhgA/AO4u8n1VwA3AT2BvyVllwITgOHAUcA/tfD+GetKmgBcB3wEOCSJrzmjgaHAgy3UycangatJ3cv/AKMljUw7fwVwT/L7D4FRwDFJfINJPbFYiXOCsGLyeeDfI2JZRGwDvgtc0vjNOiLujIhNaeeOltQ77fpHIuL55Bv71qTsFxGxIiLWAY+S+hBtTnN1LwV+ExHzImIL8L0WXmO/5M+VWd91Zncl71cXERuAR4BPAiSJ4lBgcvLEchXw1YhYFxGbgP8LXL6P729FwAnCislBwEOSqiVVA/OBemB/SWWSfpA0P20E3k6uqUi7fmmG16xK+30L0KOF92+u7qAmr53pfRq9m/w5sIU62Wj6HveQJAhSTw8PJ8mqP9ANmJn2321aUm4lzgnCislS4JyI6JP20zUilpP6ULyAVDNPb2BYco3Srs/V0sYrSXU2NxraQt2FpO7j4hbq1JD6UG90QIY6Te/lr0CFpGNIJYrG5qW1QC1wRNp/s94R0VIitBLhBGGFqpOkrmk/HYFbgZskHQQgqb+kC5L6PYFtpL6hdyPVjNJW7geulHSYpG600L4fqfX3rwO+JelKSb2SzveTJd2WVJsNnCrpwKSJ7MbdBRARdaT6NSYC/YDHkvIG4Hbgp5IGAEgaLGn8Xt+tFQ0nCCtUU0h98238+S7wc2Ay8FdJm4AXgROS+r8D3gGWA68l59pEREwFfgE8BSwCXkhObWum/oPAZcDngBXAKuC/SPUjEBGPAX8E5gAzgT9nGco9pJ6gHkgSRqN/S+J6MWl+e5xUZ7mVOHnDILO2JekwYC7QpckHtVm74icIszYg6SJJnSX1JTWs9FEnB2vvnCDM2sbngTXAm6RGVv1LfsMx2z03MZmZWUZ+gjAzs4w65juA1lRRURHDhg3LdxhmZgVj5syZayMi48TIokoQw4YNY8aMGbuvaGZmAEh6p7lzOW1ikjRB0kJJiyTd0Eyd0yXNljRP0jNJ2eikrPFno6Sv5DJWMzPbVc6eIJL19G8GzgKWAa9ImhwRr6XV6QPcAkyIiCWNMzkjYiHJQmfJ6ywHHspVrGZm9n65fII4HlgUEYsjYjtwH6m1cNJdAUyKiCUAEbE6w+t8GHgzIpp9DDIzs9aXywQxmF1XlFyWlKUbBfRNNjiZKekzGV7ncuDeHMVoZmbNyGUntTKUNZ100RE4jtRTQjnwgqQXI+J1AEmdgfNpYTEySVeT2hiFAw88sBXCNjMzyG2CWMauyxoPIbXwWNM6ayOiBqiR9CxwNPB6cv4c4B8Rsaq5N4mI24DbAMaNG+dZf2ZWMh6etZyJ0xeyorqWQX3KuX78aC4c27ShZu/lsonpFWCkpOHJk8DlpFbaTPcIcIqkjskyyCeQ2uSl0Sdx85KZ2fs8PGs5N06qZHl1LQEsr67lxkmVPDxreau9R84SRLIQ2ReB6aQ+9O+PiHmSrpF0TVJnPqndq+YALwN3RMRcgCRhnAVMylWMZmaFauL0hdTuqN+lrHZHPROnL2y198jpRLmImEJq3f70slubHE8ktYlJ02u38N7+vGZmlmZ5dW3G8hXNlO8Nr8VkZlZgVlTX0rks88f3oD7lrfY+ThBmZgVk2twqzvn5c0jQqWzXwaLlncq4fnzrbQZYVGsxmZkVq9rt9fzXX17j7peWMGZwb37xybG8urQ6p6OYnCDMzNq5BVUb+dI9s3hj9WY+f+oIvnb2aDp37MDwiu6tmhCacoIwM2unIoLfv/gO//WX+fTq2onffe54Th2VcWXunHCCMDNrh9bVbOcbD87h8fmrOGN0fyZ+4mgqenRp0xicIMzM2pm/v7mWr/5xNutrdvDt8w7nyg8NQ8q0elFuOUGYmbUTO+ob+Nnjr3PL028yvKI7v/7sBzhycO+8xeMEYWbWDixdt4Uv3TuL2UuruWzcUL5z/uF065zfj2gnCDOzPHtk9nL+46G5IPjlFWM576hB+Q4JcIIwM8ubmm11fGfyPB6cuYzjDurLzy8/hiF9u+U7rJ2cIMzM8qBy2Qa+fN8s3nm3hi9/eCRfPvMQOjazfEa+OEGYmbWhhobg1397ix9NX0BFjy7ce9WJnDCifa5L6gRhZtZGVm/aytfuf5Xn3ljLhCMO4AcXj6FPt875DqtZThBmZm3gqYWruf6BV9m0tY6bLjqSK44/MC9zG/aEE4SZWQ5tq6vnR9MW8uu/vcWhB/TknqtOZNT+PfMdVlacIMzMcuTNNZv58r2zmLdiI5896SBuPPcwunYqy3dYWXOCMDNrZRHBAzOW8Z3J8+jaqQO3f2YcZx2+f77D2mNOEGZmrWhD7Q7+/aFK/jxnJSeN2I+fXnYMB/Tumu+w9ooThJlZK5n5zjq+fO9sqjZu5frxo7nmtIMp69C+O6Jb4gRhZraP6huCW55axM+eeINBfbry4DUnMfbAvvkOa585QZiZ7YOVG2r5yn2zeemtdVxwzCC+f+GR9OraKd9htQonCDOzvTR9XhX/9qc5bK9r4MefOJqPHzu43c9t2BNOEGZmWXh41nImTl/IiupaBvbuyvCK7jz/5ruMGdybX3xyLMMruuc7xFbnBGFmthsPz1rOjZMqqd1RD8CKDVtZsWErZx46gFs/dRydO7avRfZaS3HelZlZK5o4feHO5JBuYdWmok0O4ARhZrZby6trM5avaKa8WDhBmJm14J13a+jYzFyGQX3K2ziatuUEYWbWjKcWruZj//M3OnfsQOcmm/mUdyrj+vGj8xRZ23CCMDNroqEh+MUTb/C5u15hSN9uTP/KqfzokqMY3KccAYP7lPPfHx/DhW
MH5zvUnPIoJjOzNBtqd/C1+2fz+PzVfHzsYG66aAzlncsY2q9b0SeEppwgzMwSC6s2cc0fZrJ03Ra+d/4RfOakg4pq4tuecoIwMwP+PGcF33hwDt27dOS+q09k3LB++Q4p75wgzKyk1dU38MNpC7j9ubcYd1Bfbvk/xzKgV2Euz93actpJLWmCpIWSFkm6oZk6p0uaLWmepGfSyvtIelDSAknzJZ2Uy1jNrPSs3byNT//6ZW5/7i0+e9JB3HPViU4OaXL2BCGpDLgZOAtYBrwiaXJEvJZWpw9wCzAhIpZIGpD2Ej8HpkXEJZI6A91yFauZlZ7ZS6v5lz/MZF3Ndn78iaO5+Lgh+Q6p3cllE9PxwKKIWAwg6T7gAuC1tDpXAJMiYglARKxO6vYCTgX+KSnfDmzPYaxmVkLue3kJ335kHgN6deFP//JBjhzcO98htUu5bGIaDCxNO16WlKUbBfSV9LSkmZI+k5SPANYAv5E0S9IdkopvqUQza1Pb6uq5cdIcbphUyQkj+vHoF092cmhBLhNEprFh0eS4I3Ac8FFgPPAtSaOS8mOBX0XEWKAGaK4P42pJMyTNWLNmTasFb2bFZUV1LZf+74vc+/JSvnDGwdx15fH07d4532G1a7lsYloGDE07HgKsyFBnbUTUADWSngWOBp4DlkXES0m9B2kmQUTEbcBtAOPGjWuagMzM+Puba/nSPbPYVtfA/376OMYfcUC+QyoIuUwQrwAjJQ0HlgOXk+pzSPcI8EtJHYHOwAnATyOiStJSSaMjYiHwYXbtuzAza1bj5j7Lq2vp1bUjm7fVMaJ/D/7308dxcP8e+Q6vYOQsQUREnaQvAtOBMuDOiJgn6Zrk/K0RMV/SNGAO0ADcERFzk5f4EnB3MoJpMXBlrmI1s+LRdHOfjVvr6CD455OHOznsIUUUT6vMuHHjYsaMGfkOw8zaUENDsHLjVt5aU8PitZv54dQF1Gx//+Y+g/uU8/wNZ+YhwvZN0syIGJfpnGdSm1lBWF+zncVra1i8ZjNvra3Z5WdbXcNury/2zX1ywQnCzNqN2u31aR/8m1mclgSqt+zYWa9jB3Hgft0YUdGdU0ZWMLyiB8MrujOif3cuuvl5VmzY+r7XLvbNfXLBCcLM2lRdfQPL1tfy1tqaJAEkTwRrat73wT6wd1eGV3Tno2MG7kwAIyp6MKRvOR3LMo/S/8aEQ3fpg4DS2NwnF5wgzKzVRQRrNm3b5Qlg8ZpUMliybgs76t/r++zVtSMj+vfgxBH7JUkg9TQwrKIb3Trv+UdU454NE6cvZEV1LYP6lHP9+NElt5dDa3CCMLO9tnHrDt7eJQGkOorfWlOzS0dx544dGL5fd0YO6Mn4Iw7Y+TQwvKIHfbt1avU9Fy4cO9gJoRU4QZhZi7bV1bN03Zb3EsDORFDD2s3bdtaTYEjfcoZX9GDcQf2SBJD6GdS7nA4dSnfjnULlBGFm7xsq2pgE3lpbw7L1W2hIGw1f0aMzwyu68+FDBzA8SQIjKroztF83unYqy99NWKvbbYKQ1C8i1rVFMGaWW9kOFe3euYzh/btz9NA+XDh2MCOSJ4FhFd3pXd4pj3dgbSmbJ4iXJM0GfgNMjWKaWWdWhFpjqOiAnl1Kei9mS8kmQYwCPgJ8DvgfSX8E7oqI13MamZk1a1+Hig6v6MHQFoaKmkEWCSJ5YngMeEzSGcAfgH+V9CpwQ0S8kOMYzUrSvgwVHZ7WQbw3Q0XNILs+iP2ATwGfBlaRWkRvMnAM8AAwPJcBmhW7vRkqenYyVPTgHA4VNcvmq8ULwO+BCyNiWVr5DEm35iYss+LioaJWiLJJEKOb65iOiB+2cjxmBctDRa3YZJMg/irpExFRDSCpL3BfRIzPbWhm7VO2Q0W7dS5jeIWHilrhyiZB9G9MDgARsV7SgBzGZJZ3Hipqll2CqJd0YEQsAZB0EOC5EFbwWmOo6JC+5XTyUFErUtkkiH8H/ibpmeT4VODq3IVk1no8VNRs72UzD2KapGOBEwEBX42ItTmPzGwP7MtQ0RHJEtMeKmq2q2y/FtUDq4GuwOGSiIhncxeW2fvt7VDR95qEPFTUbE9kM1Hun4FrgSHAbFJPEi8A3v3bWt3eDBU989D+OzeZ8VBRs9aTzRPEtcAHgBcj4gxJhwLfy21YVuz2dKjoUUN6e6ioWRvLJkFsjYitkpDUJSIWSPLmrgbAw7OWN7u14x4NFe3XjRH9PVTUrD3JJkEsk9QHeJjUgn3rgRW5DcsKwcOzlu+yOfzy6lq+9sCr3PL0IjZvrfNQUbMCl80opouSX78r6SmgNzAtp1FZQbjpL/N3JodG9Q3BW2tr+NhRg3YZKjpsv+507+KhomaFpMV/sZI6AHMi4kiAiHimpfpW/CKCZ15fw23PLmZN2sihdHX1wU8uO6aNIzOz1tZigoiIBkmvps+kttK0va6BR19dwe3PLWZB1SYO6NWVXl07snFr3fvqDupTnocIzay1ZfPMPxCYJ+lloKaxMCLOz1lU1m5s2rqDe19ewp1/e5uqjVsZvX9P/t8njub8owcxpXLlLn0QAOWdyrh+vMcwmBWDbBKEh7SWoKoNW/nN829xz0tL2LStjpNG7Md/XzyG00f13zmqqHG0UnOjmMyssGXTSe1+hxKyoGojtz27mMmzV9AQwbljBvL5Uw9mzJDeGetfOHawE4JZkcpmJvUm3lu9tTPQCaiJiF65DMzaTkTwwuJ3ue3ZxTy9cA3lncr41IkH8f+dPJyh/brlOzwzy5NsniB6ph9LuhA4PmcRWc40ndR23Vmj6NSxA7c/u5jK5Ruo6NGZr589ik+deBB9unXOd7hmlmdqZjfRli+SXoyIE3MQzz4ZN25czJgxI99htEtNJ7VBamneAEZUdOeqU0dw0djBXsPIrMRImhkR4zKdy6aJ6eNphx2AcXjDoIIzcfrC901qC6Bf9848ft1pXuHUzN4nm1FMH0v7vQ54G7ggmxeXNAH4OVAG3BERP8hQ53TgZ6T6NtZGxGlJ+dvAJlJLjdc1l+EsOyuqazOWr6/Z7uRgZhll0wdx5d68sKQy4GbgLGAZ8IqkyRHxWlqdPsAtwISIWJJhr+szvDlR6xjUp5zlGZKEJ7WZWXN2u0KapN8mH+SNx30l3ZnFax8PLIqIxRGxHbiP9z95XAFMapylHRGrsw/d9sTnTh72vjJPajOzlmSzhOZREVHdeBAR64GxWVw3GFiadrwsKUs3Cugr6WlJMyV9Ju1cAH9NypvdA1vS1ZJmSJqxZs2aLMIqTa8u3UCnMnFAr64IGNynnP/++BjPYTCzZmXTB9FBUt8kMSCpX5bXZWrYbtq53RE4DvgwUA68kIyQeh34UESsSJqdHpO0INM2pxFxG3AbpEYxZRFXyZm9tJrJr67gS2cewtfO9hODmWUnmw/6HwN/l/QgqQ/4S4GbsrhuGTA07XgI799HYhmpjukaoEbSs8DRwOsRsQJSzU6SHiLVZOV9sPdQRHDTX16jokdnPn/awfkOx8wKyG6bmCLid8DFwCpgDfDxiPh9Fq/9CjBS0nBJn
YHLgclN6jwCnCKpo6RuwAnAfEndJfUEkNQdOBuYm+1N2Xumz1vFK2+v56tnjaKH92Mwsz2QzTyIE4F5EfHL5LinpBMi4qWWrouIOklfBKaTGuZ6Z0TMk3RNcv7WiJgvaRowB2ggNRR2rqQRwEPJonAdgXsiwpsU7aHtdQ38YOp8Rg7owWXjhu7+AjOzNNl8pfwVcGzacU2GsowiYgowpUnZrU2OJwITm5QtJtXUZPvg7pfe4e13t/Cbf/oAHb2lp5ntoWw+NRRp63FERAPZJRbLow21O/j5E2/woUP24/TR/fMdjpkVoGwSxGJJX5bUKfm5Flic68Bs39zy1CI21O7gm+cetnP/BjOzPZFNgrgG+CCwnNSooxOAq3IZlO2bpeu28Jvn3+biY4dwxKDM+ziYme1ONkttrCY1AgkASeXAecADOYzL9sGPpi+kQwf4uuc8mNk+yKrnUlKZpHMk/Q54C7gst2HZ3pq1ZD2PvrqCq08ZwQG9u+Y7HDMrYC0+QUg6ldR6SR8FXgY+BIyIiC1tEJvtodSkuPlU9OjC1Z4UZ2b7qNknCEnLgB8AzwOHR8TFQK2TQ/s1fV4VM95Zz3WeFGdmraClJqY/kVpc7zLgY8mMZq911E6lJsUtYNT+Pbh03JB8h2NmRaDZBBER1wLDgJ8AZwCvA/0lXSqpR9uEZ9n6w4upSXE3nnuYJ8WZWato8ZMkUp6MiKtIJYsrgAtJ7Spn7cSGLTv4xZNvcPIhFZw+ypPizKx1ZN1QHRE7gEeBR5OhrtZO3Py0J8WZWevbq7aIiMi8wbG1uaXrtnDX829zybFDOHxQr3yHY2ZFxI3VBe6H0xZQ1kHeCMjMWp0TRAH7x5L1/HnOSq461ZPizKz1ZbMfxCjgeuCg9PoRcWYO47LdiAj+bzIp7vOnjsh3OGZWhLLppH4AuBW4HajPbTiWrWlzU5Pi/vvjY+juSXFmlgPZfLLURcSvch6J7dbDs5YzcfpCllfXUtZBDOzdlUu9U5yZ5Ug2fRCPSvpXSQMl9Wv8yXlktouHZy3nxkmVLK9ODSCrbwje3bydR19dkefIzKxYZfME8dnkz+vTygJww3cbmjh9IbU7dm3h217fwMTpC7lw7OA8RWVmxSyb/SCGt0Ug1rIV1ZmnnjRXbma2r7IZxdQJ+Bfg1KToaeB/k5nV1kYG9Snf2bzUtNzMLBey6YP4FXAccEvyc1xSZm3o+vGj6dJx17+u8k5lXD/eE+TMLDey6YP4QEQcnXb8pKRXcxWQZXbh2ME8uWA1k5NO6cF9yrl+/Gj3P5hZzmSTIOolHRwRbwJIGoHnQ+TFknVbGDO4N49+6eR8h2JmJSCbBHE98JSkxYBIzai+MqdR2fssr65l9tJqvjHBTUpm1jayGcX0hKSRwGhSCWJBRGzLeWS2i6mVKwE458iBeY7EzEpFswlC0pkR8aSkjzc5dbAkImJSjmOzNFPnVnHYwF4Mr+ie71DMrES09ARxGvAk8LEM5wJwgmgjVRu2MvOd9XztrFH5DsXMSkizCSIivpP8+p8R8Vb6OUmePNeGps1NmpfGuHnJzNpONvMg/pSh7MHWDsSaN2VuFaP378khA3rkOxQzKyEt9UEcChwB9G7SD9EL8O40bWT1pq288vY6rv3wyHyHYmYlpqU+iNHAeUAfdu2H2ARclcug7D3T560iAs5185KZtbGW+iAeAR6RdFJEvNCGMVmaqZUrObh/d0a6ecnM2lg2E+VmSfoCqeamnU1LEfG5nEVlALy7eRsvLn6XL5xxCJLyHY6ZlZhsOql/DxwAjAeeAYaQambaLUkTJC2UtEjSDc3UOV3SbEnzJD3T5FyZpFmS/pzN+xWbv762iobw5Dgzy49sEsQhEfEtoCYifgt8FBizu4sklQE3A+cAhwOflHR4kzp9SK0Qe35EHAF8osnLXAvMzyLGojSlciXD9uvGYQN75jsUMytB2SSIxn0fqiUdCfQGhmVx3fHAoohYHBHbgfuAC5rUuQKYFBFLACJideMJSUNIJaM7snivorO+Zjt/f/Ndzhkz0M1LZpYX2SSI2yT1Bb4FTAZeA36UxXWDgaVpx8uSsnSjgL6SnpY0U9Jn0s79DPgG0NDSm0i6WtIMSTPWrFmTRViF4bH5q6hvCM5185KZ5Uk2i/U1foN/hj3bhzrT197I8P7HAR8GyoEXJL1IKnGsjoiZkk7fTXy3AbcBjBs3runrF6wplSsZ0recIwf3yncoZlaiWpood11LF0bET3bz2suAoWnHQ4AVGeqsjYgaoEbSs8DRwLHA+ZLOJTVyqpekP0TEp3bznkVhw5YdPL9oLVd+aLibl8wsb1pqYuqZ/IwjtSf14OTnGlKdzrvzCjBS0nBJnYHLSTVRpXsEOEVSR0ndgBOA+RFxY0QMiYhhyXVPlkpyAHh8/ip21AfnHHlAvkMxsxLW0kS57wFI+itwbERsSo6/CzywuxeOiDpJXwSmA2XAnRExT9I1yflbI2K+pGnAHFJ9DXdExNx9vKeCN3XuSgb17soxQ/vkOxQzK2HZTJQ7ENiedryd7EYxERFTgClNym5tcjwRmNjCazwNPJ3N+xWDTVt38Ozra/nUiQe5ecnM8iqbBPF74GVJD5HqZL4I+F1OoyphTy5Yzfb6Bj56lJuXzCy/shnFdJOkqcApSdGVETErt2GVrimVK9m/VxfGDu2b71DMrMS1NIqpV0RslNQPeDv5aTzXLyLW5T680lKzrY6nF67hk8cfSIcObl4ys/xq6QniHlLLfc9k1/kLSo73ZE6EZeGphavZVtfg0Utm1i60NIrpvORPby/aRqZWVlHRowvjhvXLdyhmZi02MR3b0oUR8Y/WD6d01W6v58kFq7n4uMGUuXnJzNqBlpqYftzCuQDObOVYStozr6+mdke9114ys3ajpSamM9oykFI3pbKKft07c/xwNy+ZWfuQzTwIkmW+D2fXHeU8F6KVbN1RzxPzV3H+MYPoWJbNArtmZrm32wQh6TvA6aQSxBRSGwD9DU+WazXPvr6Gmu313jnOzNqVbL6uXkJqOe6qiLiS1GqrXXIaVYmZOreK3uWdOOng/fIdipnZTtkkiNqIaADqJPUCVuM5EK1mW109j7+2irMP359Obl4ys3Ykmz6IGcne0beTmjS3GXg5p1GVkOcXrWXTtjrOHePmJTNrX1qaB/FL4J6I+Nek6NZkae5eETGnTaIrAVMqq+jZtSMfOqQi36GYme2ipSeIN4AfSxoI/BG4NyJmt01YpWF7XQN/nVfFWYfvT+eObl4ys/al2U+liPh5RJwEnAasA34jab6kb0sa1WYRFrEXFr/Lxq11nhxnZu3Sbr+2RsQ7EfHDiBgLXEFqP4j5OY+sBEytXEmPLh05eaSbl8ys/dltgpDUSdLHJN0NTAVeBy7OeWRFrq6+genzqvjwYQPo2qks3+GYmb1PS53UZwGfBD5KatTSfcDVEVHTRrEVtZfeWsf6LTs8Oc7M2q2WOqm/SWpPiK97c6DWN6VyJd06l3H66P75DsXMLCMv1pcH9Q3B9HlVnHGom5fMrP3y
2Mo8eOXtdazdvN2jl8ysXXOCyIMplSvp2qmDm5fMrF1zgmhjDQ3B1LlVnD5qAN27ZLXauplZXjhBtLGZS9azZtM2zhlzQL5DMTNrkRNEG5tSuZLOHTtw5qED8h2KmVmLnCDaUENDMG1uFaeO7E/Prp3yHY6ZWYucINrQ7GXVrNywlXPdvGRmBcAJog1NrVxJpzLxkcP3z3coZma75QTRRiKCKZVVnDKyP73cvGRmBcAJoo1ULt/A8upazjnSzUtmVhicINrIlMoqOnYQZ7l5ycwKhBNEG4gIps5dyQcPqaBPt875DsfMLCtOEG3gtZUbeefdLZzr5iUzKyA5TRCSJkhaKGmRpBuaqXO6pNmS5kl6JinrKullSa8m5d/LZZy5NrWyirIO4uwjnCDMrHDkbDEgSWXAzcBZwDLgFUmTI+K1tDp9gFuACRGxRFLj9OJtwJkRseTVAJAAAAuqSURBVFlSJ+BvkqZGxIu5ijdXUqOXVnLiiH706+7mJTMrHLl8gjgeWBQRiyNiO6kd6S5oUucKYFJELAGIiNXJnxERm5M6nZKfyGGsObNw1SYWr63xznFmVnBymSAGA0vTjpclZelGAX0lPS1ppqTPNJ6QVCZpNrAaeCwiXsr0JpKuljRD0ow1a9a08i3suymVVUgw3s1LZlZgcpkglKGs6VNAR+A4Uvtejwe+JWkUQETUR8QxwBDgeElHZnqTiLgtIsZFxLj+/dvf/gpTK1dy/LB+9O/ZJd+hmJntkVwmiGXA0LTjIcCKDHWmRURNRKwFngWOTq8QEdXA08CE3IWaG2+s2sQbqzdz7hg3L5lZ4cllgngFGClpuKTOwOXA5CZ1HgFOkdRRUjfgBGC+pP5JBzaSyoGPAAtyGGtOTJ1bBcAED281swKUs1FMEVEn6YvAdKAMuDMi5km6Jjl/a0TMlzQNmAM0AHdExFxJRwG/TUZCdQDuj4g/5yrWXJlSuZJxB/Vl/15d8x2Kmdkey+melxExBZjSpOzWJscTgYlNyuYAY3MZW64tXrOZBVWb+PZ5h+c7FDOzveKZ1Dni5iUzK3ROEDkyde5Kxh7Yh0F9yvMdipnZXnGCyIEl725h7vKNnOvJcWZWwJwgcmDq3JWAm5fMrLA5QeTAlLlVHDWkN0P7dct3KGZme80JopUtW7+FV5dWe+0lMyt4ThCtbFoyeslbi5pZoXOCaGVTKldy+MBeDKvonu9QzMz2iRNEK1q5oZZ/LKnm3DF+ejCzwucE0Yp2Ni95cT4zKwJOEK1oamUVo/fvycH9e+Q7FDOzfeYE0UpWb9zKK++s4xw3L5lZkXCCaCXT51URgfd+MLOikdPVXEvBw7OWM3H6QpZX19Kxg3htxUZG7d8z32GZme0zJ4h98PCs5dw4qZLaHfUA1DUEN06qBODCsU233zYzKyxuYtoHE6cv3JkcGtXuqGfi9IV5isjMrPU4QeyD5dW1GctXNFNuZlZI3MS0FxZWbWrxKcF7QJhZMXCC2ANL123hp4+/zkOzltOjc0c+OmYgT8xfxda6hp11yjuVcf340XmM0sysdThBZGHt5m388slF3P3SO3SQuPqUEVxz2sH07d555yimFdW1DOpTzvXjR7uD2syKghNECzZt3cHtz73FHc8tZuuOei4dN5RrPzKSgb3fa0K6cOxgJwQzK0pOEBls3VHPH158h5ufWsT6LTs4d8wBXHfWaA4Z4CU0zKx0lHyCSG8iGti7K6eN7s+zr69leXUtp4ys4PrxozlqSJ98h2lm1uZKOkE0nei2YsNW7n15KUP7lnP3P5/Ahw6pyHOEZmb5U9LzIDJNdAOobwgnBzMreSWdIJqb0LZyw9Y2jsTMrP0p6QTR3IQ2T3QzMyvxBHH9+NGUdyrbpcwT3czMUkq6k7px/oInupmZvV9JJwjwRDczs+aUdBOTmZk1zwnCzMwycoIwM7OMnCDMzCwjJwgzM8tIEZHvGFqNpDXAO1lUrQDW5jicfCr2+4Piv8divz/wPbYXB0VE/0wniipBZEvSjIgYl+84cqXY7w+K/x6L/f7A91gI3MRkZmYZOUGYmVlGpZogbst3ADlW7PcHxX+PxX5/4Hts90qyD8LMzHavVJ8gzMxsN5wgzMwso5JKEJImSFooaZGkG/IdT2uQdKek1ZLmppX1k/SYpDeSP/vmM8Z9IWmopKckzZc0T9K1SXkx3WNXSS9LejW5x+8l5UVzjwCSyiTNkvTn5LjY7u9tSZWSZkuakZQV9D2WTIKQVAbcDJwDHA58UtLh+Y2qVdwFTGhSdgPwRESMBJ5IjgtVHfC1iDgMOBH4QvL3Vkz3uA04MyKOBo4BJkg6keK6R4Brgflpx8V2fwBnRMQxaXMfCvoeSyZBAMcDiyJicURsB+4DLshzTPssIp4F1jUpvgD4bfL7b4EL2zSoVhQRKyPiH8nvm0h9wAymuO4xImJzctgp+QmK6B4lDQE+CtyRVlw099eCgr7HUkoQg4GlacfLkrJitH9ErITUBywwIM/xtApJw4CxwEsU2T0mzS+zgdXAYxFRbPf4M+AbQENaWTHdH6SS+l8lzZR0dVJW0PdYSjvKKUOZx/gWCEk9gD8BX4mIjVKmv87CFRH1wDGS+gAPSToy3zG1FknnAasjYqak0/MdTw59KCJWSBoAPCZpQb4D2lel9ASxDBiadjwEWJGnWHJtlaSBAMmfq/Mczz6R1IlUcrg7IiYlxUV1j40iohp4mlS/UrHc44eA8yW9Tapp90xJf6B47g+AiFiR/LkaeIhUs3ZB32MpJYhXgJGShkvqDFwOTM5zTLkyGfhs8vtngUfyGMs+UepR4dfA/Ij4SdqpYrrH/smTA5LKgY8ACyiSe4yIGyNiSEQMI/Xv7smI+BRFcn8AkrpL6tn4O3A2MJcCv8eSmkkt6VxSbaFlwJ0RcVOeQ9pnku4FTie1rPAq4DvAw8D9wIHAEuATEdG0I7sgSDoZeA6o5L3262+S6ocolns8ilQHZhmpL233R8R/StqPIrnHRkkT09cj4rxiuj9JI0g9NUCq6f6eiLip0O+xpBKEmZllr5SamMzMbA84QZiZWUZOEGZmlpEThJmZZeQEYWZmGTlBWMGQ9FNJX0k7ni7pjrTjH0u6roXr75J0SfL705Let5m8pE6SfpCsvjk3WWX1nOTc25Iq9iLune/bzPmbkxVAX5NUm/w+W9IlkqY0zpFoTZIGNq6q2sz5zpKelVRKqy1YE04QVkj+DnwQQFIHUnM/jkg7/0Hg+X18j+8DA4EjI+JI4GNAz318zRZFxBci4hjgXODNZDXQYyLiwYg4N5ld3dquA25vIabtpFYfvSwH720FwgnCCsnzJAmCVGKYC2yS1FdSF+AwYJakb0t6JXkCuE1ZLtwkqRtwFfCliNgGEBGrIuL+DHWvS15/bpOnms9ImpPs7fD7DNd9P3miyOrfXuNTi6RhkhZIuiN5z7slfUTS88nTzvFJ/e5K7RHyilJ7LzS3YvHFwLTkmiOSJ6XZSewjkzoPA/8nmzitOPnx0QpGshBanaQDSSWKF0ityHsSsAGYExH
bJf0yIv4TIPmQPg94NIu3OARYEhEbW6ok6TjgSuAEUotAviTpGWA78O+kFm1bK6lfk+t+BPQGroy9m6F6CPAJ4GpSS8dcAZwMnE9qdvmFyfs/GRGfS5qmXpb0eETUpMUxHFjfmASBa4CfR8TdyTI0ZUn5XOADexGnFQk/QVihaXyKaEwQL6Qd/z2pc4aklyRVAmeyazNUazgZeCgiapJ9HCYBpyTv9WBErAVosqTCt4A+EfH5vUwOAG9FRGVENADzSG1EE6SWIRmW1DkbuEGppcOfBrqSWuYh3UBgTdrxC8A3Jf0bcFBE1Cbx1wPbG9cYstLjBGGFprEfYgypb7gvknqC+CDwvKSuwC3AJRExhlQ7e9csX3sRcGAWH4jNNVmJ5peQfwU4rulTxR7alvZ7Q9pxA++1Bgi4OK0f48CISN/FDaCWtP8mEXEPqaeQWmC6pDPT6nYBtu5DzFbAnCCs0DxPqsloXUTUJ9/S+5BKEi/w3gffWqX2kGh29FBTEbGF1Mqxv0iaWhpH+3yqSdVngQsldUtW7ryI1IKCTwCXJgu00SQZTAN+APwlx9/IpwNfaux3kTQ2Q53Xee+Jo3GhucUR8QtSq48elZTvB6yJiB05jNfaMScIKzSVpEYvvdikbENErE1G/NyelD1M6pv7nvgPUs0vr0mam7xGenMMyRaodwEvk1pV9o6ImBUR84CbgGckvQr8pMl1DySxTU6W9c6F75PasnROEv/3m1ZI+iPelHRIUnQZMDdpljoU+F1SfgYwJUdxWgHwaq5mJUjSRcBxEfEfLdSZBNwYEQvbLjJrTzyKyawERcRDjU1hmSRNbA87OZQ2P0GYmVlG7oMwM7OMnCDMzCwjJwgzM8vICcLMzDJygjAzs4z+f1uGjPL173e5AAAAAElFTkSuQmCC\n" + "image/svg+xml": "\r\n\r\n\r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3df7xVdZ3v8dfbA+qxxIOJDRxEdFQKLUFJM7PUMrBUqKzU29i1R/6YqcbyBiM19vM6Y8OtqW4alxgry9+EiA2BTgZOpvIjiJ/iIBlyMIGxk4YngcPn/rHWxs12ncMCz9qbs/f7+Xicx9nru359lj/253y/3/X9fhURmJmZVdqv1gGYmdm+yQnCzMwyOUGYmVkmJwgzM8vkBGFmZpmcIMzMLJMThNlekHSGpNW1jsOsSE4Q1utIekrSu2sZQ0T8Z0QMK+r6kkZLekjSC5I2SZon6YKi7meWxQnCLIOkphre+0LgbuAWYDDweuCLwPl7cS1J8v/ntlf8H47VDUn7SbpW0pOS/lvSXZIOLdt/t6Q/SPpT+tf58WX7fijpe5JmSdoCnJXWVD4naWl6zp2SDkyPP1PS+rLzuzw23T9B0jOSNkj6hKSQdEzGMwj4JvC1iJgaEX+KiB0RMS8iLk+P+bKkn5SdMzS9Xp90e66k6yU9DLwIfF7Swor7fFbSzPTzAZL+j6R1kp6VNFlS86v812F1wAnC6snfA+OAdwKDgD8CN5bt/zlwLHA48Bvg1orzLwGuBw4GfpWWfRgYAxwFvBn4n93cP/NYSWOAa4B3A8ek8XVlGHAEMK2bY/L4G+AKkmf5v8AwSceW7b8EuC39/HXgOGBEGl8rSY3FGpwThNWTK4EvRMT6iHgJ+DJwYekv64i4OSJeKNt3oqRDys6/NyIeTv9i/0ta9p2I2BARzwH3kXyJdqWrYz8M/CAiVkTEi8BXurnG69Lfz+R+6mw/TO+3PSL+BNwLXAyQJoo3ADPTGsvlwGcj4rmIeAH4J+CiV3l/qwNOEFZPjgTukdQuqR1YBXQCr5fUJOmGtPnpeeCp9JzDys5/OuOafyj7/CLw2m7u39WxgyqunXWfkv9Ofw/s5pg8Ku9xG2mCIKk9zEiT1QDgIGBR2T+32Wm5NTgnCKsnTwPnRkRL2c+BEdFG8qU4lqSZ5xBgaHqOys4vamrjZ0g6m0uO6ObY1STP8cFujtlC8qVe8lcZx1Q+y/3AYZJGkCSKUvPSZqADOL7sn9khEdFdIrQG4QRhvVVfSQeW/fQBJgPXSzoSQNIASWPT4w8GXiL5C/0gkmaUarkLuEzSGyUdRDft+5HMv38NcJ2kyyT1Szvf3y5pSnrYEuAdkoakTWQTdxdARGwn6deYBBwKPJCW7wC+D/yrpMMBJLVKGr3XT2t1wwnCeqtZJH/5ln6+DHwbmAncL+kF4FHg1PT4W4DfA23AynRfVUTEz4HvAL8E1gCPpLte6uL4acBHgI8DG4Bngf9N0o9ARDwA3AksBRYBP8sZym0kNai704RR8g9pXI+mzW//QdJZbg1OXjDIrLokvRFYDhxQ8UVttk9xDcKsCiS9X9L+kvqTvFZ6n5OD7eucIMyq40pgE/AkyZtVf1vbcMx2z01MZmaWyTUIMzPL1KfWAfSkww47LIYOHVrrMMzMeo1FixZtjojMgZF1lSCGDh3KwoULd3+gmZkBIOn3Xe0rtIlJ0hhJqyWtkXRtF8ecKWmJpBWS5qVlw9Ky0s/zkj5TZKxmZrarwmoQ6Xz6NwLnAOuBBZJmRsTKsmNagJuAMRGxrjSSMyJWk050ll6nDbinqFjNzOyViqxBnAKsiYi1EbEVuINkLpxylwDTI2IdQERszLjOu4AnI6LLapCZmfW8IhNEK7vOKLk+LSt3HNA/XeBkkaRLM65zEXB7QTGamVkXiuykVkZZ5aCLPsDJJLWEZuARSY9GxBMAkvYHLqCbycgkXUGyMApDhgzpgbDNzAyKTRDr2XVa48EkE49VHrM5IrYAWyQ9BJwIPJHuPxf4TUQ829VNImIKMAVg1KhRHvVnZg1jxuI2Js1ZzYb2Dga1NDN+9DDGjaxsqNl7RTYxLQCOlXRUWhO4iGSmzXL3AmdI6pNOg3wqySIvJRfj5iUzs1eYsbiNidOX0dbeQQBt7R1MnL6MGYvbeuwehSWIdCKyTwFzSL7074qIFZKuknRVeswqktWrlgLzgakRsRwgTRjnANOLitHMrLeaNGc1Hds6dynr2NbJpDmre+wehQ6Ui4hZJPP2l5dNrtieRLKISeW5L/Ly+rxmZlZmQ3vHHpXvDc/FZGbWCw1qad6j8r3hBGFm1guNHz2M5r5Nu5Q1921i/OieWwywruZiMjNrFKW3lSZMW8rWzh20FvAWkxOEmVkvNW5kK7fPXwfAnVee1uPXdxOTmZllcoIwM7NMThBmZpbJCcLMzDI5QZiZWSYnCDMzy+QEYWZmmZwgzMwskxOEmZllcoIwM7NMThBmZpbJCcLMzDI5QZiZWSYnCDMzy+QEYWZmmZwgzMwskxOEmZllcoIwM7NMThBmZpbJa1KbmaVmLG5j0pzVbGjvYFBLM+NHD2PcyNZah1UzThBmZiTJYeL0ZXRs6wSgrb2DidOXATRsknCCMDMDJs1ZvTM5lHRs62TCtKXcPn9djaLavZXPPM/wgf0Kubb7IMzMgA3tHZnlWzt3VDmSPTN8YD/GjiimhuMahJkZMKilmbaMJNHa0sydV55Wg4hqzzUIMzNg/OhhNPdt2qWsuW8T40cPq1FEtecahJkZL3dET5i2lK2dO2j1W0xOEGZmJeNGtu7skG7UZqVybmIyM7NMThBmZpbJCcLMzDIVmiAkjZG0WtIaSdd2ccyZkpZIWiFpXll5i6Rpkh6XtEqSGwTNzKqosE5qSU3AjcA5wHpggaSZEbGy7JgW4CZgTESsk3R42SW+DcyOiAsl7Q8cVFSsZmb2SkXWIE4B1kTE2ojYCtwBjK045hJgekSsA4iIjQCS+gHvAP4tLd8aEe0FxmpmZhWKTBCtwNNl2+vTsnLHAf0lzZW0SNKlafnRwCbgB5IWS5oq6TUFxmpmZhWKTBDKKIuK7T7AycD7gNHAdZKOS8tPAr4XESOBLUBXfRhXSFooaeGmTZt6LHgzs0ZXZIJYDxxRtj0Y2JBxzOyI2BIRm4GHgBPT8vUR8Vh63DSShPEKETElIkZFxKgBAwb06AOYmTWyIhPEAuBYSUelncwXATMrjrkXOENSH0kHAacCqyLiD8DTkkqToLwLWImZNbQZi9s4/YYHOeraf+f0Gx5kxuK2WodU1wp7iykitkv6FDAHaAJujogVkq5K90+OiFWSZgNLgR3A1IhYnl7i08CtaXJZC1xWVKxmtu/zgj7Vp4jKboHea9SoUbFw4cJah2FmBTj9hgczp+Pev2k/Rg5p6bH7lBbgaZS5mCQtiohRWfs8ktrMeoVqLehT5AI8vY1nczWzXsEL+lSfaxBm1it4QZ/qcw3CzHoFL+hTfU4QZtZreEGf6nITk5mZZXKCMDOzTL
... [several thousand characters of base64-encoded "image/png" cell output (the validation accuracy vs. wall clock time plot) omitted] ...
rpqsRNdTyC8ATq6sVeyhl8o+7yjb3sHLrQECPljWjzEkIspXYAPooOyfSUTcRlIL6QDmSDq77NgDgL+8ipitF3OCsN7mYZImo+ciojP9K72FJEk8wstffJuVrBHR5dtDlSLiRZKZYb+TNrWU3vb5aMWhDwHjJB2UzrL5fpIJA38BfDidTI2KZDAbuAH494L/Ip8DfLrU7yJpZMYxT/ByjaM0KdzaiPgOyUyhb07LXwdsiohtBcZr+zAnCOttlpG8vfRoRdmfImJz+sbP99OyGSR/ue+JfyRpflkpaXl6jfLmGNIlTn8IzCeZNXZqRCyOiBXA9cA8Sb8Fvllx3t1pbDPTabqL8DWSJUiXpvF/rfKAtD/iSUnHpEUfAZanzVJvAG5Jy88CZhUUp/UCns3VrAFJej9wckT8YzfHTAcmRsTq6kVm+xK/xWTWgCLinlJTWJa0iTco/qkAAAAuSURBVG2Gk0Njcw3CzMwyuQ/CzMwyOUGYmVkmJwgzM8vkBGFmZpmcIMzMLNP/ByWwIdumQhb/AAAAAElFTkSuQmCC\n" }, "metadata": { "needs_background": "light" @@ -416,7 +487,7 @@ "plt.xlabel('Wall Clock Time (s)')\n", "plt.ylabel('Validation Accuracy')\n", "plt.scatter(time_history, 1-np.array(valid_loss_history))\n", - "plt.plot(time_history, 1-np.array(best_valid_loss_history))\n", + "plt.step(time_history, 1-np.array(best_valid_loss_history), where='post')\n", "plt.show()" ] }, @@ -465,7 +536,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 14, "metadata": { "slideshow": { "slide_type": "slide" @@ -570,7 +641,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 15, "metadata": { "slideshow": { "slide_type": "slide" @@ -584,7 +655,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 16, "metadata": { "slideshow": { "slide_type": "slide" @@ -594,124 +665,118 @@ "outputs": [ { "output_type": "stream", - "name": "stdout", + "name": "stderr", "text": [ - "INFO: 'flaml_custom' does not exist. Creating a new experiment\n", - "[flaml.automl: 02-05 13:31:31] {820} INFO - Evaluation method: holdout\n", - "[flaml.automl: 02-05 13:31:31] {545} INFO - Using StratifiedKFold\n", - "[flaml.automl: 02-05 13:31:31] {841} INFO - Minimizing error metric: 1-accuracy\n", - "[flaml.automl: 02-05 13:31:31] {861} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", - "[flaml.automl: 02-05 13:31:31] {920} INFO - iteration 0 current learner RGF\n", - "[flaml.automl: 02-05 13:31:34] {1074} INFO - at 1.2s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", - "[flaml.automl: 02-05 13:31:34] {920} INFO - iteration 1 current learner RGF\n", - "[flaml.automl: 02-05 13:31:35] {1074} INFO - at 3.8s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", - "[flaml.automl: 02-05 13:31:35] {920} INFO - iteration 2 current learner RGF\n", - "[flaml.automl: 02-05 13:31:36] {1074} INFO - at 4.6s,\tbest RGF's error=0.3840,\tbest RGF's error=0.3840\n", - "[flaml.automl: 02-05 13:31:36] {920} INFO - iteration 3 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:38] {1074} INFO - at 4.6s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", - "[flaml.automl: 02-05 13:31:38] {920} INFO - iteration 4 current learner RGF\n", - "[flaml.automl: 02-05 13:31:41] {1074} INFO - at 7.7s,\tbest RGF's error=0.3751,\tbest RGF's error=0.3751\n", - "[flaml.automl: 02-05 13:31:41] {920} INFO - iteration 5 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:41] {1074} INFO - at 9.7s,\tbest lgbm's error=0.3777,\tbest RGF's error=0.3751\n", - "[flaml.automl: 02-05 13:31:41] {920} INFO - iteration 6 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:44] {1074} INFO - at 9.7s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", - "[flaml.automl: 02-05 13:31:44] {920} INFO - iteration 7 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:44] {1074} INFO - at 12.7s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", - "[flaml.automl: 02-05 13:31:44] {920} INFO - iteration 8 current 
learner lgbm\n", - "[flaml.automl: 02-05 13:31:46] {1074} INFO - at 12.7s,\tbest lgbm's error=0.3662,\tbest lgbm's error=0.3662\n", - "[flaml.automl: 02-05 13:31:46] {920} INFO - iteration 9 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:48] {1074} INFO - at 14.9s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", - "[flaml.automl: 02-05 13:31:48] {920} INFO - iteration 10 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 16.8s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 11 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 18.7s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 12 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 18.7s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 13 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 18.8s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 14 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 18.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 15 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 18.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 16 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:50] {1074} INFO - at 19.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:50] {920} INFO - iteration 17 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 18 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.1s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 19 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.2s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 20 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.3s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 21 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.3s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 22 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.4s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 23 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:51] {1074} INFO - at 20.5s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:51] {920} INFO - iteration 24 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:52] {1074} INFO - at 20.6s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:52] {920} INFO - iteration 25 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:52] {1074} INFO - at 20.7s,\tbest xgboost's error=0.3621,\tbest 
lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:52] {920} INFO - iteration 26 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:52] {1074} INFO - at 20.8s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 02-05 13:31:52] {920} INFO - iteration 27 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:55] {1074} INFO - at 21.1s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 02-05 13:31:55] {920} INFO - iteration 28 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:55] {1074} INFO - at 24.1s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 02-05 13:31:55] {920} INFO - iteration 29 current learner lgbm\n", - "[flaml.automl: 02-05 13:31:56] {1074} INFO - at 24.8s,\tbest lgbm's error=0.3618,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 02-05 13:31:56] {920} INFO - iteration 30 current learner RGF\n", - "[flaml.automl: 02-05 13:31:57] {1074} INFO - at 25.8s,\tbest RGF's error=0.3751,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 02-05 13:31:57] {920} INFO - iteration 31 current learner xgboost\n", - "[flaml.automl: 02-05 13:31:57] {1074} INFO - at 26.2s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 02-05 13:31:57] {920} INFO - iteration 32 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:00] {1074} INFO - at 27.2s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 02-05 13:32:00] {920} INFO - iteration 33 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:00] {1074} INFO - at 29.5s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 02-05 13:32:00] {920} INFO - iteration 34 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:01] {1074} INFO - at 30.2s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 02-05 13:32:01] {920} INFO - iteration 35 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:05] {1074} INFO - at 32.3s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:05] {920} INFO - iteration 36 current learner rf\n", - "[flaml.automl: 02-05 13:32:06] {1074} INFO - at 34.8s,\tbest rf's error=0.3998,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:06] {920} INFO - iteration 37 current learner rf\n", - "[flaml.automl: 02-05 13:32:06] {1074} INFO - at 35.2s,\tbest rf's error=0.3998,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:06] {920} INFO - iteration 38 current learner rf\n", - "[flaml.automl: 02-05 13:32:07] {1074} INFO - at 35.6s,\tbest rf's error=0.3998,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:07] {920} INFO - iteration 39 current learner rf\n", - "[flaml.automl: 02-05 13:32:07] {1074} INFO - at 35.9s,\tbest rf's error=0.3998,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:07] {920} INFO - iteration 40 current learner rf\n", - "[flaml.automl: 02-05 13:32:07] {1074} INFO - at 36.4s,\tbest rf's error=0.3998,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:07] {920} INFO - iteration 41 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:08] {1074} INFO - at 37.3s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:08] {920} INFO - iteration 42 current learner RGF\n", - "[flaml.automl: 02-05 13:32:09] {1074} INFO - at 38.4s,\tbest RGF's error=0.3751,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:09] {920} INFO - 
iteration 43 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:11] {1074} INFO - at 39.9s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:11] {920} INFO - iteration 44 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:12] {1074} INFO - at 41.4s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:12] {920} INFO - iteration 45 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:13] {1074} INFO - at 42.4s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:13] {920} INFO - iteration 46 current learner rf\n", - "[flaml.automl: 02-05 13:32:14] {1074} INFO - at 43.3s,\tbest rf's error=0.3954,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:14] {920} INFO - iteration 47 current learner xgboost\n", - "[flaml.automl: 02-05 13:32:15] {1074} INFO - at 43.7s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:15] {920} INFO - iteration 48 current learner RGF\n", - "[flaml.automl: 02-05 13:32:32] {1074} INFO - at 60.7s,\tbest RGF's error=0.3572,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 02-05 13:32:32] {1114} INFO - selected model: XGBClassifier(base_score=0.5, booster='gbtree',\n", - " colsample_bylevel=0.9320142747883016, colsample_bynode=1,\n", - " colsample_bytree=0.9700688784239055, gamma=0, gpu_id=-1,\n", - " grow_policy='lossguide', importance_type='gain',\n", - " interaction_constraints=None, learning_rate=0.1803601073103824,\n", - " max_delta_step=0, max_depth=0, max_leaves=22,\n", - " min_child_weight=14.640318514250904, missing=nan,\n", - " monotone_constraints=None, n_estimators=143, n_jobs=-1,\n", - " num_parallel_tree=1, random_state=0,\n", - " reg_alpha=1.986243711660331e-08, reg_lambda=0.19460138956942644,\n", - " scale_pos_weight=1, subsample=1.0, tree_method='hist',\n", - " validate_parameters=False, verbosity=0)\n", - "[flaml.automl: 02-05 13:32:32] {875} INFO - fit succeeded\n" + "[flaml.automl: 02-17 13:47:25] {840} INFO - Evaluation method: holdout\n", + "INFO - Evaluation method: holdout\n", + "[flaml.automl: 02-17 13:47:26] {565} INFO - Using StratifiedKFold\n", + "INFO - Using StratifiedKFold\n", + "[flaml.automl: 02-17 13:47:26] {861} INFO - Minimizing error metric: 1-accuracy\n", + "INFO - Minimizing error metric: 1-accuracy\n", + "[flaml.automl: 02-17 13:47:26] {881} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", + "INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", + "[flaml.automl: 02-17 13:47:26] {940} INFO - iteration 0 current learner RGF\n", + "INFO - iteration 0 current learner RGF\n", + "[flaml.automl: 02-17 13:47:28] {1094} INFO - at 1.4s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "INFO - at 1.4s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "[flaml.automl: 02-17 13:47:28] {940} INFO - iteration 1 current learner RGF\n", + "INFO - iteration 1 current learner RGF\n", + "[flaml.automl: 02-17 13:47:29] {1094} INFO - at 4.0s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "INFO - at 4.0s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "[flaml.automl: 02-17 13:47:29] {940} INFO - iteration 2 current learner RGF\n", + "INFO - iteration 2 current learner RGF\n", + "[flaml.automl: 02-17 13:47:30] {1094} INFO - at 4.9s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "INFO - at 4.9s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + 
"[flaml.automl: 02-17 13:47:30] {940} INFO - iteration 3 current learner lgbm\n", + "INFO - iteration 3 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:32] {1094} INFO - at 5.0s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "INFO - at 5.0s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 02-17 13:47:32] {940} INFO - iteration 4 current learner RGF\n", + "INFO - iteration 4 current learner RGF\n", + "[flaml.automl: 02-17 13:47:35] {1094} INFO - at 7.9s,\tbest RGF's error=0.3658,\tbest RGF's error=0.3658\n", + "INFO - at 7.9s,\tbest RGF's error=0.3658,\tbest RGF's error=0.3658\n", + "[flaml.automl: 02-17 13:47:35] {940} INFO - iteration 5 current learner lgbm\n", + "INFO - iteration 5 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:35] {1094} INFO - at 9.8s,\tbest lgbm's error=0.3777,\tbest RGF's error=0.3658\n", + "INFO - at 9.8s,\tbest lgbm's error=0.3777,\tbest RGF's error=0.3658\n", + "[flaml.automl: 02-17 13:47:35] {940} INFO - iteration 6 current learner lgbm\n", + "INFO - iteration 6 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:35] {1094} INFO - at 9.8s,\tbest lgbm's error=0.3669,\tbest RGF's error=0.3658\n", + "INFO - at 9.8s,\tbest lgbm's error=0.3669,\tbest RGF's error=0.3658\n", + "[flaml.automl: 02-17 13:47:35] {940} INFO - iteration 7 current learner lgbm\n", + "INFO - iteration 7 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:35] {1094} INFO - at 9.9s,\tbest lgbm's error=0.3669,\tbest RGF's error=0.3658\n", + "INFO - at 9.9s,\tbest lgbm's error=0.3669,\tbest RGF's error=0.3658\n", + "[flaml.automl: 02-17 13:47:35] {940} INFO - iteration 8 current learner lgbm\n", + "INFO - iteration 8 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:35] {1094} INFO - at 9.9s,\tbest lgbm's error=0.3662,\tbest RGF's error=0.3658\n", + "INFO - at 9.9s,\tbest lgbm's error=0.3662,\tbest RGF's error=0.3658\n", + "[flaml.automl: 02-17 13:47:35] {940} INFO - iteration 9 current learner lgbm\n", + "INFO - iteration 9 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:37] {1094} INFO - at 10.0s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", + "INFO - at 10.0s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", + "[flaml.automl: 02-17 13:47:37] {940} INFO - iteration 10 current learner lgbm\n", + "INFO - iteration 10 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:39] {1094} INFO - at 12.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 12.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:39] {940} INFO - iteration 11 current learner lgbm\n", + "INFO - iteration 11 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:39] {1094} INFO - at 14.0s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 14.0s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:39] {940} INFO - iteration 12 current learner lgbm\n", + "INFO - iteration 12 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:39] {1094} INFO - at 14.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 14.1s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:39] {940} INFO - iteration 13 current learner lgbm\n", + "INFO - iteration 13 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:39] {1094} INFO - at 14.2s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 14.2s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 
02-17 13:47:39] {940} INFO - iteration 14 current learner lgbm\n", + "INFO - iteration 14 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:40] {1094} INFO - at 14.4s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 14.4s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:40] {940} INFO - iteration 15 current learner lgbm\n", + "INFO - iteration 15 current learner lgbm\n", + "[flaml.automl: 02-17 13:47:41] {1094} INFO - at 16.0s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 16.0s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:41] {940} INFO - iteration 16 current learner xgboost\n", + "INFO - iteration 16 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:41] {1094} INFO - at 16.1s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "INFO - at 16.1s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:41] {940} INFO - iteration 17 current learner RGF\n", + "INFO - iteration 17 current learner RGF\n", + "[flaml.automl: 02-17 13:47:42] {1094} INFO - at 17.1s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "INFO - at 17.1s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:42] {940} INFO - iteration 18 current learner xgboost\n", + "INFO - iteration 18 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:42] {1094} INFO - at 17.2s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "INFO - at 17.2s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:42] {940} INFO - iteration 19 current learner xgboost\n", + "INFO - iteration 19 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:42] {1094} INFO - at 17.2s,\tbest xgboost's error=0.3757,\tbest lgbm's error=0.3621\n", + "INFO - at 17.2s,\tbest xgboost's error=0.3757,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:42] {940} INFO - iteration 20 current learner xgboost\n", + "INFO - iteration 20 current learner xgboost\n", + "[flaml.automl: 02-17 13:47:42] {1094} INFO - at 17.3s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3621\n", + "INFO - at 17.3s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:42] {940} INFO - iteration 21 current learner RGF\n", + "INFO - iteration 21 current learner RGF\n", + "[flaml.automl: 02-17 13:47:44] {1094} INFO - at 18.4s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "INFO - at 18.4s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:47:44] {940} INFO - iteration 22 current learner RGF\n", + "INFO - iteration 22 current learner RGF\n", + "[flaml.automl: 02-17 13:48:30] {1094} INFO - at 65.2s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "INFO - at 65.2s,\tbest RGF's error=0.3658,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:48:30] {1135} INFO - selected model: LGBMClassifier(learning_rate=0.7179196339383696, max_bin=511,\n", + " min_child_weight=2.776007506782275, n_estimators=6, num_leaves=4,\n", + " objective='binary', reg_alpha=9.69511928836042e-10,\n", + " reg_lambda=0.17744769739709204, subsample=0.8746997476758036)\n", + "INFO - selected model: LGBMClassifier(learning_rate=0.7179196339383696, max_bin=511,\n", + " min_child_weight=2.776007506782275, n_estimators=6, num_leaves=4,\n", + " objective='binary', reg_alpha=9.69511928836042e-10,\n", + " reg_lambda=0.17744769739709204, 
subsample=0.8746997476758036)\n", + "[flaml.automl: 02-17 13:48:30] {895} INFO - fit succeeded\n", + "INFO - fit succeeded\n" ] } ], diff --git a/notebook/flaml_demo.ipynb b/notebook/flaml_demo.ipynb index ff8ccc345c..c330ba78c2 100644 --- a/notebook/flaml_demo.ipynb +++ b/notebook/flaml_demo.ipynb @@ -33,6 +33,68 @@ "```" ] }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting flaml[notebook]\n", + " Downloading FLAML-0.2.3-py3-none-any.whl (77 kB)\n", + "Requirement already satisfied: scipy>=1.4.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (1.4.1) WARNING: The script optuna.exe is installed in 'C:\\Users\\chiw\\Miniconda3\\envs\\flaml\\Scripts' which is not on PATH.\n", + " Consider adding this directory to PATH or, if you prefer to suppress this warning, use --no-warn-script-location.\n", + "\n", + "Processing c:\\users\\chiw\\appdata\\local\\pip\\cache\\wheels\\38\\61\\9e\\955ab1890f6cab231b1d756db63f36c711968a324296e0b649\\optuna-2.3.0-py3-none-any.whl\n", + "Requirement already satisfied: xgboost>=0.90 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (1.3.3)\n", + "Requirement already satisfied: catboost>=0.23 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (0.23.2)\n", + "Requirement already satisfied: NumPy>=1.16.2 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (1.18.4)\n", + "Requirement already satisfied: scikit-learn>=0.23.2 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (0.23.2)\n", + "Requirement already satisfied: lightgbm>=2.3.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (2.3.1)\n", + "Requirement already satisfied: matplotlib==3.2.0; extra == \"notebook\" in c:\\users\\chiw\\appdata\\roaming\\python\\python37\\site-packages (from flaml[notebook]) (3.2.0)\n", + "Requirement already satisfied: rgf-python; extra == \"notebook\" in c:\\users\\chiw\\appdata\\roaming\\python\\python37\\site-packages (from flaml[notebook]) (3.9.0)\n", + "Requirement already satisfied: openml==0.10.2; extra == \"notebook\" in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (0.10.2)\n", + "Requirement already satisfied: jupyter; extra == \"notebook\" in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from flaml[notebook]) (1.0.0)\n", + "Requirement already satisfied: packaging>=20.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (20.4)\n", + "Requirement already satisfied: alembic in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (1.4.1)\n", + "Requirement already satisfied: sqlalchemy>=1.1.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (1.3.20)\n", + "Requirement already satisfied: tqdm in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (4.56.1)\n", + "Requirement already satisfied: cliff in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (3.5.0)\n", + "Requirement already satisfied: joblib in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (0.14.1)\n", + "Requirement already satisfied: 
cmaes>=0.6.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (0.7.0)\n", + "Requirement already satisfied: colorlog in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from optuna==2.3.0->flaml[notebook]) (4.6.2)\n", + "Requirement already satisfied: graphviz in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from catboost>=0.23->flaml[notebook]) (0.14.1)\n", + "Requirement already satisfied: plotly in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from catboost>=0.23->flaml[notebook]) (4.9.0)\n", + "Requirement already satisfied: pandas>=0.24.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from catboost>=0.23->flaml[notebook]) (0.24.2)\n", + "Requirement already satisfied: six in c:\\users\\chiw\\appdata\\roaming\\python\\python37\\site-packages (from catboost>=0.23->flaml[notebook]) (1.14.0)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from scikit-learn>=0.23.2->flaml[notebook]) (2.0.0)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from matplotlib==3.2.0; extra == \"notebook\"->flaml[notebook]) (2.4.7)\n", + "Requirement already satisfied: cycler>=0.10 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from matplotlib==3.2.0; extra == \"notebook\"->flaml[notebook]) (0.10.0)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from matplotlib==3.2.0; extra == \"notebook\"->flaml[notebook]) (1.2.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from matplotlib==3.2.0; extra == \"notebook\"->flaml[notebook]) (2.8.1)\n", + "Requirement already satisfied: requests in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from openml==0.10.2; extra == \"notebook\"->flaml[notebook]) (2.25.0)\n", + "Requirement already satisfied: xmltodict in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from openml==0.10.2; extra == \"notebook\"->flaml[notebook]) (0.12.0)\n", + "Requirement already satisfied: liac-arff>=2.4.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from openml==0.10.2; extra == \"notebook\"->flaml[notebook]) (2.4.0)\n", + "Requirement already satisfied: qtconsole in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (4.7.7)\n", + "Requirement already satisfied: notebook in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (6.1.3)\n", + "Requirement already satisfied: nbconvert in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (5.6.1)\n", + "Requirement already satisfied: ipykernel in c:\\users\\chiw\\appdata\\roaming\\python\\python37\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (5.3.4)\n", + "Requirement already satisfied: jupyter-console in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (6.2.0)\n", + "Requirement already satisfied: ipywidgets in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from jupyter; extra == \"notebook\"->flaml[notebook]) (7.5.1)\n", + "Requirement already satisfied: 
python-editor>=0.3 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from alembic->optuna==2.3.0->flaml[notebook]) (1.0.4)\n", + "Requirement already satisfied: Mako in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from alembic->optuna==2.3.0->flaml[notebook]) (1.1.3)\n", + "Requirement already satisfied: cmd2!=0.8.3,>=0.8.0 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from cliff->optuna==2.3.0->flaml[notebook]) (1.4.0)\n", + "Requirement already satisfied: PrettyTable<0.8,>=0.7.2 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from cliff->optuna==2.3.0->flaml[notebook]) (0.7.2)\n", + "Requirement already satisfied: stevedore>=2.0.1 in c:\\users\\chiw\\miniconda3\\envs\\flaml\\lib\\site-packages (from cliff->optuna==2.3.0->flaml[notebook]) (3.2.2)" + ] + } + ], + "source": [ + "!pip install flaml[notebook];" + ] + }, { "cell_type": "markdown", "metadata": { @@ -49,7 +111,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { "slideshow": { "slide_type": "subslide" @@ -84,7 +146,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": { "slideshow": { "slide_type": "slide" @@ -99,7 +161,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": { "slideshow": { "slide_type": "slide" @@ -108,87 +170,290 @@ "outputs": [], "source": [ "settings = {\n", - " \"time_budget\": 60, # total running time in seconds\n", + " \"time_budget\": 300, # total running time in seconds\n", " \"metric\": 'accuracy', # primary metrics can be chosen from: ['accuracy','roc_auc','f1','log_loss','mae','mse','r2']\n", - " \"estimator_list\": ['lgbm', 'rf', 'xgboost'], # list of ML learners\n", + " # \"estimator_list\": ['lgbm', 'rf', 'xgboost'], # list of ML learners\n", " \"task\": 'classification', # task type \n", - " \"sample\": False, # whether to subsample training data\n", + " # \"sample\": False, # whether to subsample training data\n", " \"log_file_name\": 'airlines_experiment.log', # cache directory of flaml log files \n", "}" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": { "slideshow": { "slide_type": "slide" }, - "tags": [] + "tags": [ + "outputPrepend" + ] }, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ - "[flaml.automl: 01-31 05:20:44] {816} INFO - Evaluation method: holdout\n", - "[flaml.automl: 01-31 05:20:44] {541} INFO - Using StratifiedKFold\n", - "[flaml.automl: 01-31 05:20:44] {837} INFO - Minimizing error metric: 1-accuracy\n", - "[flaml.automl: 01-31 05:20:44] {857} INFO - List of ML learners in AutoML Run: ['lgbm', 'rf', 'xgboost']\n", - "[flaml.automl: 01-31 05:20:44] {916} INFO - iteration 0 current learner lgbm\n", - "[flaml.automl: 01-31 05:20:45] {1046} INFO - at 0.9s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", - "[flaml.automl: 01-31 05:20:45] {916} INFO - iteration 1 current learner lgbm\n", - "[flaml.automl: 01-31 05:20:45] {1046} INFO - at 1.6s,\tbest lgbm's error=0.3771,\tbest lgbm's error=0.3771\n", - "[flaml.automl: 01-31 05:20:45] {916} INFO - iteration 2 current learner lgbm\n", - "[flaml.automl: 01-31 05:20:46] {1046} INFO - at 2.7s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 01-31 05:20:46] {916} INFO - iteration 3 current learner xgboost\n", - "[flaml.automl: 01-31 05:20:49] {1046} INFO - at 5.8s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 01-31 05:20:49] {916} 
INFO - iteration 4 current learner rf\n", - "[flaml.automl: 01-31 05:20:57] {1046} INFO - at 13.5s,\tbest rf's error=0.3850,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 01-31 05:20:57] {916} INFO - iteration 5 current learner lgbm\n", - "[flaml.automl: 01-31 05:20:58] {1046} INFO - at 14.5s,\tbest lgbm's error=0.3751,\tbest lgbm's error=0.3751\n", - "[flaml.automl: 01-31 05:20:58] {916} INFO - iteration 6 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:00] {1046} INFO - at 16.2s,\tbest lgbm's error=0.3558,\tbest lgbm's error=0.3558\n", - "[flaml.automl: 01-31 05:21:00] {916} INFO - iteration 7 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:01] {1046} INFO - at 17.8s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", - "[flaml.automl: 01-31 05:21:01] {916} INFO - iteration 8 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:03] {1046} INFO - at 19.2s,\tbest lgbm's error=0.3492,\tbest lgbm's error=0.3492\n", - "[flaml.automl: 01-31 05:21:03] {916} INFO - iteration 9 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:05] {1046} INFO - at 20.9s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:05] {916} INFO - iteration 10 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:06] {1046} INFO - at 22.4s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:06] {916} INFO - iteration 11 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:08] {1046} INFO - at 23.9s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:08] {916} INFO - iteration 12 current learner rf\n", - "[flaml.automl: 01-31 05:21:12] {1046} INFO - at 28.8s,\tbest rf's error=0.3843,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:12] {916} INFO - iteration 13 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:14] {1046} INFO - at 30.3s,\tbest lgbm's error=0.3470,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:14] {916} INFO - iteration 14 current learner xgboost\n", - "[flaml.automl: 01-31 05:21:16] {1046} INFO - at 32.0s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3470\n", - "[flaml.automl: 01-31 05:21:16] {916} INFO - iteration 15 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:19] {1046} INFO - at 35.0s,\tbest lgbm's error=0.3412,\tbest lgbm's error=0.3412\n", - "[flaml.automl: 01-31 05:21:19] {916} INFO - iteration 16 current learner xgboost\n", - "[flaml.automl: 01-31 05:21:20] {1046} INFO - at 36.8s,\tbest xgboost's error=0.3753,\tbest lgbm's error=0.3412\n", - "[flaml.automl: 01-31 05:21:20] {916} INFO - iteration 17 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:24] {1046} INFO - at 40.6s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", - "[flaml.automl: 01-31 05:21:24] {916} INFO - iteration 18 current learner xgboost\n", - "[flaml.automl: 01-31 05:21:26] {1046} INFO - at 42.3s,\tbest xgboost's error=0.3750,\tbest lgbm's error=0.3374\n", - "[flaml.automl: 01-31 05:21:26] {916} INFO - iteration 19 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:27] {1046} INFO - at 43.6s,\tbest lgbm's error=0.3374,\tbest lgbm's error=0.3374\n", - "[flaml.automl: 01-31 05:21:27] {916} INFO - iteration 20 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:35] {1046} INFO - at 51.5s,\tbest lgbm's error=0.3311,\tbest lgbm's error=0.3311\n", - "[flaml.automl: 01-31 05:21:35] {916} INFO - iteration 21 current learner lgbm\n", - "[flaml.automl: 01-31 05:21:41] {1046} INFO - at 56.9s,\tbest lgbm's error=0.3311,\tbest lgbm's 
error=0.3311\n", - "[flaml.automl: 01-31 05:21:41] {916} INFO - iteration 22 current learner rf\n", - "[flaml.automl: 01-31 05:21:41] {1048} INFO - no enough budget for learner rf\n", - "[flaml.automl: 01-31 05:21:41] {916} INFO - iteration 23 current learner xgboost\n", - "[flaml.automl: 01-31 05:21:41] {1048} INFO - no enough budget for learner xgboost\n", - "[flaml.automl: 01-31 05:21:41] {1086} INFO - selected model: LGBMClassifier(colsample_bytree=0.9997863921359742,\n", - " learning_rate=0.1564464373197609, max_bin=511,\n", - " min_child_weight=7.427173668000723, n_estimators=18,\n", - " num_leaves=1846, objective='binary',\n", - " reg_alpha=6.349231150788211e-09, reg_lambda=0.8927146483558472)\n", - "[flaml.automl: 01-31 05:21:41] {871} INFO - fit succeeded\n" + "error=0.3600,\tbest catboost's error=0.3600\n", + "[flaml.automl: 02-17 13:53:08] {939} INFO - iteration 22 current learner catboost\n", + "INFO - iteration 22 current learner catboost\n", + "[flaml.automl: 02-17 13:53:10] {1093} INFO - at 11.5s,\tbest catboost's error=0.3600,\tbest catboost's error=0.3600\n", + "INFO - at 11.5s,\tbest catboost's error=0.3600,\tbest catboost's error=0.3600\n", + "[flaml.automl: 02-17 13:53:10] {939} INFO - iteration 23 current learner rf\n", + "INFO - iteration 23 current learner rf\n", + "[flaml.automl: 02-17 13:53:10] {1093} INFO - at 12.0s,\tbest rf's error=0.4000,\tbest catboost's error=0.3600\n", + "INFO - at 12.0s,\tbest rf's error=0.4000,\tbest catboost's error=0.3600\n", + "[flaml.automl: 02-17 13:53:10] {939} INFO - iteration 24 current learner catboost\n", + "INFO - iteration 24 current learner catboost\n", + "[flaml.automl: 02-17 13:53:11] {1093} INFO - at 12.7s,\tbest catboost's error=0.3599,\tbest catboost's error=0.3599\n", + "INFO - at 12.7s,\tbest catboost's error=0.3599,\tbest catboost's error=0.3599\n", + "[flaml.automl: 02-17 13:53:11] {939} INFO - iteration 25 current learner xgboost\n", + "INFO - iteration 25 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:11] {1093} INFO - at 12.9s,\tbest xgboost's error=0.3787,\tbest catboost's error=0.3599\n", + "INFO - at 12.9s,\tbest xgboost's error=0.3787,\tbest catboost's error=0.3599\n", + "[flaml.automl: 02-17 13:53:11] {939} INFO - iteration 26 current learner extra_tree\n", + "INFO - iteration 26 current learner extra_tree\n", + "[flaml.automl: 02-17 13:53:12] {1093} INFO - at 13.4s,\tbest extra_tree's error=0.3967,\tbest catboost's error=0.3599\n", + "INFO - at 13.4s,\tbest extra_tree's error=0.3967,\tbest catboost's error=0.3599\n", + "[flaml.automl: 02-17 13:53:12] {939} INFO - iteration 27 current learner catboost\n", + "INFO - iteration 27 current learner catboost\n", + "[flaml.automl: 02-17 13:53:13] {1093} INFO - at 14.2s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "INFO - at 14.2s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:13] {939} INFO - iteration 28 current learner xgboost\n", + "INFO - iteration 28 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:13] {1093} INFO - at 14.4s,\tbest xgboost's error=0.3757,\tbest catboost's error=0.3598\n", + "INFO - at 14.4s,\tbest xgboost's error=0.3757,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:13] {939} INFO - iteration 29 current learner xgboost\n", + "INFO - iteration 29 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:13] {1093} INFO - at 14.4s,\tbest xgboost's error=0.3756,\tbest catboost's error=0.3598\n", + "INFO - at 14.4s,\tbest xgboost's 
error=0.3756,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:13] {939} INFO - iteration 30 current learner catboost\n", + "INFO - iteration 30 current learner catboost\n", + "[flaml.automl: 02-17 13:53:13] {1093} INFO - at 15.1s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "INFO - at 15.1s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:13] {939} INFO - iteration 31 current learner lgbm\n", + "INFO - iteration 31 current learner lgbm\n", + "[flaml.automl: 02-17 13:53:14] {1093} INFO - at 16.0s,\tbest lgbm's error=0.3618,\tbest catboost's error=0.3598\n", + "INFO - at 16.0s,\tbest lgbm's error=0.3618,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:14] {939} INFO - iteration 32 current learner catboost\n", + "INFO - iteration 32 current learner catboost\n", + "[flaml.automl: 02-17 13:53:15] {1093} INFO - at 17.2s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "INFO - at 17.2s,\tbest catboost's error=0.3598,\tbest catboost's error=0.3598\n", + "[flaml.automl: 02-17 13:53:15] {939} INFO - iteration 33 current learner catboost\n", + "INFO - iteration 33 current learner catboost\n", + "[flaml.automl: 02-17 13:53:17] {1093} INFO - at 19.0s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "INFO - at 19.0s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:17] {939} INFO - iteration 34 current learner xgboost\n", + "INFO - iteration 34 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:17] {1093} INFO - at 19.2s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "INFO - at 19.2s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:17] {939} INFO - iteration 35 current learner xgboost\n", + "INFO - iteration 35 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:18] {1093} INFO - at 19.4s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "INFO - at 19.4s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:18] {939} INFO - iteration 36 current learner xgboost\n", + "INFO - iteration 36 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:18] {1093} INFO - at 19.5s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "INFO - at 19.5s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:18] {939} INFO - iteration 37 current learner xgboost\n", + "INFO - iteration 37 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:18] {1093} INFO - at 19.7s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "INFO - at 19.7s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:18] {939} INFO - iteration 38 current learner xgboost\n", + "INFO - iteration 38 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:18] {1093} INFO - at 19.9s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "INFO - at 19.9s,\tbest xgboost's error=0.3620,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:18] {939} INFO - iteration 39 current learner xgboost\n", + "INFO - iteration 39 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:18] {1093} INFO - at 20.2s,\tbest xgboost's error=0.3598,\tbest catboost's error=0.3592\n", + "INFO - at 20.2s,\tbest xgboost's error=0.3598,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:18] {939} INFO - iteration 
40 current learner xgboost\n", + "INFO - iteration 40 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:19] {1093} INFO - at 20.4s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 20.4s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:19] {939} INFO - iteration 41 current learner xgboost\n", + "INFO - iteration 41 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:19] {1093} INFO - at 20.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 20.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:19] {939} INFO - iteration 42 current learner xgboost\n", + "INFO - iteration 42 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:19] {1093} INFO - at 21.0s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 21.0s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:19] {939} INFO - iteration 43 current learner catboost\n", + "INFO - iteration 43 current learner catboost\n", + "[flaml.automl: 02-17 13:53:20] {1093} INFO - at 22.1s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "INFO - at 22.1s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:20] {939} INFO - iteration 44 current learner xgboost\n", + "INFO - iteration 44 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:21] {1093} INFO - at 22.3s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 22.3s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:21] {939} INFO - iteration 45 current learner extra_tree\n", + "INFO - iteration 45 current learner extra_tree\n", + "[flaml.automl: 02-17 13:53:21] {1093} INFO - at 22.8s,\tbest extra_tree's error=0.3915,\tbest catboost's error=0.3592\n", + "INFO - at 22.8s,\tbest extra_tree's error=0.3915,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:21] {939} INFO - iteration 46 current learner xgboost\n", + "INFO - iteration 46 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:21] {1093} INFO - at 23.1s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 23.1s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:21] {939} INFO - iteration 47 current learner xgboost\n", + "INFO - iteration 47 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:23] {1093} INFO - at 24.3s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 24.3s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:23] {939} INFO - iteration 48 current learner xgboost\n", + "INFO - iteration 48 current learner xgboost\n", + "[flaml.automl: 02-17 13:53:24] {1093} INFO - at 25.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 25.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:24] {939} INFO - iteration 49 current learner catboost\n", + "INFO - iteration 49 current learner catboost\n", + "[flaml.automl: 02-17 13:53:25] {1093} INFO - at 26.8s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "INFO - at 26.8s,\tbest catboost's error=0.3592,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:25] {939} INFO - iteration 50 current learner xgboost\n", + "INFO - iteration 50 current learner xgboost\n", + 
"[flaml.automl: 02-17 13:53:26] {1093} INFO - at 27.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "INFO - at 27.6s,\tbest xgboost's error=0.3593,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:26] {939} INFO - iteration 51 current learner extra_tree\n", + "INFO - iteration 51 current learner extra_tree\n", + "[flaml.automl: 02-17 13:53:26] {1093} INFO - at 28.2s,\tbest extra_tree's error=0.3910,\tbest catboost's error=0.3592\n", + "INFO - at 28.2s,\tbest extra_tree's error=0.3910,\tbest catboost's error=0.3592\n", + "[flaml.automl: 02-17 13:53:26] {939} INFO - iteration 52 current learner catboost\n", + "INFO - iteration 52 current learner catboost\n", + "[flaml.automl: 02-17 13:53:32] {1093} INFO - at 34.1s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "INFO - at 34.1s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:32] {939} INFO - iteration 53 current learner catboost\n", + "INFO - iteration 53 current learner catboost\n", + "[flaml.automl: 02-17 13:53:34] {1093} INFO - at 36.0s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "INFO - at 36.0s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:34] {939} INFO - iteration 54 current learner catboost\n", + "INFO - iteration 54 current learner catboost\n", + "[flaml.automl: 02-17 13:53:42] {1093} INFO - at 43.7s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "INFO - at 43.7s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:42] {939} INFO - iteration 55 current learner lrl1\n", + "INFO - iteration 55 current learner lrl1\n", + "[flaml.automl: 02-17 13:53:42] {1093} INFO - at 44.1s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "INFO - at 44.1s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:42] {939} INFO - iteration 56 current learner catboost\n", + "INFO - iteration 56 current learner catboost\n", + "[flaml.automl: 02-17 13:53:47] {1093} INFO - at 48.3s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "INFO - at 48.3s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:47] {939} INFO - iteration 57 current learner lrl1\n", + "INFO - iteration 57 current learner lrl1\n", + "[flaml.automl: 02-17 13:53:47] {1093} INFO - at 48.7s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "INFO - at 48.7s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:47] {939} INFO - iteration 58 current learner lrl1\n", + "INFO - iteration 58 current learner lrl1\n", + "[flaml.automl: 02-17 13:53:47] {1093} INFO - at 49.0s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "INFO - at 49.0s,\tbest lrl1's error=0.4338,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:47] {939} INFO - iteration 59 current learner catboost\n", + "INFO - iteration 59 current learner catboost\n", + "[flaml.automl: 02-17 13:53:54] {1093} INFO - at 55.4s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "INFO - at 55.4s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:53:54] {939} INFO - iteration 60 current learner catboost\n", + "INFO - iteration 60 current learner catboost\n", + "[flaml.automl: 02-17 13:54:00] {1093} INFO - at 61.8s,\tbest catboost's error=0.3553,\tbest catboost's 
error=0.3553\n", + "INFO - at 61.8s,\tbest catboost's error=0.3553,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:54:00] {939} INFO - iteration 61 current learner lgbm\n", + "INFO - iteration 61 current learner lgbm\n", + "[flaml.automl: 02-17 13:54:01] {1093} INFO - at 62.6s,\tbest lgbm's error=0.3618,\tbest catboost's error=0.3553\n", + "INFO - at 62.6s,\tbest lgbm's error=0.3618,\tbest catboost's error=0.3553\n", + "[flaml.automl: 02-17 13:54:01] {939} INFO - iteration 62 current learner catboost\n", + "INFO - iteration 62 current learner catboost\n", + "[flaml.automl: 02-17 13:54:40] {1093} INFO - at 101.8s,\tbest catboost's error=0.3476,\tbest catboost's error=0.3476\n", + "INFO - at 101.8s,\tbest catboost's error=0.3476,\tbest catboost's error=0.3476\n", + "[flaml.automl: 02-17 13:54:40] {939} INFO - iteration 63 current learner catboost\n", + "INFO - iteration 63 current learner catboost\n", + "[flaml.automl: 02-17 13:54:48] {1093} INFO - at 109.9s,\tbest catboost's error=0.3476,\tbest catboost's error=0.3476\n", + "INFO - at 109.9s,\tbest catboost's error=0.3476,\tbest catboost's error=0.3476\n", + "[flaml.automl: 02-17 13:54:48] {939} INFO - iteration 64 current learner xgboost\n", + "INFO - iteration 64 current learner xgboost\n", + "[flaml.automl: 02-17 13:54:50] {1093} INFO - at 112.0s,\tbest xgboost's error=0.3424,\tbest xgboost's error=0.3424\n", + "INFO - at 112.0s,\tbest xgboost's error=0.3424,\tbest xgboost's error=0.3424\n", + "[flaml.automl: 02-17 13:54:50] {939} INFO - iteration 65 current learner xgboost\n", + "INFO - iteration 65 current learner xgboost\n", + "[flaml.automl: 02-17 13:54:56] {1093} INFO - at 117.6s,\tbest xgboost's error=0.3424,\tbest xgboost's error=0.3424\n", + "INFO - at 117.6s,\tbest xgboost's error=0.3424,\tbest xgboost's error=0.3424\n", + "[flaml.automl: 02-17 13:54:56] {939} INFO - iteration 66 current learner xgboost\n", + "INFO - iteration 66 current learner xgboost\n", + "[flaml.automl: 02-17 13:55:03] {1093} INFO - at 125.1s,\tbest xgboost's error=0.3400,\tbest xgboost's error=0.3400\n", + "INFO - at 125.1s,\tbest xgboost's error=0.3400,\tbest xgboost's error=0.3400\n", + "[flaml.automl: 02-17 13:55:03] {939} INFO - iteration 67 current learner xgboost\n", + "INFO - iteration 67 current learner xgboost\n", + "[flaml.automl: 02-17 13:55:06] {1093} INFO - at 127.4s,\tbest xgboost's error=0.3400,\tbest xgboost's error=0.3400\n", + "INFO - at 127.4s,\tbest xgboost's error=0.3400,\tbest xgboost's error=0.3400\n", + "[flaml.automl: 02-17 13:55:06] {939} INFO - iteration 68 current learner xgboost\n", + "INFO - iteration 68 current learner xgboost\n", + "[flaml.automl: 02-17 13:55:20] {1093} INFO - at 141.8s,\tbest xgboost's error=0.3366,\tbest xgboost's error=0.3366\n", + "INFO - at 141.8s,\tbest xgboost's error=0.3366,\tbest xgboost's error=0.3366\n", + "[flaml.automl: 02-17 13:55:20] {939} INFO - iteration 69 current learner xgboost\n", + "INFO - iteration 69 current learner xgboost\n", + "[flaml.automl: 02-17 13:55:25] {1093} INFO - at 147.0s,\tbest xgboost's error=0.3366,\tbest xgboost's error=0.3366\n", + "INFO - at 147.0s,\tbest xgboost's error=0.3366,\tbest xgboost's error=0.3366\n", + "[flaml.automl: 02-17 13:55:25] {939} INFO - iteration 70 current learner catboost\n", + "INFO - iteration 70 current learner catboost\n", + "[flaml.automl: 02-17 13:56:11] {1093} INFO - at 192.7s,\tbest catboost's error=0.3476,\tbest xgboost's error=0.3366\n", + "INFO - at 192.7s,\tbest catboost's error=0.3476,\tbest xgboost's 
error=0.3366\n", + "[flaml.automl: 02-17 13:56:11] {939} INFO - iteration 71 current learner xgboost\n", + "INFO - iteration 71 current learner xgboost\n", + "[flaml.automl: 02-17 13:56:29] {1093} INFO - at 210.7s,\tbest xgboost's error=0.3317,\tbest xgboost's error=0.3317\n", + "INFO - at 210.7s,\tbest xgboost's error=0.3317,\tbest xgboost's error=0.3317\n", + "[flaml.automl: 02-17 13:56:29] {939} INFO - iteration 72 current learner xgboost\n", + "INFO - iteration 72 current learner xgboost\n", + "[flaml.automl: 02-17 13:56:59] {1093} INFO - at 240.5s,\tbest xgboost's error=0.3268,\tbest xgboost's error=0.3268\n", + "INFO - at 240.5s,\tbest xgboost's error=0.3268,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:56:59] {939} INFO - iteration 73 current learner xgboost\n", + "INFO - iteration 73 current learner xgboost\n", + "[flaml.automl: 02-17 13:57:14] {1093} INFO - at 255.9s,\tbest xgboost's error=0.3268,\tbest xgboost's error=0.3268\n", + "INFO - at 255.9s,\tbest xgboost's error=0.3268,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:57:32] {1109} INFO - retrain xgboost for 18.0s\n", + "INFO - retrain xgboost for 18.0s\n", + "[flaml.automl: 02-17 13:57:32] {939} INFO - iteration 74 current learner extra_tree\n", + "INFO - iteration 74 current learner extra_tree\n", + "[flaml.automl: 02-17 13:57:32] {1093} INFO - at 274.2s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "INFO - at 274.2s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:57:46] {1109} INFO - retrain extra_tree for 13.2s\n", + "INFO - retrain extra_tree for 13.2s\n", + "[flaml.automl: 02-17 13:57:46] {939} INFO - iteration 75 current learner extra_tree\n", + "INFO - iteration 75 current learner extra_tree\n", + "[flaml.automl: 02-17 13:57:46] {1093} INFO - at 287.8s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "INFO - at 287.8s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:57:52] {1109} INFO - retrain extra_tree for 5.9s\n", + "INFO - retrain extra_tree for 5.9s\n", + "[flaml.automl: 02-17 13:57:52] {939} INFO - iteration 76 current learner extra_tree\n", + "INFO - iteration 76 current learner extra_tree\n", + "[flaml.automl: 02-17 13:57:52] {1093} INFO - at 293.9s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "INFO - at 293.9s,\tbest extra_tree's error=0.3910,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:57:56] {1109} INFO - retrain extra_tree for 3.8s\n", + "INFO - retrain extra_tree for 3.8s\n", + "[flaml.automl: 02-17 13:57:56] {939} INFO - iteration 77 current learner lgbm\n", + "INFO - iteration 77 current learner lgbm\n", + "[flaml.automl: 02-17 13:57:57] {1093} INFO - at 299.0s,\tbest lgbm's error=0.3563,\tbest xgboost's error=0.3268\n", + "INFO - at 299.0s,\tbest lgbm's error=0.3563,\tbest xgboost's error=0.3268\n", + "[flaml.automl: 02-17 13:57:58] {1109} INFO - retrain lgbm for 0.9s\n", + "INFO - retrain lgbm for 0.9s\n", + "[flaml.automl: 02-17 13:57:58] {1133} INFO - selected model: XGBClassifier(base_score=0.5, booster='gbtree',\n", + " colsample_bylevel=0.8909660754557278, colsample_bynode=1,\n", + " colsample_bytree=0.9330310727361396, gamma=0, gpu_id=-1,\n", + " grow_policy='lossguide', importance_type='gain',\n", + " interaction_constraints='', learning_rate=0.16464534671449255,\n", + " max_delta_step=0, max_depth=0, max_leaves=28,\n", + " min_child_weight=20.0, missing=nan, monotone_constraints='()',\n", + 
" n_estimators=1221, n_jobs=-1, num_parallel_tree=1, random_state=0,\n", + " reg_alpha=1e-10, reg_lambda=0.003747467958239166,\n", + " scale_pos_weight=1, subsample=1.0, tree_method='hist',\n", + " validate_parameters=1, verbosity=0)\n", + "INFO - selected model: XGBClassifier(base_score=0.5, booster='gbtree',\n", + " colsample_bylevel=0.8909660754557278, colsample_bynode=1,\n", + " colsample_bytree=0.9330310727361396, gamma=0, gpu_id=-1,\n", + " grow_policy='lossguide', importance_type='gain',\n", + " interaction_constraints='', learning_rate=0.16464534671449255,\n", + " max_delta_step=0, max_depth=0, max_leaves=28,\n", + " min_child_weight=20.0, missing=nan, monotone_constraints='()',\n", + " n_estimators=1221, n_jobs=-1, num_parallel_tree=1, random_state=0,\n", + " reg_alpha=1e-10, reg_lambda=0.003747467958239166,\n", + " scale_pos_weight=1, subsample=1.0, tree_method='hist',\n", + " validate_parameters=1, verbosity=0)\n", + "[flaml.automl: 02-17 13:57:58] {894} INFO - fit succeeded\n", + "INFO - fit succeeded\n" ] } ], @@ -210,7 +475,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": { "slideshow": { "slide_type": "slide" @@ -222,7 +487,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "Best ML leaner: lgbm\nBest hyperparmeter config: {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}\nBest accuracy on validation data: 0.6689\nTraining duration of best run: 7.89 s\n" + "Best ML leaner: xgboost\nBest hyperparmeter config: {'n_estimators': 1389.0, 'max_leaves': 28.0, 'min_child_weight': 20.0, 'learning_rate': 0.16464534671449255, 'subsample': 1.0, 'colsample_bylevel': 0.8909660754557278, 'colsample_bytree': 0.9330310727361396, 'reg_alpha': 1e-10, 'reg_lambda': 0.003747467958239166, 'FLAML_sample_size': 364083}\nBest accuracy on validation data: 0.6732\nTraining duration of best run: 29.74 s\n" ] } ], @@ -236,7 +501,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": { "slideshow": { "slide_type": "slide" @@ -247,15 +512,21 @@ "output_type": "execute_result", "data": { "text/plain": [ - "LGBMClassifier(colsample_bytree=0.9997863921359742,\n", - " learning_rate=0.1564464373197609, max_bin=511,\n", - " min_child_weight=7.427173668000723, n_estimators=18,\n", - " num_leaves=1846, objective='binary',\n", - " reg_alpha=6.349231150788211e-09, reg_lambda=0.8927146483558472)" + "XGBClassifier(base_score=0.5, booster='gbtree',\n", + " colsample_bylevel=0.8909660754557278, colsample_bynode=1,\n", + " colsample_bytree=0.9330310727361396, gamma=0, gpu_id=-1,\n", + " grow_policy='lossguide', importance_type='gain',\n", + " interaction_constraints='', learning_rate=0.16464534671449255,\n", + " max_delta_step=0, max_depth=0, max_leaves=28,\n", + " min_child_weight=20.0, missing=nan, monotone_constraints='()',\n", + " n_estimators=1221, n_jobs=-1, num_parallel_tree=1, random_state=0,\n", + " reg_alpha=1e-10, reg_lambda=0.003747467958239166,\n", + " scale_pos_weight=1, subsample=1.0, tree_method='hist',\n", + " validate_parameters=1, verbosity=0)" ] }, "metadata": {}, - "execution_count": 6 + "execution_count": 7 } ], "source": [ @@ -264,7 +535,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": { "slideshow": { "slide_type": "slide" @@ -280,7 +551,7 @@ }, { 
"cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": { "slideshow": { "slide_type": "slide" @@ -306,7 +577,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": { "slideshow": { "slide_type": "slide" @@ -318,10 +589,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "accuracy = 0.6681918633107397\n", - "roc_auc = 0.7208412179342409\n", - "log_loss = 0.6064652793713222\n", - "f1 = 0.5838518559855651\n" + "accuracy = 0.6721222728149148\nroc_auc = 0.7252473500166565\nlog_loss = 0.6035663268278709\nf1 = 0.5905710872605036\n" ] } ], @@ -342,12 +610,14 @@ } }, "source": [ + "See Section 4 for an accuracy comparison with default LightGBM and XGBoost.\n", + "\n", "### Log history" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": { "slideshow": { "slide_type": "subslide" @@ -359,7 +629,7 @@ "output_type": "stream", "name": "stdout", "text": [ - "{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': 
{'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 'reg_lambda': 0.028657570201141073}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 7.0, 'max_leaves': 51.0, 'min_child_weight': 20.0, 'learning_rate': 0.8834537640176922, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.9837052481490312, 'reg_alpha': 4.482246955743696e-08, 'reg_lambda': 0.028657570201141073}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 15.0, 'max_leaves': 165.0, 'min_child_weight': 11.09973081317571, 'learning_rate': 1.0, 'subsample': 0.9847553005974036, 'log_max_bin': 9.0, 'colsample_bytree': 0.9508927355861483, 'reg_alpha': 2.031936014930936e-06, 'reg_lambda': 0.00624701632609755}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 1073.0, 'min_child_weight': 5.630999649172112, 'learning_rate': 0.32864729892819683, 'subsample': 1.0, 'log_max_bin': 10.0, 'colsample_bytree': 0.99236562733598, 'reg_alpha': 1.978160373587824e-09, 'reg_lambda': 1.0}}\n{'Current Learner': 'lgbm', 'Current Sample': 364083, 'Current Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 18.0, 'max_leaves': 1846.0, 'min_child_weight': 7.427173668000723, 'learning_rate': 0.1564464373197609, 'subsample': 1.0, 'log_max_bin': 9.0, 'colsample_bytree': 0.9997863921359742, 'reg_alpha': 6.349231150788211e-09, 'reg_lambda': 0.8927146483558472}}\n" + "{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 4, 'max_leaves': 4, 'min_child_weight': 20.0, 'learning_rate': 0.1, 'subsample': 1.0, 'log_max_bin': 8, 'colsample_bytree': 1.0, 'reg_alpha': 1e-10, 'reg_lambda': 1.0, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': 
{'n_estimators': 4.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 0.46335414315327306, 'subsample': 0.9339389930838808, 'log_max_bin': 10.0, 'colsample_bytree': 0.9904286645657556, 'reg_alpha': 2.841147337412889e-10, 'reg_lambda': 0.12000833497054482, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 23.0, 'max_leaves': 4.0, 'min_child_weight': 20.0, 'learning_rate': 1.0, 'subsample': 0.9917683183663918, 'log_max_bin': 10.0, 'colsample_bytree': 0.9858892907525497, 'reg_alpha': 3.8783982645515837e-10, 'reg_lambda': 0.36607431863072826, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 11.0, 'max_leaves': 17.0, 'min_child_weight': 14.947587304572773, 'learning_rate': 0.6092558236172073, 'subsample': 0.9659256891661986, 'log_max_bin': 10.0, 'colsample_bytree': 1.0, 'reg_alpha': 3.816590663384559e-08, 'reg_lambda': 0.4482946615262561, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'lgbm', 'Current Sample': 10000, 'Current Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 4.0, 'min_child_weight': 2.776007506782275, 'learning_rate': 0.7179196339383696, 'subsample': 0.8746997476758036, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 9.69511928836042e-10, 'reg_lambda': 0.17744769739709204, 'FLAML_sample_size': 10000}, 'Best Learner': 'lgbm', 'Best Hyper-parameters': {'n_estimators': 6.0, 'max_leaves': 4.0, 'min_child_weight': 2.776007506782275, 'learning_rate': 0.7179196339383696, 'subsample': 0.8746997476758036, 'log_max_bin': 9.0, 'colsample_bytree': 1.0, 'reg_alpha': 9.69511928836042e-10, 'reg_lambda': 0.17744769739709204, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'catboost', 'Current Sample': 10000, 'Current Hyper-parameters': {'early_stopping_rounds': 10, 'learning_rate': 0.1, 'FLAML_sample_size': 10000}, 'Best Learner': 'catboost', 'Best Hyper-parameters': {'early_stopping_rounds': 10, 'learning_rate': 0.1, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'catboost', 'Current Sample': 10000, 'Current Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.2, 'FLAML_sample_size': 10000}, 'Best Learner': 'catboost', 'Best Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.2, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'catboost', 'Current Sample': 10000, 'Current Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.09293377774381106, 'FLAML_sample_size': 10000}, 'Best Learner': 'catboost', 'Best Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.09293377774381106, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'catboost', 'Current Sample': 10000, 'Current Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.042438101461189835, 
'FLAML_sample_size': 10000}, 'Best Learner': 'catboost', 'Best Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.042438101461189835, 'FLAML_sample_size': 10000}}\n{'Current Learner': 'catboost', 'Current Sample': 40000, 'Current Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.042438101461189835, 'FLAML_sample_size': 40000}, 'Best Learner': 'catboost', 'Best Hyper-parameters': {'early_stopping_rounds': 11.0, 'learning_rate': 0.042438101461189835, 'FLAML_sample_size': 40000}}\n" ] } ], @@ -374,7 +644,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": { "slideshow": { "slide_type": "slide" @@ -385,8 +655,8 @@ "output_type": "display_data", "data": { "text/plain": "
", - "image/svg+xml": "\r\n\r\n\r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n", - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3deXxV1bn/8c9DCBDmMcyTMglOYASsI6gFWwdsbSs41AnqbW1t7aVXe29va3u9HfjdttZqLeA8VlvEoQq1ZbBVZhkEJRiQIQRMAoQhJGR6fn/sHTyEk3CAnJzk5Pt+vfJK9trrnP1shvNkrbXXWubuiIiIVNUk0QGIiEj9pAQhIiJRKUGIiEhUShAiIhKVEoSIiESlBCEiIlEpQYicADO70MwyEx2HSDwpQUiDY2abzeyyRMbg7v9098Hxen8zG2dm75jZfjPLM7OFZnZ1vK4nEo0ShEgUZpaSwGtfB7wMPA30AroC/w1cdQLvZWam/+dyQvQPR5KGmTUxs3vNbKOZ7TKzl8ysY8T5l81sp5ntDX87HxZx7kkz+4OZvWlmhcCYsKXy72a2JnzNn8ysRVj/EjPLjnh9tXXD8z8wsx1mlmNmd5iZm9mAKPdgwK+Bn7n7THff6+4V7r7Q3SeHdX5iZs9GvKZf+H5Nw+MFZvaAmb0LHAR+aGbLq1zne2b2WvhzczP7f2a21cw+NbNHzSztJP86JAkoQUgy+Q4wAbgY6AHsAR6OOP8WMBBIB94Hnqvy+knAA0Ab4F9h2VeB8UB/4EzglhquH7WumY0H7gEuAwaE8VVnMNAb+HMNdWJxEzCF4F4eAgab2cCI85OA58OffwkMAs4O4+tJ0GKRRk4JQpLJN4D/dPdsdz8E/AS4rvI3a3d/3N33R5w7y8zaRbz+VXd/N/yNvTgs+52757j7buB1gg/R6lRX96vAE+6+zt0PAvfX8B6dwu87Yr7r6J4Mr1fm7nuBV4GJAGGiGAK8FrZYJgPfc/fd7r4f+F/g+pO8viQBJQhJJn2BV8yswMwKgI+AcqCrmaWY2S/C7qd9wObwNZ0jXr8tynvujPj5INC6hutXV7dHlfeOdp1Ku8Lv3WuoE4uq13ieMEEQtB5mh8mqC9ASWBHx5zYnLJdGTglCksk24Ap3bx/x1cLdtxN8KF5D0M3TDugXvsYiXh+vpY13EAw2V+pdQ91Mgvv4cg11Cgk+1Ct1i1Kn6r38DehsZmcTJIrK7qV8oAgYFvFn1s7da0qE0kgoQUhDlWpmLSK+mgKPAg+YWV8AM+tiZteE9dsAhwh+Q29J0I1SV14CbjWz08ysJTX073uw/v49wI/M7FYzaxsOvl9gZtPDaquAi8ysT9hFdt+xAnD3MoJxjWlAR+DtsLwCmAH8xszSAcysp5mNO+G7laShBCEN1ZsEv/lWfv0EeBB4Dfibme0HFgOjwvpPA1uA7cCH4bk64e5vAb8D5gNZwKLw1KFq6v8Z+BpwG5ADfAr8D8E4Au7+NvAnYA2wAngjxlCeJ2hBvRwmjEr/Eca1OOx++zvBYLk0cqYNg0TqlpmdBqwFmlf5oBapV9SCEKkDZnatmTUzsw4Ej5W+ruQg9Z0ShEjd+AaQB2wkeLLq3xIbjsixqYtJRESiUgtCRESiaproAGpT586dvV+/fokOQ0SkwVixYkW+u0edGJlUCaJfv34sX7782BVFRAQAM9tS3bm4djGZ2XgzyzSzLDO7t5o6l5jZKjNbZ2YLw7LBYVnl1z4z+248YxURkSPFrQURrqf/MHA5kA0sM7PX3P3DiDrtgUeA8e6+tXImp7tnEi50Fr7PduCVeMUqIiJHi2cLYiSQ5e6b3L0EeJFgLZxIk4BZ7r4VwN1zo7zPpcBGd6+2GSQiIrUvngmiJ0euKJkdlkUaBHQINzhZYWY3R3mf64EX4hSjiIhUI56D1BalrOqki6bAOQSthDRgkZktdvcNAGbWDLiaGhYjM7MpBBuj0KdPn1oIW0REIL4JIpsjlzXuRbDwWNU6+e5eCBSa2TvAWcCG8PwVwPvu/ml1F3H36cB0gIyMDM36E5FGY/bK7Uybm0lOQRE92qcxddxgJgyv2lFz4uLZxbQMGGhm/cOWwPUEK21GehW40MyahssgjyLY5KXSRNS9JCJylNkrt3PfrA/YXlCEA9sLirhv1gfMXrm91q4RtwQRLkR2FzCX4EP/JXdfZ2Z3mtmdYZ2PCHavWgMsBWa6+1qAMGFcDsyKV4wiIg3VtLmZFJWWH1FWVFrOtLmZtXaNuE6Uc/c3Cdbtjyx7tMrxNIJNTKq+9iCf7c8rIiKhXQcOsb2gKOq5nGrKT0RSzaQWEUlW7s6qbQU8s2gLb6zZUW29Hu3Tau2aShAiIvVYUUk5r6/O4enFm1m7fR+tmqVw/cjedG/Xgt/9I+uIbqa01BSmjqu9zQCVIERE6qHN+YU8u3gLL6/IZm9RKQPTW/Oza4Zx7YhetG4efHR3b5cW16eYlCBEROqJ8gpnQWYuTy/awsINeTRtYowb1o2bzuvLqP4dMTtyetmE4T1rNSFUpQQhIpJguwtL+NOybTy3ZAvZe4pIb9Oc7142kIkj+9C1bYuExaUEISKSAIcHnRcHg84lZRWM6t+R+644jc8P60pqSuL3c1OCEBGpQ8Wl5by2OodnFm3hg+17adUsha9l9Oam8/oyqGubRId3BCUIEZE6sGVXMOj80vLPBp1/es0wrh3ekzYtUhMdXlRKECIicVJe4Szc8NmgcxMzxg3ryk2j+zH6lKMHnesbJQgRkVq2u7CEl5YHg87bdgeDzt8ZGww6d2uXuEHn46UEISJSS1ZtK+DpRZuPGHT+j/FDGDesW70YdD5eShAiIiehuDSY6fzM4i2syQ4Gnb+a0YubRvdjcLf6Neh8vJQgREROwNZdB3l2yRZeWr6NgoOlDGgAg87HSwlCRCRGlYPOzyzawoIGOOh8vJQgRESOYU846PxsOOjcpU1zvj12IJMa2KDz8VKCEBGpxuptBTy9aAuvr8mhpKyCkQ180Pl4KUGIiESoHHR+dvEWVmfvpWU46Hzj6L4M6dY20eHVKSUIERGCQefnlmzhTxGDzvdfPYwvjUieQefjpQQhIo1WRYWzcEMeTy/afHjQ+fNDu3LTeX0575ROSTfofLyUIESk0alu0HniyN50b1d7W3Y2dEoQItJorA6X1359dQ6HwkHnH4wLBp2bNU3+QefjpQQhIkmtuLScN9bs4JlFmw8POl93Ti9uOq/xDTofLyUIEUlK23YfDJfX3saeg6Wc2qUV9189jGtH9KRtIx10Pl5KECLSIM1euZ1pczPJKSiiR/s0po4bzNVn9WDhx3k8s2gL8zNzPxt0Ht2X807VoPPxMndPdAy1JiMjw5cvX57oMEQkzmav3M59sz6gqLT8cFnTJka7tFR2FZbQuXVzJo3szcRRfTTofAxmtsLdM6KdUwtCRBqcaXMzj0gOAGUVzv7iMh6aOFyDzrVECUJEGpztBUVRy0vLK7jqrB51HE3yUoIQkQZjyaZd/H5+Vr
Xne7RXd1JtUoIQkXrN3flXVj4P/SOLpZt307l1M64+qwd/W7eT4rKKw/XSUlOYOm5wAiNNPkoQIlIvuTvz1ufy0LwsVm0roFvbFvz4qqFMHNmHFqkpUZ9imjC8Z6LDTipKECJSr1RUOHPX7eSheVl8uGMfvTqk8cC1p3PdOb1o3jTlcL0Jw3sqIcSZEoSI1AvlFc4ba3J4eH4WGz49QP/OrZh23ZlMGN6zUey9UB8pQYhIQpWWVzB75XYeWbCRT/ILGZjemgevP5srz+xBShNNbEskJQgRSYhDZeX8eUU2f1iwkew9RQzt3pY/3DCCccO60USJoV6Ia4Iws/HAg0AKMNPdfxGlziXAb4FUIN/dLw7L2wMzgdMBB25z90XxjFdE4q+4tJwXlm7ljws3sXNfMWf1bs/9Vw9j7JB0LYVRz8QtQZhZCvAwcDmQDSwzs9fc/cOIOu2BR4Dx7r7VzNIj3uJBYI67X2dmzYCW8YpVROKv8FAZzy7ewox/fkL+gUOM7NeRaV85kwsGdFZiqKfi2YIYCWS5+yYAM3sRuAb4MKLOJGCWu28FcPfcsG5b4CLglrC8BCiJY6wiEif7ikt5+r3NPPavT9hzsJQLBnTm22OHM+qUTokOTY4hngmiJ7At4jgbGFWlziAg1cwWAG2AB939aeAUIA94wszOAlYAd7t7YRzjFZFatKewhCfe/YQn3tvM/uIyxg5J566xAxjRp0OiQ5MYxTNBRGszVl06tilwDnApkAYsMrPFYfkI4NvuvsTMHgTuBX501EXMpgBTAPr06VN70YvICck/cIgZ/9zEs4u2UFhSzrhhXfn22IGc3rNdokOT4xTPBJEN9I447gXkRKmTH7YMCs3sHeAs4J9AtrsvCev9mSBBHMXdpwPTIVjuu/bCF5HjsXNvMX98ZyMvLN3KobIKrjyzB3eNGcDgbm0SHZqcoHgmiGXAQDPrD2wHricYc4j0KvB7M2sKNCPogvqNu+80s21mNtjdMwlaGB8iIglXdYmL2y/oz8a8A7y8PJtydyac3ZNvjjmVU7u0TnSocpLiliDcvczM7gLmEjzm+ri7rzOzO8Pzj7r7R2Y2B1gDVBA8Crs2fItvA8+FTzBtAm6NV6wiEpuqG/VsLyjip298SEoT46sZvfm3i0+lTyc9cJgstKOciMRs1AN/59P9h44q79q2OUt+eFkCIpKTpR3lROSEHCorZ8mm3czPzGX++tyoyQEgd1/0cmnYlCBE5Ag5BUVhQsjj3ax8ikrLada0CZ87tRN7Dpayt6j0qNdoo57kpAQh0siVlVfw/taCw62E9Tv3A9CzfRpfPqcnY4ekc94pnUlrlnLUGARoo55kpgQh0gjtOnCIhRvymLc+l3c25LGvuIyUJkZG3w7cd8UQxgxJZ2B666OWwKjcf0Eb9TQOShAijUBFhbMuZx/z1ucyPzOX1dkFuEPn1s34/LBujBmczgUDO9MuLfWY76WNehoPJQiRJLWvuJR/fZzP/PW5LNiQR97+Q5jBmb3ac/elAxk7JJ3Te7TT0tpSLSUIkSTh7mTlHmB+Zi7z1ueyfPMeyiqcti2actGgLowZnM7Fg7vQuXXzRIcqDcQxE4SZdXT33XURjIgcn6KSchZv2nW46yh7TxEAQ7q14Y4LT2HskHRG9GlPU23ZKScglhbEEjNbBTwBvOXJNLNOpAHatvvg4SeO3tu4i0NlFaSlpnD+gE782yWncsngdHrqsVOpBbEkiEHAZcBtwENm9ifgSXffENfIRASAkrIKlm/ZzYLM4KmjrNwDAPTt1JKJI/swZkg6o/p3pEVqSoIjlWRzzAQRthjeBt42szHAs8A3zWw1cK+2ARWpfbn7ilmQmcf8zFz++XE+Bw6VkZpijOrfievP7c3YIen079xKO7FJXMUyBtEJuBG4CfiUYBG914CzgZeB/vEMUKQxKK9wVmcXsGB9LvMyc1m7fR8QrHF05ZndGTMknfMHdKZ1cz1XInUnln9ti4BngAnunh1RvtzMHo1PWCLJr+BgCe+Ej6Eu3JDH7sISmhiM6NOBqeMGM2ZwOqd1b6NWgiRMLAlicHUD0+7+y1qORyRpuTvrd+5n3vpcFmTmsmLLHiocOrRM5eJBXRgzJJ2LBnahQ6tmiQ5VBIgtQfzNzL7i7gUAZtYBeNHdx8U3NJGGr/BQGe9m5TM/M48Fmbns2FsMwLAebfnWmAFcMjids3u3J0WT1aQeiiVBdKlMDgDuvsfM0uMYk0iD9kl+IfPDeQlLNu2mpLyC1s2bcsGAznz3si5cMjidrm1bJDpMkWOKJUGUm1kfd98KYGZ9Ac2FEAkdKitn6Se7w66jPD7JLwTg1C6tuPm8vowdkk5Gv440a6rJatKwxJIg/hP4l5ktDI8vAqbELySR+m/H3iLmrw8eQ303K5+DJZ/tmXDL5/oxZnC6tt6UBi+WeRBzzGwEMBow4Hvunh/3yETqkbLyClZuKwiWtKiyZ8KXRhy5Z4JIsoj1oepyIBdoAQw1M9z9nfiFJZJ4lXsmzM/M450NeewtKo1pzwSRZBHLRLk7gLuBXsAqgpbEImBsfEMTqVuVeyZUroYauWfC5UO7HteeCSLJIJYWxN3AucBidx9jZkOA++Mblkjd2Fdcyrsf5wcDzNozQeQIsSSIYncvNjPMrLm7rzczbUArDcLslduP2B7z3z8/iDN6tQvHEvJYtnm39kwQqUYsCSLbzNoDswkW7NsD5MQ3LJGTN3vldu6b9QFFpeUAbC8o4p6XVh9+Rlt7JojULJanmK4Nf/yJmc0H2gFz4hqVSC341Zz1h5NDJQfap6Xy17sv1J4JIsdQY4IwsybAGnc/HcDdF9ZUX6Q+KC2v4C8rsskJl7Woam9RqZKDSAxqTBDuXmFmqyNnUovUV6XlFcx6P5uH5mWRvaeI1BSjtPzoSf89lBxEYhLLGER3YJ2ZLQUKKwvd/eq4RSVyHMrKK5i1cju/n5fF1t0HObNXO356zTD2Hizlh6+sPaKbKS01hanj9IyFSCxiSRB6pFXqpbLyCmavyuGheR+zZddBTu/Zlpk3Z3DpaemHJ6+Z2RFPMU0dN5gJw3smOHKRhiGWQWqNO0i9Ul7hvLpqOw/Ny+KT/EKGdm/L9JvO4fKhXY+a1TxheE8lBJETFMtM6v18tnprMyAVKHT3tvEMTKSq8grn9dU5/O4fH7Mpv5Ah3drw6I3nMG7Y0YlBRE5eLC2INpHHZjYBGBm3iKTRqzq57fuXDyIlxfjdPz5mY14hg7u24Q83jGDcsG6a4SwSR8e9A7q7zzaze+MRjEi0yW3ffzmY3Daoa2seuWEE45UYROpELF1MX4o4bAJkoA2DJE6mzc2MOrmtQ8tU5tx9kRKDSB2KpQVxVcTPZcBm4JpY3tzMxgMPAinATHf/RZQ6lwC/JRjbyHf3i8PyzcB+gqXGy9w9I5ZrSsOWU1AUtbzgYKmSg0gdi2UM4tYTeWMzSwEeBi4HsoFlZvaau38YUac98Agw3t23Rtnreow2J2pcerRPY3uUJKHJbSJ175irk5nZU+EHeeVxBzN7P
Ib3Hglkufsmdy8BXuTolsckYFblLG13z409dElGU8cNpnmVvZs1uU0kMWJZvvJMdy+oPHD3PcDwGF7XE9gWcZwdlkUaBHQwswVmtsLMbo4458DfwvJq98A2sylmttzMlufl5cUQltRnE4b3ZMyQzxqSPdun8fMvnaG5DCIJEMsYRBMz6xAmBsysY4yvi9ZhXHVwuylwDnApkAYsMrPF7r4BON/dc8Jup7fNbH20bU7dfTowHSAjI0OD50lgY+4BRvbvyEvfOC/RoYg0arG0IP4PeM/MfmZmPwXeA34Vw+uygd4Rx704eh+JbGCOuxeGYw3vAGcBuHtO+D0XeAXNvWgUMnfu5+PcA1x1ZvdEhyLS6B0zQbj708CXgU+BPOBL7v5MDO+9DBhoZv3NrBlwPfBalTqvAheaWVMzawmMAj4ys1Zm1gbAzFoBnwfWxnpT0nC9sSaHJgbjT1eCEEm0WOZBjAbWufvvw+M2ZjbK3ZfU9Dp3LzOzu4C5BI+5Pu7u68zszvD8o+7+kZnNAdYAFQSPwq41s1OAV8LlE5oCz7u7NilKcu7OG2t2cN6pnejSRlt+iiRaLGMJfwBGRBwXRimLyt3fBN6sUvZoleNpwLQqZZsIu5qk8ViXs49P8guZctEpiQ5FRIhtDMLc/fDgr7tXcAJLdIgcyxtrdpDSxBg/rFuiQxERYksQm8zsO2aWGn7dDWyKd2DSuATdSzmcP6AzHVo1S3Q4IkJsCeJO4HPAdoKnjkYBk+MZlDQ+q7P3kr2niCv19JJIvRHLUhu5BE8gAWBmacCVwMtxjEsamTdW55CaYowbqu4lkfoilhYEZpZiZleY2dPAJ8DX4huWNCYVFc6bH+zgooFdaNcyNdHhiEioxhaEmV1EsF7SF4GlwPnAKe5+sA5ik0Zi5bY95OwtZup4rbckUp9UmyDMLBvYSvBI61R3329mnyg5SG17ffUOmjVtwmWndU10KCISoaYupr8QLK73NeCqcEaz1jqSWlUedi+NGdyFNi3UvSRSn1SbINz9bqAf8GtgDLAB6GJmXzWz1nUTniS7ZZt3k7v/EF88s0eiQxGRKmocpPbAPHefTJAsJgETCHaVEzlpb6zJoUVqEy4dUnWvKBFJtJhnRLt7KfA68Hr4qKvISSkrr+CtD3Zy6ZCutGquyfki9U1Mj7lW5e7RNw4WOQ6LN+1mV2GJJseJ1FMnlCBEasMba3Jo1SzliB3kRKT+UIKQhCgtr2DOup1cNrQrLVJTEh2OiEQRy34Qg4CpQN/I+u4+No5xSZJ7NyufgoOlXKmnl0TqrVhGBl8GHgVmAOXxDUcaizfW7KBN86ZcNKhzokMRkWrEkiDK3P0PcY9EGoXZK7fzqznrydlbTFpqCm99sJMJw3smOiwRiSKWBPG6mX0TeAU4VFno7rvjFpUkpdkrt3PfrA8oKg0aokWl5dw36wMAJQmReiiWBPH18PvUiDIHtC+kHJdpczMPJ4dKRaXlTJubqQQhUg/Fsh9E/7oIRJJfTkH06TPVlYtIYsXyFFMq8G/ARWHRAuCP4cxqkZh1bNWMXYUlR5X3aK+J+SL1USxdTH8AUoFHwuObwrI74hWUJJ9/fZzP3qJSjCOXBE5LTWHqOO0DIVIfxZIgznX3syKO55nZ6ngFJMnnvY353P7UMgakt+am0X15ZMFGcgqK6NE+janjBmv8QaSeiiVBlJvZqe6+EcDMTkHzISRGSzbt4vYnl9O3U0ueu2MUnVo354bRfRMdlojEIJYEMRWYb2abACOYUX1rXKOSpLB8825ufXIZPdq34Lk7RtOpdfNEhyQixyGWp5j+YWYDgcEECWK9ux86xsukkXt/6x5ueWIZ3dq24IXJo+nSRslBpKGpaU/qse4+z8y+VOXUqWaGu8+Kc2zSQK3eVsDXH1tKp9bNeH7yaNLbtkh0SCJyAmpqQVwMzAOuinLOASUIOcra7Xu56bEltGuZyvOTR9OtnZKDSENVbYJw9x+HP/7U3T+JPGdmmjwnR/kwZx83PraENi1SeWHyaHpqfoNIgxbLfhB/iVL259oORBq2zJ37ufGxJaSlpvDC5NH07tgy0SGJyEmqaQxiCDAMaFdlHKItoH4DOSwrdz83zFxMaorx/OTR9Omk5CCSDGoagxgMXAm058hxiP3A5HgGJQ3HxrwDTJyxBLMgOfTv3CrRIYlILalpDOJV4FUzO8/dF9VhTNJAbM4vZNKMxVRUOC9OGc2pXVonOiQRqUWxTJRbaWbfIuhuOty15O63xS0qqfe27jrIxBmLKS13Xpg8moFd2yQ6JBGpZbEMUj8DdAPGAQuBXgTdTMdkZuPNLNPMsszs3mrqXGJmq8xsnZktrHIuxcxWmtkbsVxP6sa23UFyKCot59nbRzG4m5KDSDKKJUEMcPcfAYXu/hTwReCMY73IzFKAh4ErgKHARDMbWqVOe4JVYq9292HAV6q8zd3ARzHEKHUkp6CISTMXs7+4lGdvH8XQHm0THZKIxEksCaJy34cCMzsdaAf0i+F1I4Esd9/k7iXAi8A1VepMAma5+1YAd8+tPGFmvQiS0cwYriV1YOfeYibOWExBYSnP3D6K03u2S3RIIhJHsSSI6WbWAfgR8BrwIfCrGF7XE9gWcZwdlkUaBHQwswVmtsLMbo4491vgB0BFTRcxsylmttzMlufl5cUQlpyI3H3FTJqxmF0HSnjq9pGc1bt9okMSkTiLZbG+yt/gF3J8+1BbtLeLcv1zgEuBNGCRmS0mSBy57r7CzC45RnzTgekAGRkZVd9fakHe/kNMnLGYnfuKefq2kYzo0yHRIYlIHahpotw9Nb3Q3X99jPfOBnpHHPcCcqLUyXf3QqDQzN4BzgJGAFeb2RcInpxqa2bPuvuNx7im1LJdBw5xw8zF5BQU8+St55LRr2OiQxKROlJTF1Ob8CuDYE/qnuHXnQSDzseyDBhoZv3NrBlwPUEXVaRXgQvNrKmZtQRGAR+5+33u3svd+4Wvm6fkUPf2FJZww8wlbNl1kMe+nsGoUzolOiQRqUM1TZS7H8DM/gaMcPf94fFPgJeP9cbuXmZmdwFzgRTgcXdfZ2Z3hucfdfePzGwOsIZgrGGmu689yXuSWrD3YCk3PraETfmFPPb1DD43oHOiQxKROmbuNXfbm9l64KzKTYLMrDmw2t2H1EF8xyUjI8OXL1+e6DAavL1Fpdz02BLW79jP9JvP4ZLB6YkOSUTixMxWuHtGtHOxzKR+BlhqZq8QDDJfCzxdi/FJPbK/uJSvP76Uj3bs49EblRxEGrNYnmJ6wMzeAi4Mi25195XxDUsS4cChMm55Yhlrt+/l4RtGcOlpXRMdkogkUE1PMbV1931m1hHYHH5Vnuvo7rvjH57UlYMlZdz2xDJWbSvg9xOHM25Yt0SHJCIJVlML4nmC5b5XcOT8BQuPj2dOhNRjRSXl3PbkMpZv2c2D1w/nijO6JzokEakHanqK6crwu7YXTWLFpeVMfno5Sz7ZzW++ejZXndUj0SGJSD1RUxfTiJpe6O7v1344UpeKS8uZ8swK3t2Yz7TrzmLC
8KoroYhIY1ZTF9P/1XDOgbG1HIvUoUNl5Xzzufd5Z0Mev/zyGVx3Tq9EhyQi9UxNXUxj6jIQqTslZRXc9fxK5q3P5YFrT+dr5/ZJdEgiUg/FMg+CcJnvoRy5o5zmQjRApeUVfOeFlbz94af89Jph3DCqb6JDEpF66pgJwsx+DFxCkCDeJNgA6F9oslyDU1ZewXf/tIo563byoyuHcvN5/RIdkojUY7HsB3EdwXLcO939VoLVVpvHNSqpdeUVzvdfXs1f1+zgh18Ywu0X6OE0EalZLAmiyN0rgDIzawvkojkQDUp5hTP1z6t5dVUOU8cNZspFpyY6JBFpAGIZg1ge7h09g2DS3AFgaVyjklpTUeHcN2sNs97fzj2XD+JbYwYkOiQRaSBqmgfxe+B5d/9mWPRouDR3W3dfUyfRyUmpqHD+c/ZaXlqezXfGDuA7lw5MdEgi0n8j1W4AAA9FSURBVIDU1IL4GPg/M+sO/Al4wd1X1U1YcrLcnR+/to4Xlm7lm5ecyvcuH5TokESkgal2DMLdH3T384CLgd3AE2b2kZn9t5np06Yec3d++saHPLN4C1MuOoWp4wZjFm2LcBGR6h1zkNrdt7j7L919ODCJYD+Ij+IemZwQd+d/3/yIJ97dzK3n9+O+K4YoOYjICTlmgjCzVDO7ysyeA94CNgBfjntkctzcnV/OyWTGPz/h5vP68t9XDlVyEJETVtMg9eXAROCLBE8tvQhMcffCOopNjtOv397Aows3MmlUH+6/epiSg4iclJoGqX9IsCfEv2tzoPrvwb9/zEPzsvhaRm/+55rTlRxE5KRpsb4k8PD8LH7z9w18eUQvfv6lM2jSRMlBRE5eLDOppR7748KNTJubyYSze/Cr685UchCRWqME0YDN/Ocmfv7Weq48szv/7ytnkaLkICK1SAmigXrqvc38z18/4orTu/Hbr51N0xT9VYpI7dKnSgP07OIt/Pi1dVw+tCu/mzhcyUFE4kKfLA3Mi0u38l+z13LpkHQenjSCVCUHEYkTfbo0IC8v38Z9r3zAxYO68MiNI2jWVH99IhI/+oRpIF5Zmc0P/rKG80/tzB9vOofmTVMSHZKIJDkliAbgtdU5fP+l1Yzu34kZN2fQIlXJQUTiTwminvvrmh1870+ryOjXkcduySCtmZKDiNQNJYh6bO66ndz94kqG927P47ecS8tmsWwAKCJSO5Qg6qm/f/gpdz3/Pmf0ascTt55L6+ZKDiJSt5Qg6qH5mbl887n3Oa17W566bSRtWqQmOiQRaYSUIOqZdzbk8Y1nVjCwa2ueuW0UbZUcRCRB4pogzGy8mWWaWZaZ3VtNnUvMbJWZrTOzhWFZCzNbamarw/L74xlnffFuVj6Tn17OqV1a8+zto2jXUslBRBInbh3bZpYCPAxcDmQDy8zsNXf/MKJOe+ARYLy7bzWz9PDUIWCsux8ws1TgX2b2lrsvjle8ibZ40y5uf2oZfTu15NnbR9KhVbNEhyQijVw8WxAjgSx33+TuJQQ70l1Tpc4kYJa7bwVw99zwu7v7gbBOavjlcYw1oZZt3s1tTy6jV4eWPHfHaDq1bp7okERE4pogegLbIo6zw7JIg4AOZrbAzFaY2c2VJ8wsxcxWAbnA2+6+JNpFzGyKmS03s+V5eXm1fAvxt2LLHm55fCnd2rbg+TtG0aWNkoOI1A/xTBDRNieo2gpoCpxDsO/1OOBHZjYIwN3L3f1soBcw0sxOj3YRd5/u7hnuntGlS5fai74OrNpWwC2PL6Vzm+Y8P3k06W1bJDokEZHD4pkgsoHeEce9gJwodea4e6G75wPvAGdFVnD3AmABMD5+oda9D7L3cvNjS2jfKpUXJo+mWzslBxGpX+KZIJYBA82sv5k1A64HXqtS51XgQjNramYtgVHAR2bWJRzAxszSgMuA9XGMtU6ty9nLjY8toU2LIDn0aJ+W6JBERI4St6eY3L3MzO4C5gIpwOPuvs7M7gzPP+ruH5nZHGANUAHMdPe1ZnYm8FT4JFQT4CV3fyNesdal9Tv3cePMJbRqlsILk0fTq0PLRIckIhKVuSfPw0EZGRm+fPnyRIdRrY8/3c/10xfTNMX405Tz6Ne5VaJDEpFGzsxWuHtGtHOaSV1HsnIPMHHGEpo0MV6YPFrJQUTqPSWIOvBJfiGTZiwGnBcmj+KULq0THZKIyDFpidA427KrkInTF1NW4bw4ZTQD0tskOiQRkZioBRFH23YfZNKMJRSXlfPs7aMY1FXJQUQaDiWIONleUMTEGYvZX1zKs7ePYmiPtokOSUTkuKiLKQ527C1i4vTF7C0q5bk7RnF6z3aJDklE5LipBVHLPt1XzKQZS9hdWMLTt43kzF7tEx2SiMgJUYKoRbn7i5k4YzG5+4p56rZzGd6nQ6JDEhE5YepiqiX5Bw5xw4wl7Cgo5qnbRnJO346JDklE5KSoBVELdheWcOPMJWzbc5DHbzmXkf2VHESk4VOCOEkFB4Pk8El+ITNvPpfzTu2U6JBERGqFEsRJ2FtUyk2PLSUr9wDTb87ggoGdEx2SiEitUYI4QfuKS7n58aWs37mPR28awcWDGtZmRSIix6JB6uMwe+V2ps3NZHtBEc1SmlBWUcEfb8pg7JCuiQ5NRKTWKUHEaPbK7dw36wOKSssBKCmvoFlKEwoPlSU4MhGR+FAXU4ymzc08nBwqlZRXMG1uZoIiEhGJLyWIGG0vKIpanlNNuYhIQ6cEEYP563Oxas5pP2kRSVZKEDVwdx5ZkMVtTy2jR/s0mjc98o8rLTWFqeMGJyg6EZH4UoKoxsGSMu56YSW/mpPJF8/ozt/vuZhffvlMerZPw4Ce7dP4+ZfOYMLwnokOVUQkLvQUUxTbdh9kyjMrWL9zH/8xfgh3XnwKZsaE4T2VEESk0VCCqOK9rHy+9fz7lFU4T9xyLpcMTk90SCIiCdHoE0Tk5Ld2aansLy7llC6tmXFzBv07t0p0eCIiCdOoE0TVyW97i0ppYnDHBf2VHESk0WvUg9TRJr9VODw0LytBEYmI1B+NOkFUN8lNk99ERBp5gqhukpsmv4mINPIEMXXcYNJSU44o0+Q3EZFAox6krpzTMG1uJjkFRfRon8bUcYM110FEhEaeIABNfhMRqUaj7mISEZHqKUGIiEhUShAiIhKVEoSIiESlBCEiIlGZuyc6hlpjZnnAlmNU6wzk10E49Ulju2fdb3LT/dauvu7eJdqJpEoQsTCz5e6ekeg46lJju2fdb3LT/dYddTGJiEhUShAiIhJVY0wQ0xMdQAI0tnvW/SY33W8daXRjECIiEpvG2IIQEZEYKEGIiEhUjSpBmNl4M8s0sywzuzfR8dQ2M3vczHLNbG1EWUcze9vMPg6/d0hkjLXJzHqb2Xwz+8jM1pnZ3WF5Ut6zmbUws6Vmtjq83/vD8qS830pmlmJmK83sjfA42e93s5l9YGarzGx5WJaQe240CcLMUoCHgSuAocBEMxua2Khq3ZPA+Cpl9wL/cPeBwD/C42RRBnzf3U8DRgPfCv9Ok/WeDwFj3f0s4GxgvJmNJnnvt9L
dwEcRx8l+vwBj3P3siPkPCbnnRpMggJFAlrtvcvcS4EXgmgTHVKvc/R1gd5Xia4Cnwp+fAibUaVBx5O473P398Of9BB8iPUnSe/bAgfAwNfxykvR+AcysF/BFYGZEcdLebw0Scs+NKUH0BLZFHGeHZcmuq7vvgOADFUhPcDxxYWb9gOHAEpL4nsPullVALvC2uyf1/QK/BX4AVESUJfP9QpD0/2ZmK8xsSliWkHtuTDvKWZQyPeObBMysNfAX4Lvuvs8s2l91cnD3cuBsM2sPvGJmpyc6pngxsyuBXHdfYWaXJDqeOnS+u+eYWTrwtpmtT1QgjakFkQ30jjjuBeQkKJa69KmZdQcIv+cmOJ5aZWapBMnhOXefFRYn9T0DuHsBsIBgzClZ7/d84Goz20zQJTzWzJ4lee8XAHfPCb/nAq8QdI8n5J4bU4JYBgw0s/5m1gy4HngtwTHVhdeAr4c/fx14NYGx1CoLmgqPAR+5+68jTiXlPZtZl7DlgJmlAZcB60nS+3X3+9y9l7v3I/j/Os/dbyRJ7xfAzFqZWZvKn4HPA2tJ0D03qpnUZvYFgj7NFOBxd38gwSHVKjN7AbiEYHngT4EfA7OBl4A+wFbgK+5edSC7QTKzC4B/Ah/wWR/1DwnGIZLuns3sTIIByhSCX+5ecvefmlknkvB+I4VdTP/u7lcm8/2a2SkErQYIhgCed/cHEnXPjSpBiIhI7BpTF5OIiBwHJQgREYlKCUJERKJSghARkaiUIEREJColCGkwzOw3ZvbdiOO5ZjYz4vj/zOyeGl7/pJldF/68wMyO2gjezFLN7Bfhqplrw9VTrwjPbTazzicQ9+HrVnP+4XDlzg/NrCj8eZWZXWdmb1bOfahNZta9cnXUas43M7N3zKwxrbYgVShBSEPyHvA5ADNrQjDfY1jE+c8B757kNX4GdAdOd/fTgauANif5njVy92+5+9nAF4CN4SqeZ7v7n939C+Gs6dp2DzCjhphKCFYN/Vocri0NhBKENCTvEiYIgsSwFthvZh3MrDlwGrDSzP7bzJaFLYDpFuPiTGbWEpgMfNvdDwG4+6fu/lKUuveE77+2SqvmZjNbE+7Z8EyU1/0sbFHE9H+vstViZv3MbL2ZzQyv+ZyZXWZm74atnZFh/VYW7AuyzII9FKpbsfjLwJzwNcPCltKqMPaBYZ3ZwA2xxCnJSc1HaTDCBczKzKwPQaJYRLAi73nAXmCNu5eY2e/d/acA4Yf0lcDrMVxiALDV3ffVVMnMzgFuBUYRLAK5xMwWAiXAfxIstpZvZh2rvO5XQDvgVj+xGaoDgK8AUwiWjpkEXABcTTCDfEJ4/XnuflvYNbXUzP7u7oURcfQH9lQmQeBO4EF3fy5chiYlLF8LnHsCcUqSUAtCGprKVkRlglgUcfxeWGeMmS0xsw+AsRzZDVUbLgBecffCcH+GWcCF4bX+7O75AFWWQvgR0N7dv3GCyQHgE3f/wN0rgHUEG8g4wVIj/cI6nwfutWBJ8AVAC4LlGSJ1B/IijhcBPzSz/wD6untRGH85UFK5NpA0PkoQ0tBUjkOcQfAb7mKCFsTngHfNrAXwCHCdu59B0M/eIsb3zgL6xPCBWF2XlVH9EvLLgHOqtiqO06GInysijiv4rDfAgC9HjGP0cffI3dgAioj4M3H35wlaIUXAXDMbG1G3OVB8EjFLA6YEIQ3NuwRdRrvdvTz8Lb09QZJYxGcffPkW7BNR7dNDVbn7QYLVYX8XdrVUPu1zY5Wq7wATzKxluOLmtQSLBv4D+Gq4sBpVksEc4BfAX+P8G/lc4NuV4y5mNjxKnQ181uKoXCBuk7v/jmDV0DPD8k5AnruXxjFeqceUIKSh+YDg6aXFVcr2unt++MTPjLBsNsFv7sfjvwi6Xz40s7Xhe0R2xxBuc/oksJRg5diZ7r7S3dcBDwALzWw18Osqr3s5jO21cLnuePgZwVaka8L4f1a1QjgesdHMBoRFXwPWht1SQ4Cnw/IxwJtxilMaAK3mKtIImdm1wDnu/l811JkF3OfumXUXmdQneopJpBFy91cqu8KiCbvYZis5NG5qQYiISFQagxARkaiUIEREJColCBERiUoJQkREolKCEBGRqP4/CJ4U6RCkPy8AAAAASUVORK5CYII=\n" + "image/svg+xml": "\r\n\r\n\r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n 
\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEWCAYAAABxMXBSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAgAElEQVR4nO3de5xdVX338c83k4EMQkiQYENCDCqJopVEUqjiBag2EdQE7/B4Ka0CbWmptMFgiwV96Cs14u0RyQOUCq0oF2OIGokUTCgI5GJCLsTYSDFkkpJEiEAcCSS//rHXSU4OZ87sncyeOTPzfb9e85qz1157n9/ZhPObtdfaaykiMDMzy2tQbwdgZmZ9ixOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGHWzSS9RdK63o7DrCxOHNavSHpM0tt7M4aI+M+IGF/W+SVNlnSvpGckbZW0SNJ7yno/s1pOHGYFSWrpxfd+P3AbcBMwGngZ8Fng3ftxLknyd4AV5n80NiBIGiRphqRfSvq1pFslHVG1/zZJ/yPpN+mv+ddW7fumpGskzZe0AzgttWz+TtLKdMwtkoak+qdK2lh1fKd10/5LJG2WtEnSJySFpFfV+QwCvgR8PiKuj4jfRMTuiFgUEZ9MdS6X9O9Vx4xN5xucthdKulLS/cBvgc9IWlrzPp+SNC+9PljSFyVtkPSEpNmS2g7wP4f1cU4cNlD8NTANeBtwNPAUcHXV/h8BxwFHAT8DvlVz/DnAlcBhwH2p7IPAFOBY4PXAnzR4/7p1JU0BLgbeDrwqxdeZ8cAxwO0N6uTxUeA8ss/y/4Dxko6r2n8OcHN6/c/AOGBCim8UWQvHBjAnDhsozgf+PiI2RsRzwOXA+yt/iUfEDRHxTNW+EyQdXnX8HRFxf/oL/3ep7GsRsSkingS+T/bl2pnO6n4Q+NeIWBMRvwWuaHCOl6bfm3N/6vq+md7vhYj4DXAHcDZASiCvBualFs4ngU9FxJMR8QzwT8CHD/D9rY9z4rCB4uXA9yRtl7QdWAvsAl4mqUXSzHQb62ngsXTMkVXHP17nnP9T9fq3wKEN3r+zukfXnLve+1T8Ov0e2aBOHrXvcTMpcZC1NuamJDYCOARYVnXd7kzlNoA5cdhA8TjwzogYVvUzJCLayb4sp5LdLjocGJuOUdXxZU0jvZmsk7vimAZ115F9jvc1qLOD7Mu+4vfq1Kn9LD8GjpQ0gSyBVG5TbQM6gNdWXbPDI6JRgrQBwInD+qNWSUOqfgYDs4ErJb0cQNIISVNT/cOA58j+oj+E7HZMT7kVOFfSayQdQoP+g8jWQLgYuEzSuZKGpk7/N0u6NlVbAbxV0ph0q+3SrgKIiBfI+k1mAUcAd6Xy3cB1wJclHQUgaZSkyfv9aa1fcOKw/mg+2V/KlZ/Lga8C84AfS3oGeBA4OdW/CfgV0A48kvb1iIj4EfA14CfAeuCBtOu5TurfDnwI+FNgE/AE8H/J+imIiLuAW4CVwDLgBzlDuZmsxXVbSiQVn05xPZhu4/0HWSe9DWDyQk5mzUPSa4DVwME1X+BmTcMtDrNeJuksSQdJGk42/PX7ThrWzJw4zHrf+cBW4JdkI73+vHfDMWvMt6rMzKwQtzjMzKyQwb0dQE848sgjY+zYsb0dhplZn7Js2bJtEfGiBz4HROIYO3YsS5cu7bqimZntIelX9cp9q8rMzApx4jAzs0KcOMzMrJBSE4ekKZLWSVovaUYndU6VtELSGkmLava1SFou6QdVZZdLak/HrJB0RpmfwczM9lVa53haXvNq4B3ARmCJpHkR8UhVnWHAN4ApEbGhMpFalYvIpr8eWlP+5Yj4Ylmxm5lZ58pscZwErI+IRyNiJ/Adsqmrq50DzImIDQARsaWyQ9Jo4Ezg+hJjNDPrl+Yub+eUmfdw7IwfcsrMe5i7vL3bzl1m4hjFvgvGbExl1cYBw9M6yMskfaxq31eAS4Dddc59YVq/+YY0v8+LSDpP0lJJS7du3XoAH8PMrG+Zu7ydS+eson17BwG0b+/g0jmrui15lJk4VKesdn6TwcCJZC2LyWTrDIyT9C5gS0Qsq3OOa4BXki29uRm4qt6bR8S1ETEpIiaNGOEFy8xs4Ji1YB0dz+/ap6zj+V3MWrCuW85f5gOAG9l3NbPRZOsH1NbZFhE7gB2S7gVOAN4AvCd1fA8Bhkr694j4SEQ8UTlY0nXkX2/AzGxA2LS9o1B5UWW2OJYAx0k6VtJBZAvcz6upcwfwFkmD0+pnJwNrI+LSiBgdEWPTcfdExEcAJFWvt3wW2doFZmaWHD2srVB5UaUljrSewIXAArKRUbdGxBpJF0i6INVZC9xJtlrZYuD6iOgqEXxB0ipJK4HTgE+V9RnMzPqi6ZPH09bask9ZW2sL0yd3z+KNA2Ja9UmTJoXnqjKzgWTu8nYuuX0lO3ftZtSwNqZPHs+0ibXjkxqTtCwiJtWWD4hJDs3MBpppE0fx7cUbALjl/Dd267k95YiZmRXixGFmZoU4cZiZWSFOHGZmVogTh5mZFeLEYWZmhThxmJlZIU4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSFOHGZmVogTh5mZFeLEYWZmhThxmJlZIU4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSGDezsAMytu7vJ2Zi1Yx6btHRw9rI3pk8czbeKo3g7LBggnDrM+Zu7ydi6ds4qO53cB0L69g0vnrAJw8rAe4cRh1sfMWrBuT9Ko6Hh+F5fcvpJvL97QS1FZM3pk89McP3Jot5/XfRxmfcym7R11y3fu2t3DkVizO37kUKZO6P5WqFscZn3M0cPaaK+TPEYNa+OW89/YCxHZQOMWh1kyd3k7p8y8h2Nn/JBTZt7D3OXtvR1SXdMnj6ettWWfsrbWFqZPHt9LEdlA4xaH9Tv7M+KoL3U4V+K55P
aV7Ny1m1EeVWU9rNTEIWkK8FWgBbg+ImbWqXMq8BWgFdgWEW+r2tcCLAXaI+JdqewI4BZgLPAY8MGIeKrMz2EHpieHju5vAuiLHc4Htw5i4phhvj1lPa60xJG+9K8G3gFsBJZImhcRj1TVGQZ8A5gSERskHVVzmouAtUD1sIAZwN0RMVPSjLT96bI+hx2Ynv5Lfn8TQL0+A2juDueyOj7NulJmi+MkYH1EPAog6TvAVOCRqjrnAHMiYgNARGyp7JA0GjgTuBK4uOqYqcCp6fWNwEKcOJpWT/8lv78J4KCWQXXruMPZ7MXKTByjgMertjcCJ9fUGQe0SloIHAZ8NSJuSvu+AlySyqu9LCI2A0TE5jqtFAAknQecBzBmzJgD+Bh2IHp66Oj+JoDalhG4w9msM2UmDtUpizrvfyLwR0Ab8ICkB8kSypaIWJb6QAqLiGuBawEmTZpU+77WQ3p66Oj+JoDKbTNP42HWtTITx0bgmKrt0cCmOnW2RcQOYIeke4ETgDcA75F0BjAEGCrp3yPiI8ATkkam1sZIYAvWtKZPHt+jf8kfSAKYNnGUE4VZDooo549xSYOBX5C1JtqBJcA5EbGmqs5rgK8Dk4GDgMXAhyNidVWdU4G/qxpVNQv4dVXn+BERcUmjWCZNmhRLly7tzo9nBcxd3u6ho2Z9kKRlETGptry0FkdEvCDpQmAB2XDcGyJijaQL0v7ZEbFW0p3ASmA32ZDd1Z2fFYCZwK2S/gzYAHygrM9g3WPaxFF7OsLd0WzW95X6HEdEzAfm15TNrtmeBcxqcI6FZCOnKtu/JmvFmJlZL/CUI2ZmVogTh5mZFeLEYWZmhXSZONLcUGZmZkC+FsdDkm6TdIakeg/1mZnZAJIncYwjewL7o8B6Sf8kaVy5YZmZWbPqMnFE5q6IOBv4BPBxYLGkRZI8KN/MbIDp8jkOSS8FPkLW4ngC+CtgHjABuA04tswAzcysueR5APAB4N+AaRGxsap8qaTZnRxjZmb9VJ7EMT46mdAqIv65m+MxM7Mml6dz/MdppT4AJA2XtKDEmKzK3OXtnDLzHo6d8UNOmXkPc5e393ZIZjbA5WlxjIiI7ZWNiHiqs8WTrHv19LKrZmZ55EkcuySNqSzvKunlvHhBJitBTy+7WqZHNj/N8SOHdl3RzJpensTx98B9khal7beSlmS1cvX0sqtlOn7kUKZOcCvJrD/oMnFExJ2S3gD8IdlysJ+KiG2lR2Y9vuyqmVkeeSc53EW2ROtvgOMlvbW8kKxi+uTxtLW27FNW5rKrZmZ55HkA8BPARWRrhq8ga3k8AJxebmhW6QD3sqtm1kzy9HFcBPwB8GBEnCbp1cAV5YZlFV521cyaTZ5bVb+LiN8BSDo4In4O+F6JmdkAlafFsTE9ADgXuEvSU8CmcsMyM7NmlWdU1Vnp5eWSfgIcDtxZalRmZta0GiYOSYOAlRHxOoCIWNSovpmZ9X8N+zgiYjfwsKQxPRSPmZk1uTx9HCOBNZIWAzsqhRHxntKiGuDmLm9n1oJ1bNrewdHD2hjSOogjDz24t8MyMwPyJQ4Pve1B9SY2HOSV3s2sieTpHHe/Rg+qN7Hh7oDHn6w/b5WZWU/L8+T4M+ydDfcgoBXYERGe6rQE/WliQzPrn/K0OA6r3pY0DTiptIgGuEYTG5qZNYO8kxzuERFz8TxVpfHEhmbW7PLcqnpv1eYgYBJeyKk0ntjQzJpdnlFV7656/QLwGDC1lGgM8MSGZtbc8vRxnNsTgZiZWd/QZR+HpBvTJIeV7eGSbig3LDMza1Z5OsdfHxHbKxsR8RQwsbyQzMysmeVJHIMkDa9sSDqCfH0jZmbWD+VJHFcBP5X0eUmfA34KfCHPySVNkbRO0npJMzqpc6qkFZLWSFqUyoZIWizp4VR+RVX9yyW1p2NWSDojTyxmZtY98nSO3yRpKdmzGwLeGxGPdHWcpBbgauAdwEZgiaR51cemvpNvAFMiYoOko9Ku54DTI+JZSa3AfZJ+FBEPpv1fjogvFvicZmbWTfI8x/GHwJqI+HraPkzSyRHxUBeHngSsj4hH03HfIRvGW510zgHmRMQGgIjYkn4H8Gyq05p+/OyImVkTyHOr6hr2folDNrX6NTmOGwU8XrW9MZVVGwcMl7RQ0jJJH6vskNQiaQWwBbirJlFdKGmlpBuq+1+qSTpP0lJJS7du3ZojXDMzyyNP4lBqAQB7FnfK0zlebzLw2lbDYOBE4ExgMnCZpHHpfXZFxARgNHCSpNelY64BXglMADaT9cG8+I0iro2ISRExacSIETnCNTOzPPIkjkcl/bWk1vRzEfBojuM2AsdUbY8GNtWpc2dE7IiIbcC9wAnVFdJQ4IXAlLT9REoqu4Hr8ISLZmY9Kk/iuAB4E9BO9kV/MvDJHMctAY6TdKykg4APA/Nq6twBvEXSYEmHpHOvlTSi8tChpDbg7cDP0/bIquPPAlbniMXMzLpJnlFVW8i+9IE9X+TvAm7r4rgXJF0ILABagBsiYo2kC9L+2RGxVtKdwEpgN3B9RKyW9HrgxjQyaxBwa0T8IJ36C5ImkN32egw4v9AnNjOzA5LrQb70Bf7HwNnp9310kTgAImI+ML+mbHbN9ixgVk3ZSjp5Oj0iPponZjMzK0fDxCHprWRDZs8EFgOnAK+IiN/2QGxmZtaEOk0ckjYCG8hGMU2PiGck/beTRjnmLm9n1oJ1bNrewdHD2hjSOogjDz24t8MyM3uRRi2O7wLTgA8BuyTdgR/CK8Xc5e1cOmcVHc/vAqB9eweD6g1mNjNrAp2OqoqIi4CxwJeA04BfACMkfVDSoT0T3sAwa8G6PUmjYnfA40++eO1xM7Pe1nA4bmTuiYhPkiWRc8haIY+VH9rAsWl7/QSxc9fuHo7EzKxreZ7jACAino+I70fEOez7YJ8doKOHtdUtH9VJuZlZb8qdOKpFhO+hdKPpk8fT1tqyT1lbawvTJ4/vpYjMzDrnBZmawLSJ2dyPl9y+kp27djNqWBvTJ4/fU25m1kycOHKqHS7b3V/s0yaO4tuLNwBwy/lv7Lbzmpl1tzzrcYwDpgMvr64fEaeXGFdTqTdc9tI5qwDcKjCzASdPi+M2YDbZTLS7uqjbL9UbLtvx/C4uuX3lnlZCd3hk89McP3Jot53PzKwMeRLHCxGRZ+GmfqunhsseP3IoUye4BWNmzS1P4vi+pL8Avke2FjgAEfFkaVE1maOHtdFeJ3mMGtbm/ggzG3DyDMf9OFkfx0+BZelnaZlBNRsPlzUz2yvPehzH9kQgzczDZc3M9sozqqoV+HPgraloIfD/I+L5EuNqOh4ua2aWydPHcQ3QCnwjbX80lX2irKDMzKx55UkcfxARJ1Rt3yPp4bICMjOz5panc3yXpFdWNiS9ggH6PIeZmeVrcUwHfiLpUUBkT5CfW2pUZmbWtPKMqrpb0nHAeLLE8fOIeK6Lw8zMrJ9qtOb46RFxj6T31ux6pSQiYk7JsZmZWRNq1OJ4G3AP8O46+wJw4jAzG4A6TRwR8Y/p5eci4r+r90ka8A8FmpkNVHlGV
X23Ttnt3R2ImZn1DY36OF4NvBY4vKafYygwpOzAzMysOTXq4xgPvAsYxr79HM8AnywzKDMza16N+jjuAO6Q9MaIeKAHYzIzsyaW5wHA5ZL+kuy21Z5bVBHxp6VFZWZmTStP5/i/Ab8HTAYWAaPJbleZmdkAlCdxvCoiLgN2RMSNwJnA75cblpmZNas8iaOy7sZ2Sa8DDgfGlhaRmZk1tTx9HNdKGg5cBswDDgU+W2pUZmbWtPJMcnh9erkIeEW54ZiZWbNr9ADgxY0OjIgvdX84ZmbW7Br1cRyWfiaRrTk+Kv1cAByf5+SSpkhaJ2m9pBmd1DlV0gpJayQtSmVDJC2W9HAqv6Kq/hGS7pL0X+n38Hwf1czMukOniSMiroiIK4AjgTdExN9GxN8CJ5INyW1IUgtwNfBOskRztqTja+oMI1vL/D0R8VrgA2nXc8DpacnaCcAUSX+Y9s0A7o6I44C707aZmfWQPKOqxgA7q7Z3km9U1UnA+oh4NCJ2At8BptbUOQeYExEbACJiS/odEfFsqtOafiJtTwVuTK9vBKbliMXMzLpJ3gcAF0u6XNI/Ag8BN+U4bhTweNX2xlRWbRwwXNJCScskfayyQ1KLpBXAFuCuiHgo7XpZRGwGSL+Pqvfmks6TtFTS0q1bt+YI18zM8ugycUTElWRrjD8FbAfOjYh/ynFu1TtdzfZgsltfZ5I9mX6ZpHHpfXdFxASy22InpWdIcouIayNiUkRMGjFiRJFDzcysgUajqoZGxNOSjgAeSz+VfUdExJNdnHsjcEzV9mhgU5062yJiB7BD0r3ACcAvKhUiYrukhcAUYDXwhKSREbFZ0kiyFomZmfWQRi2Om9PvZcDSqp/KdleWAMdJOlbSQcCHyR4grHYH8BZJgyUdApwMrJU0InWcI6kNeDvw83TMPODj6fXH0znMzKyHNJpW/V3p934tExsRL0i6EFgAtAA3RMQaSRek/bMjYq2kO4GVwG7g+ohYLen1wI1pZNYg4NaI+EE69UzgVkl/Bmxg70gsMzPrAY1uVb2h0YER8bOuTh4R84H5NWWza7ZnAbNqylYCEzs556+BP+rqvc3MrByNphy5qsG+AE7v5ljMzKwPaHSr6rSeDMTMzPqGPLPjkobCHs++KwDmeZbDzMz6mS4TR3ro71SyxDGfbAqR+8j3EKCZmfUzeZ4cfz9ZZ/T/RMS5ZM9ZHFxqVGZm1rTyJI6OiNgNvCBpKNkDd16Xw8xsgMrTx7E0PYx3HdnDf88Ci0uNyszMmlaj5zi+DtwcEX+Rimanh/WGpucszMxsAGrU4vgv4Ko0H9QtwLcjYkXPhGVmZs2q0UJOX42INwJvA54E/lXSWkmfrcxga2ZmA0+eadV/FRH/HBETyRZeOgtYW3pkZmbWlLpMHJJaJb1b0reAH5FNef6+0iMzM7Om1Khz/B3A2WSLLC0mW/r1vLR2hpmZDVCNOsc/Q7Ymx9/lWLTJzMwGCE9yaGZmheR5ctzMzGwPJw4zMyvEicPMzApx4jAzs0KcOMzMrBAnDjMzK8SJw8zMCnHiMDOzQpw4zMysECcOMzMrxInDzMwKceIwM7NCnDjMzKwQJw4zMyvEicPMzApx4jAzs0KcOMzMrBAnDjMzK8SJw8zMCnHiMDOzQkpNHJKmSFonab2kGZ3UOVXSCklrJC1KZcdI+omktan8oqr6l0tqT8eskHRGmZ/BzMz2NbisE0tqAa4G3gFsBJZImhcRj1TVGQZ8A5gSERskHZV2vQD8bUT8TNJhwDJJd1Ud++WI+GJZsZuZWefKbHGcBKyPiEcjYifwHWBqTZ1zgDkRsQEgIrak35sj4mfp9TPAWmBUibGamVlOZSaOUcDjVdsbefGX/zhguKSFkpZJ+ljtSSSNBSYCD1UVXyhppaQbJA2v9+aSzpO0VNLSrVu3HsjnMDOzKmUmDtUpi5rtwcCJwJnAZOAySeP2nEA6FPgu8DcR8XQqvgZ4JTAB2AxcVe/NI+LaiJgUEZNGjBhxQB/EzMz2Kq2Pg6yFcUzV9mhgU5062yJiB7BD0r3ACcAvJLWSJY1vRcScygER8UTltaTrgB+UFL+ZmdVRZotjCXCcpGMlHQR8GJhXU+cO4C2SBks6BDgZWCtJwL8AayPiS9UHSBpZtXkWsLq0T2BmZi9SWosjIl6QdCGwAGgBboiINZIuSPtnR8RaSXcCK4HdwPURsVrSm4GPAqskrUin/ExEzAe+IGkC2W2vx4Dzy/oMZmb2YmXeqiJ90c+vKZtdsz0LmFVTdh/1+0iIiI92c5hmZlaAnxw3M7NCnDjMzKwQJw4zMyvEicPMzApx4jAzs0KcOMzMrBAnDjMzK8SJw8zMCnHiMDOzQpw4zMysECcOMzMrxInDzMwKceIwM7NCnDjMzKwQJw4zMyuk1PU4+rK5y9uZtWAdm7Z3cPSwNqZPHt/bIZmZNQW3OOqYu7ydS+eson17BwG0b+/g0jmr2Pbsc70dmplZr3PiqGPWgnV0PL9rn7KO53fx6NYdvRSRmVnzcOKoY9P2jrrlAUydMKpngzEzazJOHHUcPaytbvmoYW2cc/KYHo7GzKy5OHHUMX3yeNpaW/Ypa2ttcQe5mRkeVVXXtInZ7ajaUVWVcjOzgcyJoxPTJo5yojAzq8O3qszMrBAnDjMzK8SJw8zMCnHiMDOzQpw4zMysEEVEb8dQOklbgV8VOORIYFtJ4ZTFMfcMx9wzHHPP6Crml0fEiNrCAZE4ipK0NCIm9XYcRTjmnuGYe4Zj7hn7G7NvVZmZWSFOHGZmVogTR33X9nYA+8Ex9wzH3DMcc8/Yr5jdx2FmZoW4xWFmZoU4cZiZWSFOHFUkTZG0TtJ6STN6O548JD0maZWkFZKW9nY89Ui6QdIWSauryo6QdJek/0q/h/dmjPV0EvflktrT9V4h6YzejLGapGMk/UTSWklrJF2Uypv2WjeIuZmv8xBJiyU9nGK+IpU37XWGhnEXvtbu40gktQC/AN4BbASWAGdHxCO9GlgXJD0GTIqIpn3wSNJbgWeBmyLidansC8CTETEzJenhEfHp3oyzVidxXw48GxFf7M3Y6pE0EhgZET+TdBiwDJgG/AlNeq0bxPxBmvc6C3hJRDwrqRW4D7gIeC9Nep2hYdxTKHit3eLY6yRgfUQ8GhE7ge8AU3s5pn4hIu4FnqwpngrcmF7fSPZl0VQ6ibtpRcTmiPhZev0MsBYYRRNf6wYxN63IPJs2W9NP0MTXGRrGXZgTx16jgMertjfS5P+AkwB+LGmZpPN6O5gCXhYRmyH78gCO6uV4irhQ0sp0K6upbkdUSBoLTAQeoo9c65qYoYmvs6QWSSuALcBdEdEnrnMncUPBa+3EsZfqlPWF+3inRMQbgHcCf5lur1h5rgFeCUwANgNX9W44LybpUOC7wN9ExNO9HU8edWJu6uscEbsiYgIwGjhJ0ut6O6Y8Oom78LV24thrI3BM1fZoYFMvxZJbRGxKv7cA3yO75dYXPJHub1fuc2/p5XhyiYgn0v98u4HraLLrne5dfxf4VkTMScVNfa3rxdzs
17kiIrYDC8n6CZr6Olerjnt/rrUTx15LgOMkHSvpIODDwLxejqkhSS9JHYpIegnwx8Dqxkc1jXnAx9PrjwN39GIsuVW+GJKzaKLrnTo//wVYGxFfqtrVtNe6s5ib/DqPkDQsvW4D3g78nCa+ztB53PtzrT2qqkoahvYVoAW4ISKu7OWQGpL0CrJWBsBg4OZmjFnSt4FTyaZwfgL4R2AucCswBtgAfCAimqojupO4TyVr0gfwGHB+5b52b5P0ZuA/gVXA7lT8GbI+g6a81g1iPpvmvc6vJ+v8biH74/vWiPicpJfSpNcZGsb9bxS81k4cZmZWiG9VmZlZIU4cZmZWiBOHmZkV4sRhZmaFOHGYmVkhThzW50n6sqS/qdpeIOn6qu2rJF3c4PhvSnp/er1Q0qQ6dVolzUwzn65Os4y+M+17TNKR+xH3nvftZP/VabbSRyR1VM1e+n5J8ytj8ruTpJGSftBg/0GS7pU0uLvf2/oOJw7rD34KvAlA0iCy5y5eW7X/TcD9B/genwdGAq9LM+W+GzjsAM/ZUET8ZZoe4gzglxExIf3cHhFnpKd/u9vFZE8PdxbTTuBu4EMlvLf1EU4c1h/cT0ocZAljNfCMpOGSDgZeAyyX9FlJS1KL4dr01HKXJB0CfBL4q4h4DvZMiXFrnboXp/OvrmkFfSxNIvdweuCq9rjPpxZIrv8nK60cSWMl/VzS9ek9vyXp7ZLuT62jk1L9l6QJ7JZIWi6ps5mf3wfcmY55bWpZrUixH5fqzAX+T544rX9yc9P6vIjYJOkFSWPIEsgDZDMbvxH4DbAyInZK+npEfA4gfXm/C/h+jrd4FbChqwkDJZ0InAucTDZp5kOSFgE7gb8nm5Bym6Qjao77AnA4cG7s3xO5rwI+AJxHNnXOOcCbgfeQPYU9Lb3/PRHxp+kW12JJ/xERO6riOBZ4qpIcgQuAr0bEt9I0PC2pfDXwB/sRp/UTbnFYf1FpdVQSx0HVZTAAAAIUSURBVANV2z9NdU6T9JCkVcDp7Hs7qzu8GfheROxI6x7MAd6S3uv2ymJbNdNQXAYMi4jz9zNpAPx3RKxKk9StAe5O51oFjE11/hiYoWxK7YXAELKpMaqNBLZWbT8AfEbSp4GXR0RHin8XsLMyT5oNPE4c1l9U+jl+n+wv4gfJWhxvAu6XNAT4BvD+iPh9svv4Q3Keez0wJscXZWe3vkTnU/QvAU6sbYUU9FzV691V27vZe1dBwPuq+knGRMTamvN0UHVNIuJmslZLB7BA0ulVdQ8GfncAMVsf5sRh/cX9ZLeenkxTRD8JDCNLHg+w9wtxm7K1HzodzVQrIn5LNoPr19Itm8roo4/UVL0XmCbpkDRb8VlkE/jdDXwwTYJHTZK4E5gJ/LDkv+AXAH9V6deRNLFOnV+wt4VSmUTz0Yj4GtnMr69P5S8FtkbE8yXGa03MicP6i1Vko6kerCn7TURsSyOQrktlc8n+0i/iH8hu4zwiaXU6R/VtHdISqN8EFpPNSHt9RCyPiDXAlcAiSQ8DX6o57rYU27w03XUZPk+2VOjKFP/nayuk/o5fSnpVKvoQsDrd3no1cFMqPw2YX1Kc1gd4dlwz20PSWcCJEfEPDerMAS6NiHU9F5k1E4+qMrM9IuJ7lVtq9aRbdXOdNAY2tzjMzKwQ93GYmVkhThxmZlaIE4eZmRXixGFmZoU4cZiZWSH/CwSPkN84qmWtAAAAAElFTkSuQmCC\n" }, "metadata": { "needs_background": "light" @@ -401,7 +671,7 @@ "plt.xlabel('Wall Clock Time (s)')\n", "plt.ylabel('Validation Accuracy')\n", "plt.scatter(time_history, 1-np.array(valid_loss_history))\n", - "plt.plot(time_history, 1-np.array(best_valid_loss_history))\n", + "plt.step(time_history, 1-np.array(best_valid_loss_history), where='post')\n", "plt.show()" ] }, @@ -450,7 +720,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 13, "metadata": { "slideshow": { "slide_type": "slide" @@ -555,7 +825,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 14, "metadata": { "slideshow": { "slide_type": "slide" @@ -569,7 +839,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 15, "metadata": { "slideshow": { "slide_type": "slide" @@ -581,121 +851,158 @@ "output_type": "stream", "name": "stderr", "text": [ - "[flaml.automl: 01-31 05:28:44] {816} INFO - Evaluation method: holdout\n", - "[flaml.automl: 01-31 05:28:45] {541} INFO - Using StratifiedKFold\n", - "[flaml.automl: 01-31 05:28:45] {837} INFO - Minimizing error metric: 1-accuracy\n", - "[flaml.automl: 01-31 05:28:45] {857} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", - "[flaml.automl: 01-31 05:28:45] {916} INFO - iteration 0 current learner RGF\n", - "[flaml.automl: 01-31 05:28:46] {1046} INFO - at 1.2s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", - "[flaml.automl: 01-31 05:28:46] {916} INFO - iteration 1 current learner RGF\n", - "[flaml.automl: 01-31 05:28:46] {1046} INFO - at 2.1s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", - "[flaml.automl: 01-31 05:28:46] {916} INFO - iteration 2 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:47] {1046} INFO - at 2.2s,\tbest lgbm's error=0.3777,\tbest 
lgbm's error=0.3777\n", - "[flaml.automl: 01-31 05:28:47] {916} INFO - iteration 3 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:47] {1046} INFO - at 2.3s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", - "[flaml.automl: 01-31 05:28:47] {916} INFO - iteration 4 current learner RGF\n", - "[flaml.automl: 01-31 05:28:47] {1046} INFO - at 3.1s,\tbest RGF's error=0.3787,\tbest lgbm's error=0.3777\n", - "[flaml.automl: 01-31 05:28:47] {916} INFO - iteration 5 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:47] {1046} INFO - at 3.1s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", - "[flaml.automl: 01-31 05:28:47] {916} INFO - iteration 6 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.2s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 7 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.3s,\tbest lgbm's error=0.3662,\tbest lgbm's error=0.3662\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 8 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.3s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 9 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.4s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 10 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.5s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 11 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.5s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 12 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.6s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 13 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.7s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 14 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.7s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 15 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:48] {1046} INFO - at 3.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:48] {916} INFO - iteration 16 current learner RGF\n", - "[flaml.automl: 01-31 05:28:49] {1046} INFO - at 4.8s,\tbest RGF's error=0.3719,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:49] {916} INFO - iteration 17 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:50] {1046} INFO - at 5.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:50] {916} INFO - iteration 18 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:50] {1046} INFO - at 5.9s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:50] {916} INFO - iteration 19 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:50] {1046} INFO - at 6.0s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:50] {916} INFO - iteration 20 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:50] {1046} INFO - at 
6.0s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:50] {916} INFO - iteration 21 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:50] {1046} INFO - at 6.1s,\tbest xgboost's error=0.3768,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:50] {916} INFO - iteration 22 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.2s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 23 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.3s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 24 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.4s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 25 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.5s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 26 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.6s,\tbest xgboost's error=0.3621,\tbest lgbm's error=0.3621\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 27 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 6.8s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 28 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:51] {1046} INFO - at 7.1s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 01-31 05:28:51] {916} INFO - iteration 29 current learner lgbm\n", - "[flaml.automl: 01-31 05:28:52] {1046} INFO - at 7.9s,\tbest lgbm's error=0.3618,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 01-31 05:28:52] {916} INFO - iteration 30 current learner RGF\n", - "[flaml.automl: 01-31 05:28:53] {1046} INFO - at 8.9s,\tbest RGF's error=0.3719,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 01-31 05:28:53] {916} INFO - iteration 31 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:54] {1046} INFO - at 9.3s,\tbest xgboost's error=0.3611,\tbest xgboost's error=0.3611\n", - "[flaml.automl: 01-31 05:28:54] {916} INFO - iteration 32 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:54] {1046} INFO - at 10.0s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 01-31 05:28:54] {916} INFO - iteration 33 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:55] {1046} INFO - at 10.6s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 01-31 05:28:55] {916} INFO - iteration 34 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:56] {1046} INFO - at 11.5s,\tbest xgboost's error=0.3523,\tbest xgboost's error=0.3523\n", - "[flaml.automl: 01-31 05:28:56] {916} INFO - iteration 35 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:58] {1046} INFO - at 13.2s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:28:58] {916} INFO - iteration 36 current learner rf\n", - "[flaml.automl: 01-31 05:28:58] {1046} INFO - at 13.8s,\tbest rf's error=0.4023,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:28:58] {916} INFO - iteration 37 current learner rf\n", - "[flaml.automl: 01-31 05:28:59] {1046} INFO - at 14.2s,\tbest rf's error=0.4011,\tbest xgboost's 
error=0.3503\n", - "[flaml.automl: 01-31 05:28:59] {916} INFO - iteration 38 current learner xgboost\n", - "[flaml.automl: 01-31 05:28:59] {1046} INFO - at 15.0s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:28:59] {916} INFO - iteration 39 current learner xgboost\n", - "[flaml.automl: 01-31 05:29:03] {1046} INFO - at 18.2s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:29:03] {916} INFO - iteration 40 current learner xgboost\n", - "[flaml.automl: 01-31 05:29:06] {1046} INFO - at 21.2s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:29:06] {916} INFO - iteration 41 current learner xgboost\n", - "[flaml.automl: 01-31 05:29:08] {1046} INFO - at 23.4s,\tbest xgboost's error=0.3503,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:29:08] {916} INFO - iteration 42 current learner RGF\n", - "[flaml.automl: 01-31 05:29:09] {1046} INFO - at 24.5s,\tbest RGF's error=0.3719,\tbest xgboost's error=0.3503\n", - "[flaml.automl: 01-31 05:29:09] {916} INFO - iteration 43 current learner xgboost\n", - "[flaml.automl: 01-31 05:29:18] {1046} INFO - at 33.6s,\tbest xgboost's error=0.3408,\tbest xgboost's error=0.3408\n", - "[flaml.automl: 01-31 05:29:18] {916} INFO - iteration 44 current learner xgboost\n", - "[flaml.automl: 01-31 05:29:32] {1046} INFO - at 47.3s,\tbest xgboost's error=0.3345,\tbest xgboost's error=0.3345\n", - "[flaml.automl: 01-31 05:29:32] {916} INFO - iteration 45 current learner rf\n", - "[flaml.automl: 01-31 05:29:32] {1046} INFO - at 47.7s,\tbest rf's error=0.4011,\tbest xgboost's error=0.3345\n", - "[flaml.automl: 01-31 05:29:32] {916} INFO - iteration 46 current learner RGF\n", - "[flaml.automl: 01-31 05:29:32] {1048} INFO - no enough budget for learner RGF\n", - "[flaml.automl: 01-31 05:29:32] {916} INFO - iteration 47 current learner rf\n", - "[flaml.automl: 01-31 05:29:32] {1048} INFO - no enough budget for learner rf\n", - "[flaml.automl: 01-31 05:29:32] {916} INFO - iteration 48 current learner lgbm\n", - "[flaml.automl: 01-31 05:29:32] {1048} INFO - no enough budget for learner lgbm\n", - "[flaml.automl: 01-31 05:29:32] {1086} INFO - selected model: XGBClassifier(base_score=0.5, booster='gbtree',\n", - " colsample_bylevel=0.9421222097860765, colsample_bynode=1,\n", - " colsample_bytree=0.9986336418953021, gamma=0, gpu_id=-1,\n", - " grow_policy='lossguide', importance_type='gain',\n", - " interaction_constraints=None, learning_rate=0.16476442995703428,\n", - " max_delta_step=0, max_depth=0, max_leaves=85,\n", - " min_child_weight=2.8366848012228014, missing=nan,\n", - " monotone_constraints=None, n_estimators=84, n_jobs=-1,\n", - " num_parallel_tree=1, random_state=0,\n", - " reg_alpha=5.566263839755687e-07, reg_lambda=0.6128658162970646,\n", - " scale_pos_weight=1, subsample=0.978338719375802,\n", - " tree_method='hist', validate_parameters=False, verbosity=0)\n", - "[flaml.automl: 01-31 05:29:32] {871} INFO - fit succeeded\n" + "[flaml.automl: 02-17 13:58:01] {839} INFO - Evaluation method: holdout\n", + "INFO - Evaluation method: holdout\n", + "[flaml.automl: 02-17 13:58:01] {564} INFO - Using StratifiedKFold\n", + "INFO - Using StratifiedKFold\n", + "[flaml.automl: 02-17 13:58:01] {860} INFO - Minimizing error metric: 1-accuracy\n", + "INFO - Minimizing error metric: 1-accuracy\n", + "[flaml.automl: 02-17 13:58:01] {880} INFO - List of ML learners in AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", + "INFO - List of ML learners in 
AutoML Run: ['RGF', 'lgbm', 'rf', 'xgboost']\n", + "[flaml.automl: 02-17 13:58:01] {939} INFO - iteration 0 current learner RGF\n", + "INFO - iteration 0 current learner RGF\n", + "[flaml.automl: 02-17 13:58:02] {1093} INFO - at 1.4s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "INFO - at 1.4s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "[flaml.automl: 02-17 13:58:02] {939} INFO - iteration 1 current learner RGF\n", + "INFO - iteration 1 current learner RGF\n", + "[flaml.automl: 02-17 13:58:04] {1093} INFO - at 2.9s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "INFO - at 2.9s,\tbest RGF's error=0.3787,\tbest RGF's error=0.3787\n", + "[flaml.automl: 02-17 13:58:04] {939} INFO - iteration 2 current learner lgbm\n", + "INFO - iteration 2 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:04] {1093} INFO - at 3.1s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "INFO - at 3.1s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 02-17 13:58:04] {939} INFO - iteration 3 current learner lgbm\n", + "INFO - iteration 3 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:04] {1093} INFO - at 3.3s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "INFO - at 3.3s,\tbest lgbm's error=0.3777,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 02-17 13:58:04] {939} INFO - iteration 4 current learner RGF\n", + "INFO - iteration 4 current learner RGF\n", + "[flaml.automl: 02-17 13:58:06] {1093} INFO - at 4.8s,\tbest RGF's error=0.3787,\tbest lgbm's error=0.3777\n", + "INFO - at 4.8s,\tbest RGF's error=0.3787,\tbest lgbm's error=0.3777\n", + "[flaml.automl: 02-17 13:58:06] {939} INFO - iteration 5 current learner lgbm\n", + "INFO - iteration 5 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:06] {1093} INFO - at 5.0s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", + "INFO - at 5.0s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", + "[flaml.automl: 02-17 13:58:06] {939} INFO - iteration 6 current learner lgbm\n", + "INFO - iteration 6 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:06] {1093} INFO - at 5.2s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", + "INFO - at 5.2s,\tbest lgbm's error=0.3669,\tbest lgbm's error=0.3669\n", + "[flaml.automl: 02-17 13:58:06] {939} INFO - iteration 7 current learner lgbm\n", + "INFO - iteration 7 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:06] {1093} INFO - at 5.3s,\tbest lgbm's error=0.3662,\tbest lgbm's error=0.3662\n", + "INFO - at 5.3s,\tbest lgbm's error=0.3662,\tbest lgbm's error=0.3662\n", + "[flaml.automl: 02-17 13:58:06] {939} INFO - iteration 8 current learner lgbm\n", + "INFO - iteration 8 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 5.7s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", + "INFO - at 5.7s,\tbest lgbm's error=0.3636,\tbest lgbm's error=0.3636\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 9 current learner lgbm\n", + "INFO - iteration 9 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 5.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 5.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 10 current learner lgbm\n", + "INFO - iteration 10 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 6.2s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 6.2s,\tbest lgbm's error=0.3621,\tbest lgbm's 
error=0.3621\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 11 current learner lgbm\n", + "INFO - iteration 11 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 6.3s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 6.3s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 12 current learner lgbm\n", + "INFO - iteration 12 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 6.4s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 6.4s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 13 current learner lgbm\n", + "INFO - iteration 13 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:07] {1093} INFO - at 6.6s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 6.6s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:07] {939} INFO - iteration 14 current learner lgbm\n", + "INFO - iteration 14 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:08] {1093} INFO - at 6.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 6.9s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:08] {939} INFO - iteration 15 current learner lgbm\n", + "INFO - iteration 15 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:09] {1093} INFO - at 8.6s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "INFO - at 8.6s,\tbest lgbm's error=0.3621,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:09] {939} INFO - iteration 16 current learner xgboost\n", + "INFO - iteration 16 current learner xgboost\n", + "[flaml.automl: 02-17 13:58:10] {1093} INFO - at 8.7s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "INFO - at 8.7s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3621\n", + "[flaml.automl: 02-17 13:58:10] {939} INFO - iteration 17 current learner lgbm\n", + "INFO - iteration 17 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:12] {1093} INFO - at 10.7s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "INFO - at 10.7s,\tbest lgbm's error=0.3611,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:12] {939} INFO - iteration 18 current learner xgboost\n", + "INFO - iteration 18 current learner xgboost\n", + "[flaml.automl: 02-17 13:58:12] {1093} INFO - at 10.9s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3611\n", + "INFO - at 10.9s,\tbest xgboost's error=0.3787,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:12] {939} INFO - iteration 19 current learner xgboost\n", + "INFO - iteration 19 current learner xgboost\n", + "[flaml.automl: 02-17 13:58:12] {1093} INFO - at 11.0s,\tbest xgboost's error=0.3757,\tbest lgbm's error=0.3611\n", + "INFO - at 11.0s,\tbest xgboost's error=0.3757,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:12] {939} INFO - iteration 20 current learner xgboost\n", + "INFO - iteration 20 current learner xgboost\n", + "[flaml.automl: 02-17 13:58:12] {1093} INFO - at 11.1s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3611\n", + "INFO - at 11.1s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:12] {939} INFO - iteration 21 current learner rf\n", + "INFO - iteration 21 current learner rf\n", + "[flaml.automl: 02-17 13:58:13] {1093} INFO - at 11.8s,\tbest rf's error=0.4012,\tbest lgbm's error=0.3611\n", + "INFO - 
at 11.8s,\tbest rf's error=0.4012,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:13] {939} INFO - iteration 22 current learner RGF\n", + "INFO - iteration 22 current learner RGF\n", + "[flaml.automl: 02-17 13:58:14] {1093} INFO - at 13.2s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3611\n", + "INFO - at 13.2s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3611\n", + "[flaml.automl: 02-17 13:58:14] {939} INFO - iteration 23 current learner lgbm\n", + "INFO - iteration 23 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:16] {1093} INFO - at 14.7s,\tbest lgbm's error=0.3585,\tbest lgbm's error=0.3585\n", + "INFO - at 14.7s,\tbest lgbm's error=0.3585,\tbest lgbm's error=0.3585\n", + "[flaml.automl: 02-17 13:58:16] {939} INFO - iteration 24 current learner rf\n", + "INFO - iteration 24 current learner rf\n", + "[flaml.automl: 02-17 13:58:16] {1093} INFO - at 15.3s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3585\n", + "INFO - at 15.3s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3585\n", + "[flaml.automl: 02-17 13:58:16] {939} INFO - iteration 25 current learner xgboost\n", + "INFO - iteration 25 current learner xgboost\n", + "[flaml.automl: 02-17 13:58:16] {1093} INFO - at 15.5s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3585\n", + "INFO - at 15.5s,\tbest xgboost's error=0.3756,\tbest lgbm's error=0.3585\n", + "[flaml.automl: 02-17 13:58:16] {939} INFO - iteration 26 current learner lgbm\n", + "INFO - iteration 26 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:18] {1093} INFO - at 16.9s,\tbest lgbm's error=0.3585,\tbest lgbm's error=0.3585\n", + "INFO - at 16.9s,\tbest lgbm's error=0.3585,\tbest lgbm's error=0.3585\n", + "[flaml.automl: 02-17 13:58:18] {939} INFO - iteration 27 current learner lgbm\n", + "INFO - iteration 27 current learner lgbm\n", + "[flaml.automl: 02-17 13:58:21] {1093} INFO - at 19.6s,\tbest lgbm's error=0.3531,\tbest lgbm's error=0.3531\n", + "INFO - at 19.6s,\tbest lgbm's error=0.3531,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:58:21] {939} INFO - iteration 28 current learner rf\n", + "INFO - iteration 28 current learner rf\n", + "[flaml.automl: 02-17 13:58:21] {1093} INFO - at 20.3s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3531\n", + "INFO - at 20.3s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:58:21] {939} INFO - iteration 29 current learner rf\n", + "INFO - iteration 29 current learner rf\n", + "[flaml.automl: 02-17 13:58:22] {1093} INFO - at 20.9s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3531\n", + "INFO - at 20.9s,\tbest rf's error=0.3977,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:58:22] {939} INFO - iteration 30 current learner RGF\n", + "INFO - iteration 30 current learner RGF\n", + "[flaml.automl: 02-17 13:58:23] {1093} INFO - at 21.9s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "INFO - at 21.9s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:58:23] {939} INFO - iteration 31 current learner RGF\n", + "INFO - iteration 31 current learner RGF\n", + "[flaml.automl: 02-17 13:58:24] {1093} INFO - at 23.3s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "INFO - at 23.3s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:58:24] {939} INFO - iteration 32 current learner RGF\n", + "INFO - iteration 32 current learner RGF\n", + "[flaml.automl: 02-17 13:59:08] {1093} INFO - at 67.1s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "INFO - at 
67.1s,\tbest RGF's error=0.3674,\tbest lgbm's error=0.3531\n", + "[flaml.automl: 02-17 13:59:08] {1133} INFO - selected model: LGBMClassifier(learning_rate=0.1564464373197609, max_bin=511,\n", + " min_child_weight=1.4188300323104601, n_estimators=12,\n", + " num_leaves=45, objective='binary',\n", + " reg_alpha=3.209664512322882e-10, reg_lambda=0.8927146483558472,\n", + " subsample=0.96058565726185)\n", + "INFO - selected model: LGBMClassifier(learning_rate=0.1564464373197609, max_bin=511,\n", + " min_child_weight=1.4188300323104601, n_estimators=12,\n", + " num_leaves=45, objective='binary',\n", + " reg_alpha=3.209664512322882e-10, reg_lambda=0.8927146483558472,\n", + " subsample=0.96058565726185)\n", + "[flaml.automl: 02-17 13:59:08] {894} INFO - fit succeeded\n", + "INFO - fit succeeded\n" ] } ], @@ -713,17 +1020,158 @@ "'''The main flaml automl API'''\n", "automl.fit(X_train = X_train, y_train = y_train, **settings)" ] + }, + { + "source": [ + "## 4. Comparison with alternatives\n", + "\n", + "### FLAML's accuracy" + ], + "cell_type": "markdown", + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "flaml accuracy = 0.6721222728149148\n" + ] + } + ], + "source": [ + "print('flaml accuracy', '=', 1 - sklearn_metric_loss_score('accuracy', y_pred, y_test))" + ] + }, + { + "source": [ + "### Default LightGBM" + ], + "cell_type": "markdown", + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "from lightgbm import LGBMClassifier\n", + "lgbm = LGBMClassifier()" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "LGBMClassifier()" + ] + }, + "metadata": {}, + "execution_count": 18 + } + ], + "source": [ + "lgbm.fit(X_train, y_train)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "default lgbm accuracy = 0.6602123904305652\n" + ] + } + ], + "source": [ + "y_pred = lgbm.predict(X_test)\n", + "from flaml.ml import sklearn_metric_loss_score\n", + "print('default lgbm accuracy', '=', 1 - sklearn_metric_loss_score('accuracy', y_pred, y_test))" + ] + }, + { + "source": [ + "### Default XGBoost" + ], + "cell_type": "markdown", + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "from xgboost import XGBClassifier\n", + "xgb = XGBClassifier()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,\n", + " colsample_bynode=1, colsample_bytree=1, gamma=0, gpu_id=-1,\n", + " importance_type='gain', interaction_constraints='',\n", + " learning_rate=0.300000012, max_delta_step=0, max_depth=6,\n", + " min_child_weight=1, missing=nan, monotone_constraints='()',\n", + " n_estimators=100, n_jobs=8, num_parallel_tree=1, random_state=0,\n", + " reg_alpha=0, reg_lambda=1, scale_pos_weight=1, subsample=1,\n", + " tree_method='exact', validate_parameters=1, verbosity=None)" + ] + }, + "metadata": {}, + "execution_count": 21 + } + ], + "source": [ + "xgb.fit(X_train, y_train)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + 
"outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "default xgboost accuracy = 0.6676060098186078\n" + ] + } + ], + "source": [ + "y_pred = xgb.predict(X_test)\n", + "from flaml.ml import sklearn_metric_loss_score\n", + "print('default xgboost accuracy', '=', 1 - sklearn_metric_loss_score('accuracy', y_pred, y_test))" + ] } ], "metadata": { "kernelspec": { "name": "python3", - "display_name": "Python 3.7.7 64-bit ('flaml': conda)", - "metadata": { - "interpreter": { - "hash": "bfcd9a6a9254a5e160761a1fd7a9e444f011592c6770d9f4180dde058a9df5dd" - } - } + "display_name": "Python 3", + "language": "python" }, "language_info": { "codemirror_mode": { diff --git a/notebook/transformers_requirements.txt b/notebook/transformers_requirements.txt deleted file mode 100644 index 9e1b3466b5..0000000000 --- a/notebook/transformers_requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -torch -transformers -datasets -ipywidgets \ No newline at end of file diff --git a/setup.py b/setup.py index 47d8b2898c..0a2aba41e8 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,6 @@ "scipy>=1.4.1", "catboost>=0.23", "scikit-learn>=0.23.2", - "optuna==2.3.0" ], @@ -48,6 +47,10 @@ "coverage>=5.3", "xgboost<1.3", "rgf-python", + "optuna==2.3.0", + ], + "blendsearch": [ + "optuna==2.3.0" ], "ray": [ "ray[tune]==1.1.0", diff --git a/test/test_pytorch_cifar10.py b/test/test_pytorch_cifar10.py index a7460594e5..7841e2d934 100644 --- a/test/test_pytorch_cifar10.py +++ b/test/test_pytorch_cifar10.py @@ -1,3 +1,5 @@ +'''Require: pip install torchvision ray +''' import unittest import os import time diff --git a/test/test_tune.py b/test/test_tune.py index 6c9c9e8285..50f92d29eb 100644 --- a/test/test_tune.py +++ b/test/test_tune.py @@ -1,5 +1,6 @@ +'''Require: pip install flaml[test,ray] +''' import unittest -import os import time from sklearn.model_selection import train_test_split import sklearn.metrics @@ -138,6 +139,7 @@ def _test_xgboost(method='BlendSearch'): scheduler=scheduler, search_alg=algo) ray.shutdown() # # Load the best model checkpoint + # import os # best_bst = xgb.Booster() # best_bst.load_model(os.path.join(analysis.best_checkpoint, # "model.xgb"))