Commit
Fixed: Extra Trees and Gradient Boosting from sklearn working and tested
Caparrini committed Feb 23, 2024
1 parent b7f852c commit 3e54c16
Showing 2 changed files with 25 additions and 12 deletions.
mloptimizer/genoptimizer/trees.py (23 additions, 10 deletions)
@@ -66,6 +66,20 @@ class ExtraTreesOptimizer(ForestOptimizer, ABC):
     It inherits from ForestOptimizer.
     """
 
+    @staticmethod
+    def get_default_hyperparams():
+        """
+        Hyperparams for the creation of individuals (relative to the algorithm).
+        These hyperparams define the name of the hyperparam, min value, max value, and type.
+        :return: dict of hyperparams
+        """
+        hyperparams = ForestOptimizer.get_default_hyperparams()
+        # max_samples only applies when bootstrap=True, so remove it here
+        del hyperparams["max_samples"]
+        # Return all the hyperparams
+        return hyperparams
+
     def get_clf(self, individual):
         individual_dict = self.individual2dict(individual)
 
@@ -81,25 +95,24 @@ class GradientBoostingOptimizer(ForestOptimizer, ABC):
     It inherits from ForestOptimizer.
     """
 
-    def get_hyperparams(self):
+    def get_clf(self, individual):
+        individual_dict = self.individual2dict(individual)
+        clf = GradientBoostingClassifier(random_state=self.mlopt_seed,
+                                         **individual_dict)
+        return clf
+
+    @staticmethod
+    def get_default_hyperparams():
         """
         Hyperparams for the creation of individuals (relative to the algorithm).
         These hyperparams define the name of the hyperparam, min value, max value, and type.
         :return: dict of hyperparams
         """
-        hyperparams = super(GradientBoostingOptimizer, self).get_hyperparams()
+        hyperparams = ExtraTreesOptimizer.get_default_hyperparams()
         # learning_rate
         hyperparams["learning_rate"] = Hyperparam('learning_rate', 1, 10000, float, 1000000)
-        # subsample
-        del hyperparams["max_samples"]
+        # subsample must be a float in the range (0.0, 1.0]
         hyperparams["subsample"] = Hyperparam('subsample', 10, 100, float, 100)
         # Return all the hyperparams
         return hyperparams
-
-    def get_clf(self, individual):
-        individual_dict = self.individual2dict(individual)
-        clf = GradientBoostingClassifier(random_state=self.mlopt_seed,
-                                         **individual_dict)
-        return clf
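
Taken together, the two hunks build a small inheritance chain for the search space: ForestOptimizer defines the base hyperparameters, ExtraTreesOptimizer drops max_samples, and GradientBoostingOptimizer reuses the ExtraTrees space and adds learning_rate and subsample. Below is a minimal sketch of that pattern using plain dictionaries; the stand-in entries and ranges are invented, and the idea that a float Hyperparam decodes an integer gene as value / scale is an assumption read off the subsample comment (10..100 scaled by 100 gives values in (0.0, 1.0]).

# Sketch only: hypothetical stand-ins, not mloptimizer's API.
def forest_hyperparams():
    # stand-in for ForestOptimizer.get_default_hyperparams()
    return {"n_estimators": ("int", 10, 500, 1),
            "max_samples": ("float", 10, 100, 100)}   # decodes to 0.10 .. 1.00

def extra_trees_hyperparams():
    hyperparams = forest_hyperparams()
    del hyperparams["max_samples"]                     # as in the ExtraTrees hunk
    return hyperparams

def gradient_boosting_hyperparams():
    hyperparams = extra_trees_hyperparams()
    hyperparams["learning_rate"] = ("float", 1, 10000, 1000000)  # 1e-6 .. 1e-2
    hyperparams["subsample"] = ("float", 10, 100, 100)           # 0.10 .. 1.00
    return hyperparams

def decode(gene, spec):
    # assumed decoding: an integer gene divided by the scale for floats
    kind, low, high, scale = spec
    gene = min(max(gene, low), high)
    return gene / scale if kind == "float" else gene

print(decode(5000, gradient_boosting_hyperparams()["learning_rate"]))  # 0.005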
mloptimizer/test/test_genoptimizer/test_optimizers.py (2 additions, 2 deletions)
@@ -18,7 +18,7 @@
                          (balanced_accuracy_score, accuracy_score))
 @pytest.mark.parametrize('optimizer',
                          (TreeOptimizer, ForestOptimizer,
-                          # ExtraTreesOptimizer, GradientBoostingOptimizer,
+                          ExtraTreesOptimizer, GradientBoostingOptimizer,
                           XGBClassifierOptimizer,
                           # SVCOptimizer,
                           KerasClassifierOptimizer))
@@ -27,7 +27,7 @@
 def test_optimizer(optimizer, dataset, target_metric):
     X, y = dataset(return_X_y=True)
 
     opt = optimizer(X, y, score_function=target_metric)
-    clf = opt.optimize_clf(2, 1)
+    clf = opt.optimize_clf(2, 2)
     assert clf is not None
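
Based on the call pattern the test exercises, a standalone run of one of the re-enabled optimizers might look like the sketch below. The import path is inferred from the file layout (mloptimizer/genoptimizer/trees.py), and the meaning of the two integers passed to optimize_clf (population and generations, in some order) is an assumption this diff does not confirm.

# Mirrors the test above with a tiny budget; assumptions noted in the lead-in.
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score

from mloptimizer.genoptimizer import ExtraTreesOptimizer  # inferred import path

X, y = load_iris(return_X_y=True)
opt = ExtraTreesOptimizer(X, y, score_function=accuracy_score)
clf = opt.optimize_clf(2, 2)  # same tiny arguments as the updated test
assert clf is not None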


