force_all_finite support added
cosmic-cortex committed Nov 11, 2019
1 parent 309ef4f commit fdd2aa7
Showing 1 changed file with 12 additions and 3 deletions.
15 changes: 12 additions & 3 deletions modAL/models/base.py
@@ -30,6 +30,8 @@ class BaseLearner(ABC, BaseEstimator):
             for instance, modAL.uncertainty.uncertainty_sampling.
         X_training: Initial training samples, if available.
         y_training: Initial training labels corresponding to initial training samples.
+        force_all_finite: When True, forces all values of the data to be finite.
+            When False, accepts np.nan and np.inf values.
         bootstrap_init: If initial training data is available, bootstrapping can be done during the first training.
             Useful when building Committee models with bagging.
         **fit_kwargs: keyword arguments.
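
The new keyword is passed straight through to scikit-learn's check_X_y, whose force_all_finite argument controls whether non-finite values are rejected during input validation. A minimal standalone sketch of the two behaviours (plain scikit-learn, nothing modAL-specific):

import numpy as np
from sklearn.utils import check_X_y

X = np.array([[1.0, np.nan], [2.0, 3.0]])
y = np.array([0, 1])

# Default: force_all_finite=True rejects np.nan and np.inf outright.
try:
    check_X_y(X, y, force_all_finite=True)
except ValueError as err:
    print(err)  # "Input contains NaN, infinity or a value too large ..."

# force_all_finite=False lets the same data pass validation untouched.
check_X_y(X, y, force_all_finite=False)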
@@ -47,6 +49,7 @@ def __init__(self,
                  X_training: Optional[modALinput] = None,
                  y_training: Optional[modALinput] = None,
                  bootstrap_init: bool = False,
+                 force_all_finite: bool = True,
                  **fit_kwargs
                  ) -> None:
         assert callable(query_strategy), 'query_strategy must be callable'
@@ -59,6 +62,9 @@
         if X_training is not None:
             self._fit_to_known(bootstrap=bootstrap_init, **fit_kwargs)
 
+        assert isinstance(force_all_finite, bool), 'force_all_finite must be a bool'
+        self.force_all_finite = force_all_finite
+
     def _add_training_data(self, X: modALinput, y: modALinput) -> None:
         """
         Adds the new data and label to the known data, but does not retrain the model.
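
Putting the constructor change together: a sketch of building a learner on data with missing values. It assumes a BaseLearner subclass such as ActiveLearner forwards the new keyword to this constructor (only base.py changes in this commit) and that the wrapped estimator itself tolerates np.nan:

import numpy as np
from sklearn.experimental import enable_hist_gradient_boosting  # noqa: needed in sklearn 0.21/0.22
from sklearn.ensemble import HistGradientBoostingClassifier
from modAL.models import ActiveLearner

X_initial = np.array([[1.0, 2.0], [np.nan, 0.5], [3.0, 4.0]])
y_initial = np.array([0, 1, 0])

learner = ActiveLearner(
    estimator=HistGradientBoostingClassifier(),  # handles np.nan natively
    X_training=X_initial,
    y_training=y_initial,
    force_all_finite=False,  # relax the finiteness check in check_X_y
)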
@@ -71,7 +77,8 @@ def _add_training_data(self, X: modALinput, y: modALinput) -> None:
         If the classifier has been fitted, the features in X have to agree with the training samples which the
         classifier has seen.
         """
-        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None)
+        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None,
+                  force_all_finite=self.force_all_finite)
 
         if self.X_training is None:
             self.X_training = X
@@ -117,7 +124,8 @@ def _fit_on_new(self, X: modALinput, y: modALinput, bootstrap: bool = False, **f
         Returns:
             self
         """
-        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None)
+        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None,
+                  force_all_finite=self.force_all_finite)
 
         if not bootstrap:
             self.estimator.fit(X, y, **fit_kwargs)
@@ -146,7 +154,8 @@ def fit(self, X: modALinput, y: modALinput, bootstrap: bool = False, **fit_kwarg
         Returns:
             self
         """
-        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None)
+        check_X_y(X, y, accept_sparse=True, ensure_2d=False, allow_nd=True, multi_output=True, dtype=None,
+                  force_all_finite=self.force_all_finite)
         self.X_training, self.y_training = X, y
         return self._fit_to_known(bootstrap=bootstrap, **fit_kwargs)
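
Every later call path that runs through the patched check_X_y calls (fit, _fit_on_new, _add_training_data) now inherits the relaxed validation. A hypothetical continuation of the sketch above, using ActiveLearner.teach, which routes new samples through _add_training_data:

# New labelled samples may contain missing values as well.
X_new = np.array([[np.nan, 1.5]])
y_new = np.array([1])

learner.teach(X_new, y_new)  # validated with force_all_finite=False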

