@@ -30,6 +30,8 @@ class BaseLearner(ABC, BaseEstimator):
30
30
for instance, modAL.uncertainty.uncertainty_sampling.
31
31
X_training: Initial training samples, if available.
32
32
y_training: Initial training labels corresponding to initial training samples.
33
+ force_all_finite: When True, forces all values of the data to be finite.
34
+ When False, accepts np.nan and np.inf values.
33
35
bootstrap_init: If initial training data is available, bootstrapping can be done during the first training.
34
36
Useful when building Committee models with bagging.
35
37
**fit_kwargs: keyword arguments.
@@ -47,6 +49,7 @@ def __init__(self,
47
49
X_training : Optional [modALinput ] = None ,
48
50
y_training : Optional [modALinput ] = None ,
49
51
bootstrap_init : bool = False ,
52
+ force_all_finite : bool = True ,
50
53
** fit_kwargs
51
54
) -> None :
52
55
assert callable (query_strategy ), 'query_strategy must be callable'
@@ -59,6 +62,9 @@ def __init__(self,
59
62
if X_training is not None :
60
63
self ._fit_to_known (bootstrap = bootstrap_init , ** fit_kwargs )
61
64
65
+ assert isinstance (force_all_finite , bool ), 'force_all_finite must be a bool'
66
+ self .force_all_finite = force_all_finite
67
+
62
68
def _add_training_data (self , X : modALinput , y : modALinput ) -> None :
63
69
"""
64
70
Adds the new data and label to the known data, but does not retrain the model.
@@ -71,7 +77,8 @@ def _add_training_data(self, X: modALinput, y: modALinput) -> None:
71
77
If the classifier has been fitted, the features in X have to agree with the training samples which the
72
78
classifier has seen.
73
79
"""
74
- check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None )
80
+ check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None ,
81
+ force_all_finite = self .force_all_finite )
75
82
76
83
if self .X_training is None :
77
84
self .X_training = X
@@ -117,7 +124,8 @@ def _fit_on_new(self, X: modALinput, y: modALinput, bootstrap: bool = False, **f
117
124
Returns:
118
125
self
119
126
"""
120
- check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None )
127
+ check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None ,
128
+ force_all_finite = self .force_all_finite )
121
129
122
130
if not bootstrap :
123
131
self .estimator .fit (X , y , ** fit_kwargs )
@@ -146,7 +154,8 @@ def fit(self, X: modALinput, y: modALinput, bootstrap: bool = False, **fit_kwarg
146
154
Returns:
147
155
self
148
156
"""
149
- check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None )
157
+ check_X_y (X , y , accept_sparse = True , ensure_2d = False , allow_nd = True , multi_output = True , dtype = None ,
158
+ force_all_finite = self .force_all_finite )
150
159
self .X_training , self .y_training = X , y
151
160
return self ._fit_to_known (bootstrap = bootstrap , ** fit_kwargs )
152
161
0 commit comments