@@ -270,10 +270,10 @@ class AdaBoostClassifier(BaseWeightBoosting, ClassifierMixin):
270270 ``learning_rate``. There is a trade-off between ``learning_rate`` and
271271 ``n_estimators``.
272272
273- algorithm : string, optional (default="SAMME.R")
274- If "SAMME.R" then use the SAMME.R real boosting algorithm.
273+ algorithm : {'SAMME', 'SAMME.R'}, optional (default='SAMME.R')
274+ If 'SAMME.R' then use the SAMME.R real boosting algorithm.
275275 ``base_estimator`` must support calculation of class probabilities.
276- If "SAMME" then use the SAMME discrete boosting algorithm.
276+ If 'SAMME' then use the SAMME discrete boosting algorithm.
277277 The SAMME.R algorithm typically converges faster than SAMME,
278278 achieving a lower test error with fewer boosting iterations.
279279
@@ -319,7 +319,7 @@ def __init__(self,
319319 base_estimator=DecisionTreeClassifier(max_depth=1),
320320 n_estimators=50,
321321 learning_rate=1.,
322- algorithm="SAMME.R",
322+ algorithm='SAMME.R',
323323 compute_importances=False):
324324
325325 super (AdaBoostClassifier , self ).__init__ (
@@ -356,13 +356,13 @@ def fit(self, X, y, sample_weight=None):
356356 "subclass of ClassifierMixin" )
357357
358358 # Check that algorithm is supported
359- if self.algorithm != "SAMME" and self.algorithm != "SAMME.R":
359+ if self.algorithm != 'SAMME' and self.algorithm != 'SAMME.R':
360360 raise ValueError ("algorithm %s is not supported"
361361 % self .algorithm )
362362
363363 # SAMME-R requires predict_proba-enabled base estimators
364- if self.algorithm == "SAMME.R":
365- if not hasattr(self.base_estimator, "predict_proba"):
364+ if self.algorithm == 'SAMME.R':
365+ if not hasattr(self.base_estimator, 'predict_proba'):
366366 raise TypeError (
367367 "AdaBoostClassifier with algorithm='SAMME.R' requires "
368368 "that the weak learner supports the calculation of class "
@@ -407,7 +407,7 @@ def _boost(self, iboost, X, y, sample_weight):
407407 The classification error for the current boost.
408408 If None then boosting has terminated early.
409409 """
410- if self.algorithm == "SAMME.R":
410+ if self.algorithm == 'SAMME.R':
411411 return self ._boost_real (iboost , X , y , sample_weight )
412412
413413 else : # elif self.algorithm == "SAMME":
@@ -601,7 +601,7 @@ def decision_function(self, X):
601601
602602 norm += weight
603603
604- if self.algorithm == "SAMME.R":
604+ if self.algorithm == 'SAMME.R':
605605 current_pred = _samme_proba (estimator , n_classes , X )
606606 else : # elif self.algorithm == "SAMME":
607607 current_pred = estimator .predict (X )
@@ -653,7 +653,7 @@ def staged_decision_function(self, X):
653653
654654 norm += weight
655655
656- if self.algorithm == "SAMME.R":
656+ if self.algorithm == 'SAMME.R':
657657 current_pred = _samme_proba (estimator , n_classes , X )
658658 else : # elif self.algorithm == "SAMME":
659659 current_pred = estimator .predict (X )
@@ -799,9 +799,9 @@ class AdaBoostRegressor(BaseWeightBoosting, RegressorMixin):
799799 ``learning_rate``. There is a trade-off between ``learning_rate`` and
800800 ``n_estimators``.
801801
802- loss_function : string, optional (default="linear")
803- The loss function ("linear", "square", or "exponential") to use when
804- updating the weights after each boosting iteration.
802+ loss : {'linear', 'square', 'exponential'}, optional (default='linear')
803+ The loss function to use when updating the weights after each
804+ boosting iteration.
805805
806806 compute_importances : boolean, optional (default=False)
807807 Whether feature importances are computed and stored in the
@@ -844,7 +844,7 @@ def __init__(self,
844844 base_estimator = DecisionTreeRegressor (max_depth = 3 ),
845845 n_estimators = 50 ,
846846 learning_rate = 1. ,
847- loss_function='linear',
847+ loss='linear',
848848 compute_importances = False ,
849849 random_state = None ):
850850
@@ -854,7 +854,7 @@ def __init__(self,
854854 learning_rate = learning_rate ,
855855 compute_importances = compute_importances )
856856
857- self.loss_function = loss_function
857+ self.loss = loss
858858 self .random_state = random_state
859859
860860 def fit (self , X , y , sample_weight = None ):
@@ -882,9 +882,9 @@ def fit(self, X, y, sample_weight=None):
882882 raise TypeError ("base_estimator must be a "
883883 "subclass of RegressorMixin" )
884884
885- if self.loss_function not in ('linear', 'square', 'exponential'):
885+ if self.loss not in ('linear', 'square', 'exponential'):
886886 raise ValueError(
887- "loss_function must be 'linear', 'square', or 'exponential'")
887+ "loss must be 'linear', 'square', or 'exponential'")
888888
889889 # Fit
890890 return super (AdaBoostRegressor , self ).fit (X , y , sample_weight )
@@ -948,9 +948,9 @@ def _boost(self, iboost, X, y, sample_weight):
948948 if error_max != 0. :
949949 error_vect /= error_vect .max ()
950950
951- if self.loss_function == 'square':
951+ if self.loss == 'square':
952952 error_vect *= error_vect
953- elif self.loss_function == 'exponential':
953+ elif self.loss == 'exponential':
954954 error_vect = 1. - np .exp (- error_vect )
955955
956956 # Calculate the average loss
0 commit comments