Commit 50876ec

COSMIT Flake8 fixes for svm/base.py
1 parent 9df01a8 commit 50876ec

File tree

1 file changed (+10 / -10 lines)

sklearn/svm/base.py

Lines changed: 10 additions & 10 deletions
@@ -7,7 +7,7 @@

 from . import libsvm, liblinear
 from . import libsvm_sparse
-from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
+from ..base import BaseEstimator, ClassifierMixin
 from ..preprocessing import LabelEncoder
 from ..utils import check_array, check_random_state, column_or_1d
 from ..utils import ConvergenceWarning, compute_class_weight
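Note: the import hunk drops RegressorMixin from the ..base import, presumably because nothing in this module references it any more; flake8 reports such imports as F401. A minimal illustration of that check, using a hypothetical file name:

    # unused_import_demo.py -- hypothetical file, for illustration only.
    # Running ``flake8 unused_import_demo.py`` flags the first import as
    # F401 ('os' imported but unused); the second import is used and passes.
    import os
    import sys

    print(sys.version)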
@@ -70,7 +70,7 @@ def __init__(self, impl, kernel, degree, gamma, coef0,
                  tol, C, nu, epsilon, shrinking, probability, cache_size,
                  class_weight, verbose, max_iter, random_state):

-        if not impl in LIBSVM_IMPL:  # pragma: no cover
+        if impl not in LIBSVM_IMPL:  # pragma: no cover
             raise ValueError("impl should be one of %s, %s was given" % (
                 LIBSVM_IMPL, impl))

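Note: the change at old line 73 is the membership-test rewrite flake8 reports as E713. `not impl in LIBSVM_IMPL` parses as `not (impl in LIBSVM_IMPL)`, so switching to `impl not in LIBSVM_IMPL` is purely a readability fix. A small sketch (the LIBSVM_IMPL values below are illustrative placeholders, not taken from this diff):

    # Both spellings are equivalent at runtime; flake8 prefers the second.
    LIBSVM_IMPL = ['c_svc', 'nu_svc', 'one_class', 'epsilon_svr', 'nu_svr']  # placeholder list

    impl = 'c_svc'
    assert (not impl in LIBSVM_IMPL) == (impl not in LIBSVM_IMPL)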
@@ -384,7 +384,7 @@ def decision_function(self, X):

     def _validate_for_predict(self, X):
         check_is_fitted(self, 'support_')
-
+
         X = check_array(X, accept_sparse='csr', dtype=np.float64, order="C")
         if self._sparse and not sp.isspmatrix(X):
             X = sp.csr_matrix(X)
@@ -615,10 +615,10 @@ def _get_liblinear_solver_type(multi_class, penalty, loss, dual):
         'logistic_regression': {
             'l1': {False: 6},
             'l2': {False: 0, True: 7}},
-        'hinge' : {
-            'l2' : {True: 3}},
+        'hinge': {
+            'l2': {True: 3}},
         'squared_hinge': {
-            'l1': {False : 5},
+            'l1': {False: 5},
             'l2': {False: 2, True: 1}},
         'epsilon_insensitive': {
             'l2': {True: 13}},
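Note: this hunk only removes whitespace before colons (the E203 warning); the mapping itself is unchanged. For context, the nested dict resolves a loss/penalty/dual combination to an integer liblinear solver id, as in this sketch built only from the entries visible above:

    # Lookup sketch using just the values shown in the hunk (not the full table).
    solver_type_dict = {
        'logistic_regression': {'l1': {False: 6}, 'l2': {False: 0, True: 7}},
        'hinge': {'l2': {True: 3}},
        'squared_hinge': {'l1': {False: 5}, 'l2': {False: 2, True: 1}},
        'epsilon_insensitive': {'l2': {True: 13}},
    }

    # squared hinge loss + L2 penalty, solved in the dual -> liblinear solver 1
    assert solver_type_dict['squared_hinge']['l2'][True] == 1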
@@ -652,15 +652,15 @@ def _get_liblinear_solver_type(multi_class, penalty, loss, dual):
                                 % (penalty, loss, dual))
             else:
                 return solver_num
-
+
     raise ValueError(('Unsupported set of arguments: %s, '
                       'Parameters: penalty=%r, loss=%r, dual=%r')
                      % (error_string, penalty, loss, dual))


 def _fit_liblinear(X, y, C, fit_intercept, intercept_scaling, class_weight,
                    penalty, dual, verbose, max_iter, tol,
-                   random_state=None, multi_class='ovr',
+                   random_state=None, multi_class='ovr',
                    loss='logistic_regression', epsilon=0.1):
     """Used by Logistic Regression (and CV) and LinearSVC.

@@ -722,7 +722,7 @@ def _fit_liblinear(X, y, C, fit_intercept, intercept_scaling, class_weight,
         If `crammer_singer` is chosen, the options loss, penalty and dual will
         be ignored.

-    loss : str, {'logistic_regression', 'hinge', 'squared_hinge',
+    loss : str, {'logistic_regression', 'hinge', 'squared_hinge',
                  'epsilon_insensitive', 'squared_epsilon_insensitive}
         The loss function used to fit the model.

@@ -788,7 +788,7 @@ def _fit_liblinear(X, y, C, fit_intercept, intercept_scaling, class_weight,
     # LibLinear wants targets as doubles, even for classification
     y_ind = np.asarray(y_ind, dtype=np.float64).ravel()
     solver_type = _get_liblinear_solver_type(multi_class, penalty, loss, dual)
-    raw_coef_, n_iter_ = liblinear.train_wrap(
+    raw_coef_, n_iter_ = liblinear.train_wrap(
         X, y_ind, sp.isspmatrix(X), solver_type, tol, bias, C,
         class_weight_, max_iter, rnd.randint(np.iinfo('i').max),
         epsilon
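Note: the remaining hunks (old lines 387, 655, 663, 725, 791) appear to be whitespace-only fixes such as trailing whitespace and continuation-line spacing. To reproduce the kind of report this commit cleans up, flake8 can be run on the file; the --select list below is an assumption about which checks were involved, not something recorded in the commit:

    # Hedged sketch: run flake8 on the touched file and print its report.
    # F401 = unused import, E203 = whitespace before ':',
    # E713 = 'not in' membership test, W291/W293 = trailing whitespace.
    import subprocess

    result = subprocess.run(
        ['flake8', '--select=F401,E203,E713,W291,W293', 'sklearn/svm/base.py'],
        capture_output=True, text=True,
    )
    print(result.stdout or 'no violations reported')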
