
Commit def6c35

More python3 fixes (and just plain bugs)
1 parent c619e7c commit def6c35

4 files changed: +5 −5 lines


sklearn/linear_model/stochastic_gradient.py

Lines changed: 1 addition & 1 deletion
@@ -648,7 +648,7 @@ class SGDClassifier(BaseSGDClassifier, _LearntSelectorMixin):
     SGDClassifier(alpha=0.0001, class_weight=None, epsilon=0.1, eta0=0.0,
            fit_intercept=True, l1_ratio=0.15, learning_rate='optimal',
            loss='hinge', n_iter=5, n_jobs=1, penalty='l2', power_t=0.5,
-           random_state=None, rho=None, shuffle=False,
+           random_state=None, rho=None, seed=None, shuffle=False,
            verbose=0, warm_start=False)
     >>> print(clf.predict([[-0.8, -1]]))
     [1]
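Note on this hunk: scikit-learn doctests print the fitted estimator and compare the output against its repr character for character, so the expected block has to list every constructor parameter exactly as __repr__ emits it; the updated line apparently reflects a seed parameter kept alongside random_state. A minimal sketch of that mechanism (hypothetical Estimator class, not scikit-learn code):

    # Estimators echo all constructor parameters, sorted by name, in __repr__,
    # so adding a parameter (e.g. a `seed` alias) changes the doctest's expected output.
    class Estimator(object):
        def __init__(self, **params):
            self.params = params

        def __repr__(self):
            args = ", ".join("%s=%r" % item for item in sorted(self.params.items()))
            return "Estimator(%s)" % args

    print(Estimator(random_state=None, rho=None, shuffle=False))
    # Estimator(random_state=None, rho=None, shuffle=False)
    print(Estimator(random_state=None, rho=None, seed=None, shuffle=False))
    # Estimator(random_state=None, rho=None, seed=None, shuffle=False)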

sklearn/linear_model/tests/test_ridge.py

Lines changed: 1 addition & 1 deletion
@@ -214,7 +214,7 @@ def test_ridge_individual_penalties():

     # Test error is raised when number of targets and penalties do not match.
     ridge = Ridge(alpha=penalties[:3])
-    assert_raises(ValueError, ridge.fit, X, target)
+    assert_raises(ValueError, ridge.fit, X, y)


 def _test_ridge_loo(filter_):
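Note on this hunk: `target` was apparently a stale or undefined name at that point in the test, so the assertion never exercised the intended ValueError path; `y` is the multi-target array the test builds earlier. A self-contained sketch of what the corrected line checks (the fixture shapes below are assumptions, not copied from the test):

    import numpy as np
    from numpy.testing import assert_raises
    from sklearn.linear_model import Ridge

    rng = np.random.RandomState(0)
    X = rng.randn(8, 4)
    y = rng.randn(8, 5)               # five regression targets
    penalties = np.arange(1.0, 6.0)   # five per-target penalties

    # Supplying only three penalties for five targets should raise ValueError in fit.
    ridge = Ridge(alpha=penalties[:3])
    assert_raises(ValueError, ridge.fit, X, y)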

sklearn/metrics/metrics.py

Lines changed: 2 additions & 2 deletions
@@ -1199,7 +1199,7 @@ def f1_score(y_true, y_pred, labels=None, pos_label=1, average='weighted'):
     >>> f1_score(y_true, y_pred, average='macro')  # doctest: +ELLIPSIS
     0.66...
     >>> f1_score(y_true, y_pred, average='micro')  # doctest: +ELLIPSIS
-    0.80...
+    0.8...
     >>> f1_score(y_true, y_pred, average='weighted')  # doctest: +ELLIPSIS
     0.66...
     >>> f1_score(y_true, y_pred, average='samples')  # doctest: +ELLIPSIS
@@ -1619,7 +1619,7 @@ def precision_recall_fscore_support(y_true, y_pred, beta=1.0, labels=None,
     (0.66..., 0.66..., 0.66..., None)
     >>> precision_recall_fscore_support(y_true, y_pred, average='micro')
     ...     # doctest: +ELLIPSIS
-    (1.0, 0.66..., 0.80..., None)
+    (1.0, 0.66..., 0.8..., None)
     >>> precision_recall_fscore_support(y_true, y_pred, average='weighted')
     ...     # doctest: +ELLIPSIS
     (0.66..., 0.66..., 0.66..., None)
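Note on these two hunks: under the shortest-repr float formatting used by Python 2.7 and 3.x, a result equal to 0.8 prints as '0.8', and doctest's +ELLIPSIS only stands in for extra text after the literal prefix, so the old expected value '0.80...' no longer matches. A small, runnable illustration of the matching rule (not part of the commit):

    import doctest

    checker = doctest.OutputChecker()
    got = repr(4.0 / 5.0) + "\n"   # '0.8\n' under shortest-repr float formatting

    # '0.80...' needs the literal prefix '0.80'; the ellipsis cannot supply a missing digit.
    print(checker.check_output("0.80...\n", got, doctest.ELLIPSIS))   # False
    # '0.8...' matches both '0.8' and a long-form '0.8000000...' output.
    print(checker.check_output("0.8...\n", got, doctest.ELLIPSIS))    # True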

sklearn/preprocessing.py

Lines changed: 1 addition & 1 deletion
@@ -748,7 +748,7 @@ class OneHotEncoder(BaseEstimator, TransformerMixin):
     >>> enc = OneHotEncoder()
     >>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \
     [1, 0, 2]])  # doctest: +ELLIPSIS
-    OneHotEncoder(categorical_features='all', dtype=<type 'float'>,
+    OneHotEncoder(categorical_features='all', dtype=<... 'float'>,
            n_values='auto')
     >>> enc.n_values_
     array([2, 3, 4])
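Note on this hunk: the repr of the built-in float type is "<type 'float'>" on Python 2 but "<class 'float'>" on Python 3; with +ELLIPSIS enabled, the expected "<... 'float'>" matches either spelling. A small check of that matching (not part of the commit):

    import doctest

    checker = doctest.OutputChecker()
    got = repr(float) + "\n"   # "<class 'float'>" on Python 3, "<type 'float'>" on Python 2

    print(checker.check_output("<... 'float'>\n", got, doctest.ELLIPSIS))   # True on 2 and 3
    print(checker.check_output("<type 'float'>\n", got, doctest.ELLIPSIS))  # True only on Python 2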
