
Commit 77695ee

COSMIT pep8
1 parent d041aed commit 77695ee

12 files changed: +55 additions, -47 deletions


sklearn/datasets/tests/test_svmlight_format.py

Lines changed: 2 additions & 1 deletion
@@ -260,7 +260,8 @@ def test_dump_concise():
     dump_svmlight_file(X, y, f)
     f.seek(0)
     # make sure it's using the most concise format possible
-    assert_equal(f.readline(), b("1 0:1 1:2.1 2:3.01 3:1.000000000000001 4:1\n"))
+    assert_equal(f.readline(),
+                 b("1 0:1 1:2.1 2:3.01 3:1.000000000000001 4:1\n"))
     assert_equal(f.readline(), b("2.1 0:1000000000 1:2e+18 2:3e+27\n"))
     assert_equal(f.readline(), b("3.01 \n"))
     assert_equal(f.readline(), b("1.000000000000001 \n"))
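The wrapped assertion checks that dump_svmlight_file writes the most concise representation of each value. As a rough standalone illustration (a sketch, not taken from the test fixture; the exact bytes depend on the input matrix):

import numpy as np
from io import BytesIO
from sklearn.datasets import dump_svmlight_file

X = np.array([[1.0, 2.1, 3.01]])
y = np.array([1])
f = BytesIO()
dump_svmlight_file(X, y, f)
f.seek(0)
# expected to print something like b'1 0:1 1:2.1 2:3.01\n' (no redundant trailing zeros)
print(f.readline())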

sklearn/decomposition/nmf.py

Lines changed: 2 additions & 3 deletions
@@ -97,7 +97,7 @@ def _initialize_nmf(X, n_components, variant=None, eps=1e-6,
     initialization: A head start for nonnegative
     matrix factorization - Pattern Recognition, 2008

-    http://scgroup.hpclab.ceid.upatras.gr/faculty/stratis/Papers/HPCLAB020107.pdf
+    http://tinyurl.com/nndsvd
     """
     check_non_negative(X, "NMF initialization")
     if variant not in (None, 'a', 'ar'):
@@ -359,8 +359,7 @@ class ProjectedGradientNMF(BaseEstimator, TransformerMixin):
     C. Boutsidis, E. Gallopoulos: SVD based
     initialization: A head start for nonnegative
     matrix factorization - Pattern Recognition, 2008
-    http://scgroup.hpclab.ceid.upatras.gr/faculty/stratis/Papers/HPCLAB020107.pdf
-
+    http://tinyurl.com/nndsvd
     """

     def __init__(self, n_components=None, init=None, sparseness=None, beta=1,
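Both hunks shorten the reference for the NNDSVD initialization described in the docstrings. A minimal sketch of requesting that initialization (assuming the 'nndsvd' value is accepted by the init parameter in this version):

import numpy as np
from sklearn.decomposition import ProjectedGradientNMF

X = np.abs(np.random.RandomState(0).randn(6, 4))  # NMF needs non-negative input
model = ProjectedGradientNMF(n_components=2, init='nndsvd')
W = model.fit_transform(X)  # factor X ~= W H with the NNDSVD head start
H = model.components_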

sklearn/feature_extraction/image.py

Lines changed: 1 addition & 0 deletions
@@ -215,6 +215,7 @@ def _compute_n_patches(i_h, i_w, p_h, p_w, max_patches=None):
     else:
         return all_patches

+
 def extract_patches(arr, patch_shape=8, extraction_step=1):
     """Extracts patches of any n-dimensional array in place using strides.

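The added blank line only restores PEP8's two blank lines before the top-level extract_patches definition. For orientation, a quick sketch of the related public helper, extract_patches_2d, from the same module:

import numpy as np
from sklearn.feature_extraction.image import extract_patches_2d

image = np.arange(16.0).reshape(4, 4)
patches = extract_patches_2d(image, patch_size=(2, 2))
# every 2x2 window of a 4x4 image: (4 - 2 + 1) ** 2 = 9 patches
print(patches.shape)  # expected (9, 2, 2)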

sklearn/feature_extraction/tests/test_image.py

Lines changed: 6 additions & 3 deletions
@@ -186,13 +186,16 @@ def test_patch_extractor_max_patches():

     max_patches = 100
     expected_n_patches = len(lenas) * max_patches
-    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches, random_state=0)
+    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
+                          random_state=0)
     patches = extr.transform(lenas)
     assert_true(patches.shape == (expected_n_patches, p_h, p_w))

     max_patches = 0.5
-    expected_n_patches = len(lenas) * int((i_h - p_h + 1) * (i_w - p_w + 1) * max_patches)
-    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches, random_state=0)
+    expected_n_patches = len(lenas) * int((i_h - p_h + 1) * (i_w - p_w + 1)
+                                          * max_patches)
+    extr = PatchExtractor(patch_size=(p_h, p_w), max_patches=max_patches,
+                          random_state=0)
     patches = extr.transform(lenas)
     assert_true(patches.shape == (expected_n_patches, p_h, p_w))

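The re-wrapped expression spells out how a fractional max_patches is interpreted: a share of the (i_h - p_h + 1) * (i_w - p_w + 1) patches available per image. A small sketch with made-up shapes:

import numpy as np
from sklearn.feature_extraction.image import PatchExtractor

images = np.random.RandomState(0).rand(2, 8, 8)  # two 8x8 grayscale images
extr = PatchExtractor(patch_size=(4, 4), max_patches=0.5,
                      random_state=0)
patches = extr.transform(images)
# (8 - 4 + 1) ** 2 = 25 patches per image, half kept -> 12 per image
print(patches.shape)  # expected (24, 4, 4)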

sklearn/feature_selection/base.py

Lines changed: 0 additions & 1 deletion
@@ -113,4 +113,3 @@ def inverse_transform(self, X):
         Xt = np.zeros((X.shape[0], support.size), dtype=X.dtype)
         Xt[:, support] = X
         return Xt
-

sklearn/linear_model/ridge.py

Lines changed: 8 additions & 6 deletions
@@ -124,8 +124,7 @@ def _solve_dense_cholesky_kernel(K, y, alpha, sample_weight=None):
         # Only one penalty, we can solve multi-target problems in one time.
         K.flat[::n_samples + 1] += alpha[0]

-        dual_coef = linalg.solve(K, y,
-                                 sym_pos=True, overwrite_a=True)
+        dual_coef = linalg.solve(K, y, sym_pos=True, overwrite_a=True)

         # K is expensive to compute and store in memory so change it back in
         # case it was user-given.
@@ -196,7 +195,8 @@ def ridge_regression(X, y, alpha, sample_weight=1.0, solver='auto',
         - 'auto' chooses the solver automatically based on the type of data.

         - 'svd' uses a Singular Value Decomposition of X to compute the Ridge
-          coefficients. More stable for singular matrices than 'dense_cholesky'.
+          coefficients. More stable for singular matrices than
+          'dense_cholesky'.

         - 'dense_cholesky' uses the standard scipy.linalg.solve function to
           obtain a closed-form solution via a Cholesky decomposition of
@@ -266,7 +266,8 @@ def ridge_regression(X, y, alpha, sample_weight=1.0, solver='auto',
     alpha = safe_asarray(alpha).ravel()
     if alpha.size not in [1, n_targets]:
         raise ValueError("Number of targets and number of penalties "
-                         "do not correspond: %d != %d" % (alpha.size, n_targets))
+                         "do not correspond: %d != %d"
+                         % (alpha.size, n_targets))

     if alpha.size == 1 and n_targets > 1:
         alpha = np.repeat(alpha, n_targets)
@@ -293,7 +294,7 @@ def ridge_regression(X, y, alpha, sample_weight=1.0, solver='auto',
             coef = safe_sparse_dot(X.T, dual_coef, dense_output=True).T
         else:
            try:
-                coef =_solve_dense_cholesky(X, y, alpha)
+                coef = _solve_dense_cholesky(X, y, alpha)
             except linalg.LinAlgError:
                 # use SVD solver if matrix is singular
                 solver = 'svd'
@@ -379,7 +380,8 @@ class Ridge(_BaseRidge, RegressorMixin):
         - 'auto' chooses the solver automatically based on the type of data.

         - 'svd' uses a Singular Value Decomposition of X to compute the Ridge
-          coefficients. More stable for singular matrices than 'dense_cholesky'.
+          coefficients. More stable for singular matrices than
+          'dense_cholesky'.

         - 'dense_cholesky' uses the standard scipy.linalg.solve function to
           obtain a closed-form solution.
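The re-wrapped bullets belong to the solver parameter documentation. A brief sketch of picking a solver explicitly (solver names as they appear in this version of scikit-learn):

import numpy as np
from sklearn.linear_model import Ridge

rng = np.random.RandomState(0)
X, y = rng.randn(20, 5), rng.randn(20)

# 'svd' is the more robust choice when X may be singular or ill-conditioned
reg = Ridge(alpha=1.0, solver='svd').fit(X, y)
print(reg.coef_)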

sklearn/metrics/metrics.py

Lines changed: 2 additions & 2 deletions
@@ -193,8 +193,8 @@ def _check_clf_targets(y_true, y_pred):
 def auc(x, y, reorder=False):
     """Compute Area Under the Curve (AUC) using the trapezoidal rule

-    This is a general function, given points on a curve. For computing the area
-    under the ROC-curve, see :func:`auc_score`.
+    This is a general function, given points on a curve. For computing the
+    area under the ROC-curve, see :func:`auc_score`.

     Parameters
     ----------
sklearn/tests/test_common.py

Lines changed: 4 additions & 2 deletions
@@ -111,6 +111,7 @@ def test_all_estimators():
             else:
                 assert_equal(params[arg], default)

+
 def test_estimators_sparse_data():
     # All estimators should either deal with sparse data, or raise an
     # intelligible error message
@@ -990,8 +991,9 @@ def test_cluster_overwrite_params():
 def test_import_all_consistency():
     # Smoke test to check that any name in a __all__ list is actually defined
     # in the namespace of the module or package.
-    for importer, modname, ispkg in pkgutil.walk_packages(
-            path=sklearn.__path__, prefix='sklearn.', onerror=lambda x: None):
+    pkgs = pkgutil.walk_packages(path=sklearn.__path__, prefix='sklearn.',
+                                 onerror=lambda _: None)
+    for importer, modname, ispkg in pkgs:
         if ".tests." in modname:
             continue
         package = __import__(modname, fromlist="dummy")
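The refactored loop is the standard pkgutil walk over a package tree, pulled out of the for statement to keep the line under the PEP8 length limit. The same pattern in a self-contained sketch (sklearn used here only as the package being walked):

import pkgutil
import sklearn

pkgs = pkgutil.walk_packages(path=sklearn.__path__, prefix='sklearn.',
                             onerror=lambda _: None)
for importer, modname, ispkg in pkgs:
    if ".tests." in modname:
        continue
    module = __import__(modname, fromlist="dummy")
    # every name advertised in __all__ should resolve on the module
    for name in getattr(module, '__all__', []):
        assert hasattr(module, name), (modname, name)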

sklearn/utils/extmath.py

Lines changed: 1 addition & 1 deletion
@@ -414,7 +414,7 @@ def cartesian(arrays, out=None):

     References
     ----------
-    http://stackoverflow.com/questions/1208118/using-numpy-to-build-an-array-of-all-combinations-of-two-arrays
+    http://stackoverflow.com/q/1208118
 
     """
     arrays = [np.asarray(x).ravel() for x in arrays]
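The shortened link is the reference for cartesian, which builds every combination of the input arrays row by row. A quick sketch:

import numpy as np
from sklearn.utils.extmath import cartesian

print(cartesian([np.array([1, 2]), np.array([10, 20])]))
# expected:
# [[ 1 10]
#  [ 1 20]
#  [ 2 10]
#  [ 2 20]]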

sklearn/utils/mst/_graph_validation.py

Lines changed: 1 addition & 0 deletions
@@ -7,6 +7,7 @@

 DTYPE = np.float64

+
 def validate_graph(csgraph, directed, dtype=DTYPE,
                    csr_output=True, dense_output=True,
                    copy_if_dense=False, copy_if_sparse=False,
