convert to floating point instead of using future · scikit-learn/scikit-learn@1d620d4 · GitHub

Commit 1d620d4

convert to floating point instead of using future
1 parent 887fe6a commit 1d620d4

4 files changed: +3 additions, -5 deletions
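The commit message summarizes the change: the examples previously relied on `from __future__ import division` to get true (floating-point) division under Python 2, and they now force floating-point arithmetic explicitly instead, so the `__future__` import can be dropped (the flake8 code F404, which concerns `__future__` imports, is also removed from the ignore list below). A minimal sketch of the two approaches, with illustrative numbers not taken from the diff:

# Option A (before): module-wide true division via __future__.
# Under plain Python 2, 50 / 200 is integer division and yields 0.
from __future__ import division
print(50 / 200)          # 0.25 on Python 2 and Python 3 alike

# Option B (after): make one operand a float explicitly.
print(50. / 200)         # 0.25 everywhere, no __future__ import needed
print((200 * 1.0) / 2)   # 100.0, the pattern used for mask_outer below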

build_tools/travis/flake8_diff.sh

Lines changed: 1 addition & 1 deletion
@@ -138,7 +138,7 @@ if [[ "$MODIFIED_FILES" == "no_match" ]]; then
     echo "No file outside sklearn/externals and doc/sphinxext/sphinx_gallery has been modified"
 else
     # Default ignore PEP8 violations are from flake8 3.3.0
-    DEFAULT_IGNORED_PEP8=E121,E123,E126,E226,E24,E704,F404,W503,W504
+    DEFAULT_IGNORED_PEP8=E121,E123,E126,E226,E24,E704,W503,W504
     check_files "$(echo "$MODIFIED_FILES" | grep -v ^examples)" \
         --ignore $DEFAULT_IGNORED_PEP8
     # Examples are allowed to not have imports at top of file

examples/applications/plot_tomography_l1_reconstruction.py

Lines changed: 1 addition & 2 deletions
@@ -39,7 +39,6 @@ class :class:`sklearn.linear_model.Lasso`, that uses the coordinate descent
 
 # Author: Emmanuelle Gouillart <emmanuelle.gouillart@nsup.org>
 # License: BSD 3 clause
-from __future__ import division
 
 import numpy as np
 from scipy import sparse
@@ -102,7 +101,7 @@ def generate_synthetic_data():
     rs = np.random.RandomState(0)
     n_pts = 36
     x, y = np.ogrid[0:l, 0:l]
-    mask_outer = (x - l / 2) ** 2 + (y - l / 2) ** 2 < (l / 2) ** 2
+    mask_outer = (x - (l*1.0) / 2) ** 2 + (y - (l*1.0) / 2) ** 2 < ((l*1.0) / 2) ** 2
     mask = np.zeros((l, l))
     points = l * rs.rand(2, n_pts)
     mask[(points[0]).astype(np.int), (points[1]).astype(np.int)] = 1
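For context, the changed line builds the circular outer mask for the synthetic tomography image; multiplying `l` by 1.0 promotes the division to floating point, which is what `from __future__ import division` previously guaranteed. A rough, self-contained reconstruction of that step, assuming an illustrative image size `l` (the value is not taken from the diff):

import numpy as np

l = 128  # assumed image size, for illustration only
x, y = np.ogrid[0:l, 0:l]
# (l*1.0) forces float division, so the circle test behaves the same on Python 2 and 3
mask_outer = (x - (l*1.0) / 2) ** 2 + (y - (l*1.0) / 2) ** 2 < ((l*1.0) / 2) ** 2
print(mask_outer.shape, mask_outer.dtype)  # (128, 128) bool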

examples/linear_model/plot_sparse_logistic_regression_20newsgroups.py

Lines changed: 0 additions & 1 deletion
@@ -18,7 +18,6 @@
 input features would be to use univariate feature selection followed by a
 traditional (l2-penalised) logistic regression model.
 """
-from __future__ import division
 import time
 
 import matplotlib.pyplot as plt

examples/linear_model/plot_sparse_logistic_regression_mnist.py

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@
 X_test = scaler.transform(X_test)
 
 # Turn up tolerance for faster convergence
-clf = LogisticRegression(C=50 / train_samples,
+clf = LogisticRegression(C=50. / train_samples,
                          multi_class='multinomial',
                          penalty='l1', solver='saga', tol=0.1)
 clf.fit(X_train, y_train)
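The same idea applies here: with an integer `train_samples`, `C=50 / train_samples` would truncate to 0 under Python 2, so the literal is written as `50.` to keep the inverse-regularization strength a float. A sketch mirroring the call in the example at the time of this commit, with an assumed sample count:

from sklearn.linear_model import LogisticRegression

train_samples = 5000  # assumed number of training samples, for illustration
# 50. / 5000 == 0.01 on Python 2 and 3; 50 / 5000 would be 0 on Python 2
clf = LogisticRegression(C=50. / train_samples,
                         multi_class='multinomial',
                         penalty='l1', solver='saga', tol=0.1)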

0 commit comments