diff --git a/examples/api/mathtext_asarray.py b/examples/api/mathtext_asarray.py
index cf8eac90745a..258ad0fa4b5e 100644
--- a/examples/api/mathtext_asarray.py
+++ b/examples/api/mathtext_asarray.py
@@ -22,7 +22,7 @@
     r'some other string', color='red', fontsize=20, dpi=200)
 
 fig = plt.figure()
-fig.figimage(rgba1.astype(float)/255., 100, 100)
-fig.figimage(rgba2.astype(float)/255., 100, 300)
+fig.figimage(rgba1, 100, 100)
+fig.figimage(rgba2, 100, 300)
 
 plt.show()
diff --git a/examples/color/color_cycle_default.py b/examples/color/color_cycle_default.py
index 29afdef912d2..ae887b15c2ab 100644
--- a/examples/color/color_cycle_default.py
+++ b/examples/color/color_cycle_default.py
@@ -13,7 +13,7 @@
 colors = prop_cycle.by_key()['color']
 
 lwbase = plt.rcParams['lines.linewidth']
-thin = float('%.1f' % (lwbase / 2))
+thin = lwbase / 2
 thick = lwbase * 3
 
 fig, axs = plt.subplots(nrows=2, ncols=2, sharex=True, sharey=True)
@@ -29,7 +29,7 @@
 
     axs[1, icol].set_facecolor('k')
     axs[1, icol].xaxis.set_ticks(np.arange(0, 10, 2))
-    axs[0, icol].set_title('line widths (pts): %.1f, %.1f' % (lwx, lwy),
+    axs[0, icol].set_title('line widths (pts): %g, %g' % (lwx, lwy),
                            fontsize='medium')
 
 for irow in range(2):
diff --git a/examples/lines_bars_and_markers/stackplot_demo.py b/examples/lines_bars_and_markers/stackplot_demo.py
index d2965d90e579..92a25ac79c62 100644
--- a/examples/lines_bars_and_markers/stackplot_demo.py
+++ b/examples/lines_bars_and_markers/stackplot_demo.py
@@ -43,7 +43,7 @@ def bump(a):
         y = 2 * np.random.random() - .5
         z = 10 / (.1 + np.random.random())
         for i in range(m):
-            w = (i / float(m) - y) * z
+            w = (i / m - y) * z
             a[i] += x * np.exp(-w * w)
     a = np.zeros((m, n))
     for i in range(n):
diff --git a/examples/statistics/hist.py b/examples/statistics/hist.py
index 98e1f95d43eb..fa55549f901d 100644
--- a/examples/statistics/hist.py
+++ b/examples/statistics/hist.py
@@ -52,7 +52,7 @@
 N, bins, patches = axs[0].hist(x, bins=n_bins)
 
 # We'll color code by height, but you could use any scalar
-fracs = N.astype(float) / N.max()
+fracs = N / N.max()
 
 # we need to normalize the data to 0..1 for the full range of the colormap
 norm = colors.Normalize(fracs.min(), fracs.max())
diff --git a/lib/matplotlib/axes/_axes.py b/lib/matplotlib/axes/_axes.py
index 881b11467dd1..d61f6dfe809f 100644
--- a/lib/matplotlib/axes/_axes.py
+++ b/lib/matplotlib/axes/_axes.py
@@ -6490,7 +6490,7 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
         if stacked and density:
             db = np.diff(bins)
             for m in tops:
-                m[:] = (m.astype(float) / db) / tops[-1].sum()
+                m[:] = (m / db) / tops[-1].sum()
         if cumulative:
             slc = slice(None)
             if cbook.is_numlike(cumulative) and cumulative < 0:
diff --git a/lib/matplotlib/backends/backend_agg.py b/lib/matplotlib/backends/backend_agg.py
index 6f9b3386d8c8..13a77762f982 100644
--- a/lib/matplotlib/backends/backend_agg.py
+++ b/lib/matplotlib/backends/backend_agg.py
@@ -146,7 +146,7 @@ def draw_path(self, gc, path, transform, rgbFace=None):
 
         if (nmax > 100 and npts > nmax and path.should_simplify and
                 rgbFace is None and gc.get_hatch() is None):
-            nch = np.ceil(npts / float(nmax))
+            nch = np.ceil(npts / nmax)
             chsize = int(np.ceil(npts / nch))
             i0 = np.arange(0, npts, chsize)
             i1 = np.zeros_like(i0)
diff --git a/lib/matplotlib/backends/backend_mixed.py b/lib/matplotlib/backends/backend_mixed.py
index 40d7fd64398c..8e475bd13c95 100644
--- a/lib/matplotlib/backends/backend_mixed.py
+++ b/lib/matplotlib/backends/backend_mixed.py
@@ -139,8 +139,8 @@ def stop_rasterizing(self):
             # backends support this.
             self._renderer.draw_image(
                 gc,
-                float(l) / self.dpi * self._figdpi,
-                (float(height)-b-h) / self.dpi * self._figdpi,
+                l * self._figdpi / self.dpi,
+                (height-b-h) * self._figdpi / self.dpi,
                 image)
         self._raster_renderer = None
         self._rasterizing = False
diff --git a/lib/matplotlib/backends/backend_pdf.py b/lib/matplotlib/backends/backend_pdf.py
index 85c83654039c..b9e1a1104e0e 100644
--- a/lib/matplotlib/backends/backend_pdf.py
+++ b/lib/matplotlib/backends/backend_pdf.py
@@ -1282,7 +1282,7 @@ def writeGouraudTriangles(self):
             flat_colors = colors.reshape((shape[0] * shape[1], 4))
             points_min = np.min(flat_points, axis=0) - (1 << 8)
             points_max = np.max(flat_points, axis=0) + (1 << 8)
-            factor = float(0xffffffff) / (points_max - points_min)
+            factor = 0xffffffff / (points_max - points_min)
 
             self.beginStream(
                 ob.id, None,
diff --git a/lib/matplotlib/backends/backend_ps.py b/lib/matplotlib/backends/backend_ps.py
index 1348f338d3f8..1aeee39a246a 100644
--- a/lib/matplotlib/backends/backend_ps.py
+++ b/lib/matplotlib/backends/backend_ps.py
@@ -800,7 +800,7 @@ def draw_gouraud_triangles(self, gc, points, colors, trans):
         flat_colors = colors.reshape((shape[0] * shape[1], 4))
         points_min = np.min(flat_points, axis=0) - (1 << 12)
         points_max = np.max(flat_points, axis=0) + (1 << 12)
-        factor = np.ceil(float(2 ** 32 - 1) / (points_max - points_min))
+        factor = np.ceil((2 ** 32 - 1) / (points_max - points_min))
 
         xmin, ymin = points_min
         xmax, ymax = points_max
diff --git a/lib/matplotlib/backends/backend_wx.py b/lib/matplotlib/backends/backend_wx.py
index b2190e3a06da..d2fd3c5218ab 100644
--- a/lib/matplotlib/backends/backend_wx.py
+++ b/lib/matplotlib/backends/backend_wx.py
@@ -1099,7 +1099,7 @@ def _onMouseWheel(self, evt):
         delta = evt.GetWheelDelta()
         rotation = evt.GetWheelRotation()
         rate = evt.GetLinesPerAction()
-        step = rate * float(rotation) / delta
+        step = rate * rotation / delta
 
         # Done handling event
         evt.Skip()
diff --git a/lib/matplotlib/colors.py b/lib/matplotlib/colors.py
index d2a3a8b95f97..681b1bc32d38 100644
--- a/lib/matplotlib/colors.py
+++ b/lib/matplotlib/colors.py
@@ -1315,7 +1315,7 @@ def __call__(self, value, clip=None):
         for i, b in enumerate(self.boundaries):
             iret[xx >= b] = i
         if self._interp:
-            scalefac = float(self.Ncmap - 1) / (self.N - 2)
+            scalefac = (self.Ncmap - 1) / (self.N - 2)
             iret = (iret * scalefac).astype(np.int16)
         iret[xx < self.vmin] = -1
         iret[xx >= self.vmax] = max_col
diff --git a/lib/matplotlib/contour.py b/lib/matplotlib/contour.py
index 3df6d8a32458..4c54786dd6a8 100644
--- a/lib/matplotlib/contour.py
+++ b/lib/matplotlib/contour.py
@@ -1563,8 +1563,8 @@ def _initialize_x_y(self, z):
                 x0, x1, y0, y1 = (0, Nx, 0, Ny)
             else:
                 x0, x1, y0, y1 = self.extent
-            dx = float(x1 - x0) / Nx
-            dy = float(y1 - y0) / Ny
+            dx = (x1 - x0) / Nx
+            dy = (y1 - y0) / Ny
             x = x0 + (np.arange(Nx) + 0.5) * dx
             y = y0 + (np.arange(Ny) + 0.5) * dy
             if self.origin == 'upper':
diff --git a/lib/matplotlib/dates.py b/lib/matplotlib/dates.py
index 9a15ffef048e..3eab32648893 100644
--- a/lib/matplotlib/dates.py
+++ b/lib/matplotlib/dates.py
@@ -306,23 +306,21 @@ def _from_ordinalf(x, tz=None):
     if tz is None:
         tz = _get_rc_timezone()
 
-    ix = int(x)
+    ix, remainder = divmod(x, 1)
+    ix = int(ix)
     if ix < 1:
-        raise ValueError('cannot convert {} to a date. This '
-                         'often happens if non-datetime values are passed to '
-                         'an axis that expects datetime objects. '
-                         .format(ix))
+        raise ValueError('Cannot convert {} to a date. This often happens if '
+                         'non-datetime values are passed to an axis that '
+                         'expects datetime objects.'.format(ix))
     dt = datetime.datetime.fromordinal(ix).replace(tzinfo=UTC)
-    remainder = float(x) - ix
-
     # Since the input date `x` float is unable to preserve microsecond
     # precision of time representation in non-antique years, the
     # resulting datetime is rounded to the nearest multiple of
     # `musec_prec`. A value of 20 is appropriate for current dates.
     musec_prec = 20
 
-    remainder_musec = int(round(remainder * MUSECONDS_PER_DAY /
-                                float(musec_prec)) * musec_prec)
+    remainder_musec = int(round(remainder * MUSECONDS_PER_DAY / musec_prec)
+                          * musec_prec)
 
     # For people trying to plot with full microsecond precision, enable
     # an early-year workaround
@@ -1287,10 +1285,10 @@ def get_locator(self, dmin, dmax):
         # these similar functions, and it's best to avoid doing our own math
         # whenever possible.
         numYears = float(delta.years)
-        numMonths = (numYears * MONTHS_PER_YEAR) + delta.months
+        numMonths = numYears * MONTHS_PER_YEAR + delta.months
         numDays = tdelta.days   # Avoids estimates of days/month, days/year
-        numHours = (numDays * HOURS_PER_DAY) + delta.hours
-        numMinutes = (numHours * MIN_PER_HOUR) + delta.minutes
+        numHours = numDays * HOURS_PER_DAY + delta.hours
+        numMinutes = numHours * MIN_PER_HOUR + delta.minutes
         numSeconds = np.floor(tdelta.total_seconds())
         numMicroseconds = np.floor(tdelta.total_seconds() * 1e6)
 
@@ -1745,14 +1743,14 @@ def seconds(s):
     """
    Return seconds as days.
    """
-    return float(s) / SEC_PER_DAY
+    return s / SEC_PER_DAY
 
 
 def minutes(m):
    """
    Return minutes as days.
    """
-    return float(m) / MINUTES_PER_DAY
+    return m / MINUTES_PER_DAY
 
 
 def hours(h):
diff --git a/lib/matplotlib/figure.py b/lib/matplotlib/figure.py
index d9e00247574b..216353a0c71a 100644
--- a/lib/matplotlib/figure.py
+++ b/lib/matplotlib/figure.py
@@ -726,7 +726,7 @@ def figimage(self, X,
 
         if resize:
             dpi = self.get_dpi()
-            figsize = [x / float(dpi) for x in (X.shape[1], X.shape[0])]
+            figsize = [x / dpi for x in (X.shape[1], X.shape[0])]
             self.set_size_inches(figsize, forward=True)
 
         im = FigureImage(self, cmap, norm, xo, yo, origin, **kwargs)
@@ -2288,9 +2288,9 @@ def figaspect(arg):
     # Extract the aspect ratio of the array
     if isarray:
         nr, nc = arg.shape[:2]
-        arr_ratio = float(nr) / nc
+        arr_ratio = nr / nc
     else:
-        arr_ratio = float(arg)
+        arr_ratio = arg
 
     # Height of user figure defaults
     fig_height = rcParams['figure.figsize'][1]
diff --git a/lib/matplotlib/image.py b/lib/matplotlib/image.py
index de7e5018125f..4d3acef3ded5 100644
--- a/lib/matplotlib/image.py
+++ b/lib/matplotlib/image.py
@@ -1453,8 +1453,8 @@ def thumbnail(infile, thumbfile, scale=0.1, interpolation='bilinear',
     # need it for the mpl API
     dpi = 100
 
-    height = float(rows)/dpi*scale
-    width = float(cols)/dpi*scale
+    height = rows / dpi * scale
+    width = cols / dpi * scale
 
     extension = extout.lower()
 
diff --git a/lib/matplotlib/mlab.py b/lib/matplotlib/mlab.py
index d735afc55f9e..fe1811a12764 100644
--- a/lib/matplotlib/mlab.py
+++ b/lib/matplotlib/mlab.py
@@ -1667,7 +1667,7 @@ def __init__(self, a, standardize=True):
         self.s = s**2
 
         # and now the contribution of the individual components
-        vars = self.s/float(len(s))
+        vars = self.s / len(s)
         self.fracs = vars/vars.sum()
 
     def project(self, x, minfrac=0.):
@@ -2172,7 +2172,7 @@ def frange(xini, xfin=None, delta=None, **kw):
     # compute # of points, spacing and return final list
     try:
         npts = kw['npts']
-        delta = (xfin-xini)/float(npts-endpoint)
+        delta = (xfin-xini) / (npts-endpoint)
     except KeyError:
         npts = int(np.round((xfin-xini)/delta)) + endpoint
         # round finds the nearest, so the endpoint can be up to
diff --git a/lib/matplotlib/patches.py b/lib/matplotlib/patches.py
index c9e255e7ac7e..1d66125561b1 100644
--- a/lib/matplotlib/patches.py
+++ b/lib/matplotlib/patches.py
@@ -1085,7 +1085,7 @@ def _recompute_path(self):
             # Partial annulus needs to draw the outer ring
            # followed by a reversed and scaled inner ring
             v1 = arc.vertices
-            v2 = arc.vertices[::-1] * float(self.r - self.width) / self.r
+            v2 = arc.vertices[::-1] * (self.r - self.width) / self.r
             v = np.vstack([v1, v2, v1[0, :], (0, 0)])
             c = np.hstack([arc.codes, arc.codes, connector, Path.CLOSEPOLY])
             c[len(arc.codes)] = connector
@@ -1179,8 +1179,8 @@ def __init__(self, x, y, dx, dy, width=1.0, **kwargs):
         L = np.hypot(dx, dy)
 
         if L != 0:
-            cx = float(dx) / L
-            sx = float(dy) / L
+            cx = dx / L
+            sx = dy / L
         else:
             # Account for division by zero
             cx, sx = 0, 1
@@ -1286,12 +1286,12 @@ def __init__(self, x, y, dx, dy, width=0.001, length_includes_head=False,
             else:
                 raise ValueError("Got unknown shape: %s" % shape)
             if distance != 0:
-                cx = float(dx) / distance
-                sx = float(dy) / distance
+                cx = dx / distance
+                sx = dy / distance
             else:
-                #Account for division by zero
+                # Account for division by zero
                 cx, sx = 0, 1
-            M = np.array([[cx, sx], [-sx, cx]])
+            M = [[cx, sx], [-sx, cx]]
             verts = np.dot(coords, M) + (x + dx, y + dy)
         Polygon.__init__(self, list(map(tuple, verts)), closed=True,
                          **kwargs)
diff --git a/lib/matplotlib/streamplot.py b/lib/matplotlib/streamplot.py
index 9f3097d386bf..752a11eb4aaf 100644
--- a/lib/matplotlib/streamplot.py
+++ b/lib/matplotlib/streamplot.py
@@ -262,8 +262,8 @@ def __init__(self, grid, mask):
         self.grid = grid
         self.mask = mask
         # Constants for conversion between grid- and mask-coordinates
-        self.x_grid2mask = float(mask.nx - 1) / grid.nx
-        self.y_grid2mask = float(mask.ny - 1) / grid.ny
+        self.x_grid2mask = (mask.nx - 1) / grid.nx
+        self.y_grid2mask = (mask.ny - 1) / grid.ny
         self.x_mask2grid = 1. / self.x_grid2mask
         self.y_mask2grid = 1. / self.y_grid2mask
 
diff --git a/lib/matplotlib/testing/compare.py b/lib/matplotlib/testing/compare.py
index c0b933ed6986..dcda681d4384 100644
--- a/lib/matplotlib/testing/compare.py
+++ b/lib/matplotlib/testing/compare.py
@@ -367,12 +367,8 @@ def calculate_rms(expectedImage, actualImage):
         raise ImageComparisonFailure(
             "Image sizes do not match expected size: {0} "
             "actual size {1}".format(expectedImage.shape, actualImage.shape))
-    num_values = expectedImage.size
-    abs_diff_image = abs(expectedImage - actualImage)
-    histogram = np.bincount(abs_diff_image.ravel(), minlength=256)
-    sum_of_squares = np.sum(histogram * np.arange(len(histogram)) ** 2)
-    rms = np.sqrt(float(sum_of_squares) / num_values)
-    return rms
+    # Convert to float to avoid overflowing finite integer types.
+    return np.sqrt(((expectedImage - actualImage).astype(float) ** 2).mean())
 
 
 def compare_images(expected, actual, tol, in_decorator=False):
diff --git a/lib/matplotlib/testing/jpl_units/Duration.py b/lib/matplotlib/testing/jpl_units/Duration.py
index 4d71c78e8270..99b2f9872985 100644
--- a/lib/matplotlib/testing/jpl_units/Duration.py
+++ b/lib/matplotlib/testing/jpl_units/Duration.py
@@ -166,7 +166,7 @@ def __div__( self, rhs ):
       = RETURN VALUE
      - Returns the scaled Duration.
      """
-      return Duration( self._frame, self._seconds / float( rhs ) )
+      return Duration( self._frame, self._seconds / rhs )
 
   #-----------------------------------------------------------------------
   def __rdiv__( self, rhs ):
@@ -178,7 +178,7 @@ def __rdiv__( self, rhs ):
      = RETURN VALUE
      - Returns the scaled Duration.
      """
-      return Duration( self._frame, float( rhs ) / self._seconds )
+      return Duration( self._frame, rhs / self._seconds )
 
   #-----------------------------------------------------------------------
   def __str__( self ):
diff --git a/lib/matplotlib/tests/test_collections.py b/lib/matplotlib/tests/test_collections.py
index 37072a72fb45..291647d178f7 100644
--- a/lib/matplotlib/tests/test_collections.py
+++ b/lib/matplotlib/tests/test_collections.py
@@ -453,8 +453,8 @@ def test_EllipseCollection():
     X, Y = np.meshgrid(x, y)
     XY = np.vstack((X.ravel(), Y.ravel())).T
 
-    ww = X/float(x[-1])
-    hh = Y/float(y[-1])
+    ww = X / x[-1]
+    hh = Y / y[-1]
     aa = np.ones_like(ww) * 20  # first axis is 20 degrees CCW from x axis
 
     ec = mcollections.EllipseCollection(ww, hh, aa,
diff --git a/lib/matplotlib/tests/test_mlab.py b/lib/matplotlib/tests/test_mlab.py
index 99a686a0513d..a2909ac7db1b 100644
--- a/lib/matplotlib/tests/test_mlab.py
+++ b/lib/matplotlib/tests/test_mlab.py
@@ -2435,7 +2435,7 @@ def test_contiguous_regions():
 def test_psd_onesided_norm():
     u = np.array([0, 1, 2, 3, 1, 2, 1])
     dt = 1.0
-    Su = np.abs(np.fft.fft(u) * dt)**2 / float(dt * u.size)
+    Su = np.abs(np.fft.fft(u) * dt)**2 / (dt * u.size)
     P, f = mlab.psd(u, NFFT=u.size, Fs=1/dt, window=mlab.window_none,
                     detrend=mlab.detrend_none, noverlap=0, pad_to=None,
                     scale_by_freq=None,
@@ -2445,10 +2445,10 @@ def test_psd_oversampling():
-    """Test the case len(x) < NFFT for psd(). """
+    """Test the case len(x) < NFFT for psd()."""
     u = np.array([0, 1, 2, 3, 1, 2, 1])
     dt = 1.0
-    Su = np.abs(np.fft.fft(u) * dt)**2 / float(dt * u.size)
+    Su = np.abs(np.fft.fft(u) * dt)**2 / (dt * u.size)
     P, f = mlab.psd(u, NFFT=u.size*2, Fs=1/dt, window=mlab.window_none,
                     detrend=mlab.detrend_none, noverlap=0, pad_to=None,
                     scale_by_freq=None,
diff --git a/lib/matplotlib/textpath.py b/lib/matplotlib/textpath.py
index a02d83e93779..63a7208bf5ff 100644
--- a/lib/matplotlib/textpath.py
+++ b/lib/matplotlib/textpath.py
@@ -99,7 +99,7 @@ def get_text_width_height_descent(self, s, prop, ismath):
             return w, h, d
 
         fontsize = prop.get_size_in_points()
-        scale = float(fontsize) / self.FONT_SCALE
+        scale = fontsize / self.FONT_SCALE
 
         if ismath:
             prop = prop.copy()
diff --git a/lib/matplotlib/ticker.py b/lib/matplotlib/ticker.py
index b57ede217c25..812f39338603 100644
--- a/lib/matplotlib/ticker.py
+++ b/lib/matplotlib/ticker.py
@@ -1573,7 +1573,7 @@ def tick_values(self, vmin, vmax):
         """
         if self.nbins is None:
             return self.locs
-        step = max(int(0.99 + len(self.locs) / float(self.nbins)), 1)
+        step = max(int(np.ceil(len(self.locs) / self.nbins)), 1)
         ticks = self.locs[::step]
         for i in range(1, step):
             ticks1 = self.locs[i::step]
@@ -2323,7 +2323,7 @@ def get_log_range(lo, hi):
         total_ticks = (a_range[1] - a_range[0]) + (c_range[1] - c_range[0])
         if has_b:
             total_ticks += 1
-        stride = max(np.floor(float(total_ticks) / (self.numticks - 1)), 1)
+        stride = max(total_ticks // (self.numticks - 1), 1)
 
         decades = []
         if has_a:
diff --git a/lib/mpl_toolkits/axes_grid1/axes_size.py b/lib/mpl_toolkits/axes_grid1/axes_size.py
index 552d76d1432f..163a6245fef0 100644
--- a/lib/mpl_toolkits/axes_grid1/axes_size.py
+++ b/lib/mpl_toolkits/axes_grid1/axes_size.py
@@ -268,7 +268,7 @@ def from_any(size, fraction_ref=None):
         return Fixed(size)
     elif isinstance(size, six.string_types):
         if size[-1] == "%":
-            return Fraction(float(size[:-1])/100., fraction_ref)
+            return Fraction(float(size[:-1]) / 100, fraction_ref)
 
     raise ValueError("Unknown format")
 
diff --git a/lib/mpl_toolkits/axisartist/angle_helper.py b/lib/mpl_toolkits/axisartist/angle_helper.py
index a75122ff1bb1..7f2b5d338b7c 100644
--- a/lib/mpl_toolkits/axisartist/angle_helper.py
+++ b/lib/mpl_toolkits/axisartist/angle_helper.py
@@ -3,13 +3,9 @@
 
 import six
 
-from math import floor
-
 import numpy as np
 import math
 
-A = np.array
-
 from mpl_toolkits.axisartist.grid_finder import ExtremeFinderSimple
 
 def select_step_degree(dv):
@@ -21,10 +17,10 @@
     minsec_limits_ = [1.5, 2.5, 3.5, 8, 11, 18, 25, 45]
     minsec_steps_ = [1, 2, 3, 5, 10, 15, 20, 30]
 
-    minute_limits_ = A(minsec_limits_)*(1./60.)
+    minute_limits_ = np.array(minsec_limits_) / 60
     minute_factors = [60.] * len(minute_limits_)
 
-    second_limits_ = A(minsec_limits_)*(1./3600.)
+    second_limits_ = np.array(minsec_limits_) / 3600
     second_factors = [3600.] * len(second_limits_)
 
     degree_limits = np.concatenate([second_limits_,
@@ -56,10 +52,10 @@ def select_step_hour(dv):
     minsec_limits_ = [1.5, 2.5, 3.5, 4.5, 5.5, 8, 11, 14, 18, 25, 45]
     minsec_steps_ = [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 30]
 
-    minute_limits_ = A(minsec_limits_)*(1./60.)
+    minute_limits_ = np.array(minsec_limits_) / 60
     minute_factors = [60.] * len(minute_limits_)
 
-    second_limits_ = A(minsec_limits_)*(1./3600.)
+    second_limits_ = np.array(minsec_limits_) / 3600
     second_factors = [3600.] * len(second_limits_)
 
     hour_limits = np.concatenate([second_limits_,
@@ -107,7 +103,7 @@ def select_step(v1, v2, nv, hour=False, include_last=True,
     if v1 > v2:
         v1, v2 = v2, v1
 
-    dv = float(v2 - v1) / nv
+    dv = (v2 - v1) / nv
 
     if hour:
         _select_step = select_step_hour
@@ -128,8 +124,7 @@
 
     f1, f2, fstep = v1*factor, v2*factor, step/factor
 
-    levs = np.arange(math.floor(f1/step), math.ceil(f2/step)+0.5,
-                     1, dtype="i") * step
+    levs = np.arange(np.floor(f1/step), np.ceil(f2/step)+0.5, dtype=int) * step
 
     # n : number of valid levels. If there is a cycle, e.g., [0, 90, 180,
     # 270, 360], the grid line needs to be extended from 0 to 360, so
@@ -250,8 +245,8 @@ def _get_number_fraction(self, factor):
                 break
 
         d = factor // threshold
-        int_log_d = int(floor(math.log10(d)))
-        if 10**int_log_d == d and d!=1:
+        int_log_d = int(np.floor(np.log10(d)))
+        if 10**int_log_d == d and d != 1:
             number_fraction = int_log_d
             factor = factor // 10**int_log_d
         return factor, number_fraction
diff --git a/lib/mpl_toolkits/mplot3d/axes3d.py b/lib/mpl_toolkits/mplot3d/axes3d.py
index 61a6eafe8d11..a7f97bef6cdc 100644
--- a/lib/mpl_toolkits/mplot3d/axes3d.py
+++ b/lib/mpl_toolkits/mplot3d/axes3d.py
@@ -1686,8 +1686,8 @@ def plot_surface(self, X, Y, Z, *args, **kwargs):
         polys = []
         # Only need these vectors to shade if there is no cmap
         if cmap is None and shade :
-            totpts = int(np.ceil(float(rows - 1) / rstride) *
-                         np.ceil(float(cols - 1) / cstride))
+            totpts = int(np.ceil((rows - 1) / rstride) *
+                         np.ceil((cols - 1) / cstride))
             v1 = np.empty((totpts, 3))
             v2 = np.empty((totpts, 3))
             # This indexes the vertex points
diff --git a/lib/mpl_toolkits/mplot3d/proj3d.py b/lib/mpl_toolkits/mplot3d/proj3d.py
index dce46d9cf7a9..a084e7f36a42 100644
--- a/lib/mpl_toolkits/mplot3d/proj3d.py
+++ b/lib/mpl_toolkits/mplot3d/proj3d.py
@@ -61,7 +61,7 @@ def line2d_seg_dist(p1, p2, p0):
     x01 = np.asarray(p0[0]) - p1[0]
     y01 = np.asarray(p0[1]) - p1[1]
 
-    u = (x01*x21 + y01*y21)/float(abs(x21**2 + y21**2))
+    u = (x01*x21 + y01*y21) / (x21**2 + y21**2)
     u = np.clip(u, 0, 1)
     d = np.sqrt((x01 - u*x21)**2 + (y01 - u*y21)**2)
 
diff --git a/tutorials/colors/colormaps.py b/tutorials/colors/colormaps.py
index e33f5c737777..01bd2d05dc58 100644
--- a/tutorials/colors/colormaps.py
+++ b/tutorials/colors/colormaps.py
@@ -223,7 +223,7 @@ def plot_color_gradients(cmap_category, cmap_list, nrows):
     # Do subplots so that colormaps have enough space.
     # Default is 6 colormaps per subplot.
     dsub = _DSUBS.get(cmap_category, 6)
-    nsubplots = int(np.ceil(len(cmap_list) / float(dsub)))
+    nsubplots = int(np.ceil(len(cmap_list) / dsub))
 
     # squeeze=False to handle similarly the case of a single subplot
     fig, axes = plt.subplots(nrows=nsubplots, squeeze=False,
diff --git a/unit/memleak.py b/unit/memleak.py
index a57deaf41aed..11db092e4006 100755
--- a/unit/memleak.py
+++ b/unit/memleak.py
@@ -44,8 +44,9 @@ def run_memleak_test(bench, iterations, report):
         nobjs = len(gc.get_objects())
         garbage = len(gc.garbage)
         open_files = len(p.open_files())
-        print("{0: 4d}: pymalloc {1: 10d}, rss {2: 10d}, nobjs {3: 10d}, garbage {4: 4d}, files: {5: 4d}".format(
-            i, malloc, rss, nobjs, garbage, open_files))
+        print("{0: 4d}: pymalloc {1: 10d}, rss {2: 10d}, nobjs {3: 10d}, "
+              "garbage {4: 4d}, files: {5: 4d}".format(
+                  i, malloc, rss, nobjs, garbage, open_files))
 
         malloc_arr[i] = malloc
         rss_arr[i] = rss
@@ -56,8 +57,8 @@ def run_memleak_test(bench, iterations, report):
         garbage_arr[i] = garbage
         open_files_arr[i] = open_files
 
-    print('Average memory consumed per loop: %1.4f bytes\n' %
-          (np.sum(rss_peaks[starti+1:] - rss_peaks[starti:-1]) / float(endi - starti)))
+    print('Average memory consumed per loop: {:1.4f} bytes\n'.format(
+        np.sum(rss_peaks[starti+1:] - rss_peaks[starti:-1]) / (endi - starti)))
 
     from matplotlib import pyplot as plt
     fig, (ax1, ax2, ax3) = plt.subplots(3)
@@ -142,4 +143,5 @@ def __call__(self):
 
     from matplotlib import pyplot as plt
     plt.ion()
-    run_memleak_test(MemleakTest(args.empty), args.iterations[0], args.report[0])
+    run_memleak_test(
+        MemleakTest(args.empty), args.iterations[0], args.report[0])